| { |
| "best_metric": 4.013113498687744, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/passive/lstm/2/checkpoints/checkpoint-1297440", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 1297440, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.8206, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 7.5517, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 7.0492, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 6.9882, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 6.943, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 6.8668, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 6.7139, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994131474623009e-05, |
| "loss": 6.615, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993292879871958e-05, |
| "loss": 6.5194, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992455923001279e-05, |
| "loss": 6.4393, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.991617328250227e-05, |
| "loss": 6.3772, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990778733499175e-05, |
| "loss": 6.3056, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989940138748123e-05, |
| "loss": 6.2402, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989101543997071e-05, |
| "loss": 6.1777, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988262949246018e-05, |
| "loss": 6.1283, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987424354494966e-05, |
| "loss": 6.0656, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986585759743914e-05, |
| "loss": 6.0332, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985747164992862e-05, |
| "loss": 5.9864, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.98490857024181e-05, |
| "loss": 5.9467, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984069975490758e-05, |
| "loss": 5.9056, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983231380739706e-05, |
| "loss": 5.8737, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 5.8322, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815558291179756e-05, |
| "loss": 5.8041, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807172343669236e-05, |
| "loss": 5.7614, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798786396158716e-05, |
| "loss": 5.7496, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 5.7106, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 5.6924, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 5.6523, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 5.6354, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 5.6148, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748470711095595e-05, |
| "loss": 5.5896, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.974008476358507e-05, |
| "loss": 5.58, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.973169881607455e-05, |
| "loss": 5.5538, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 5.5377, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 5.5294, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9706557352346724e-05, |
| "loss": 5.506, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9698171404836204e-05, |
| "loss": 5.4977, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9689785457325684e-05, |
| "loss": 5.4519, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9681399509815164e-05, |
| "loss": 5.4578, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9673013562304644e-05, |
| "loss": 5.4188, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9664627614794124e-05, |
| "loss": 5.4256, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965625804608733e-05, |
| "loss": 5.4043, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964787209857681e-05, |
| "loss": 5.4025, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963948615106629e-05, |
| "loss": 5.3707, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963110020355577e-05, |
| "loss": 5.3789, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962271425604525e-05, |
| "loss": 5.3669, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.961434468733847e-05, |
| "loss": 5.3595, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960595873982795e-05, |
| "loss": 5.3521, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959757279231743e-05, |
| "loss": 5.3231, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958918684480691e-05, |
| "loss": 5.3145, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958080089729639e-05, |
| "loss": 5.3136, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957241494978587e-05, |
| "loss": 5.3003, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956402900227535e-05, |
| "loss": 5.2938, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955564305476483e-05, |
| "loss": 5.271, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954728986486177e-05, |
| "loss": 5.2684, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953890391735125e-05, |
| "loss": 5.2476, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530517969840727e-05, |
| "loss": 5.2694, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522132022330207e-05, |
| "loss": 5.2219, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9513746074819686e-05, |
| "loss": 5.2387, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95053765061129e-05, |
| "loss": 5.228, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949699055860238e-05, |
| "loss": 5.2077, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948860461109186e-05, |
| "loss": 5.2068, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948021866358134e-05, |
| "loss": 5.1939, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947184909487455e-05, |
| "loss": 5.1821, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946346314736403e-05, |
| "loss": 5.1751, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945507719985351e-05, |
| "loss": 5.189, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944669125234299e-05, |
| "loss": 5.1628, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943830530483247e-05, |
| "loss": 5.1535, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942993573612568e-05, |
| "loss": 5.144, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942154978861516e-05, |
| "loss": 5.1415, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941316384110464e-05, |
| "loss": 5.1472, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.940477789359412e-05, |
| "loss": 5.1426, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93963919460836e-05, |
| "loss": 5.1174, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388022377376816e-05, |
| "loss": 5.1341, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379636429866296e-05, |
| "loss": 5.1269, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371250482355776e-05, |
| "loss": 5.0984, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362864534845256e-05, |
| "loss": 5.0971, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935447858733473e-05, |
| "loss": 5.0869, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346109018627945e-05, |
| "loss": 5.0785, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337723071117425e-05, |
| "loss": 5.0734, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329337123606905e-05, |
| "loss": 5.0779, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932095117609638e-05, |
| "loss": 5.0732, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 5.0751, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9304195659879074e-05, |
| "loss": 5.0513, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 5.0322, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 5.0385, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 5.0402, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 5.039, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92622823011302e-05, |
| "loss": 5.0211, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925389635361968e-05, |
| "loss": 5.0188, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924551040610916e-05, |
| "loss": 5.0187, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923712445859864e-05, |
| "loss": 5.0121, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922875488989185e-05, |
| "loss": 5.0, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922036894238133e-05, |
| "loss": 4.9985, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921198299487081e-05, |
| "loss": 4.9846, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920361342616403e-05, |
| "loss": 4.9976, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919524385745724e-05, |
| "loss": 4.9831, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918685790994672e-05, |
| "loss": 4.9624, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91784719624362e-05, |
| "loss": 4.9672, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9170086014925676e-05, |
| "loss": 4.9667, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9161700067415156e-05, |
| "loss": 4.9722, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9153314119904636e-05, |
| "loss": 4.9544, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9144928172394116e-05, |
| "loss": 4.9512, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136542224883596e-05, |
| "loss": 4.937, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128172656176805e-05, |
| "loss": 4.9429, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119786708666285e-05, |
| "loss": 4.9218, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111400761155765e-05, |
| "loss": 4.9317, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9103014813645245e-05, |
| "loss": 4.9198, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909462886613473e-05, |
| "loss": 4.9228, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908624291862421e-05, |
| "loss": 4.911, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907785697111369e-05, |
| "loss": 4.9157, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906947102360317e-05, |
| "loss": 4.9017, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906110145489638e-05, |
| "loss": 4.9064, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905271550738586e-05, |
| "loss": 4.8975, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904432955987534e-05, |
| "loss": 4.8909, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903594361236482e-05, |
| "loss": 4.9032, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90275576648543e-05, |
| "loss": 4.8873, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901917171734378e-05, |
| "loss": 4.8853, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901078576983326e-05, |
| "loss": 4.874, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.900239982232274e-05, |
| "loss": 4.8793, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899403025361595e-05, |
| "loss": 4.8742, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.898564430610543e-05, |
| "loss": 4.8674, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.897725835859491e-05, |
| "loss": 4.8735, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968872411084396e-05, |
| "loss": 4.8595, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.896048646357387e-05, |
| "loss": 4.8598, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.895210051606335e-05, |
| "loss": 4.8395, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.894371456855283e-05, |
| "loss": 4.8452, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.893534499984604e-05, |
| "loss": 4.8457, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892695905233552e-05, |
| "loss": 4.838, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918573104825e-05, |
| "loss": 4.8288, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891018715731448e-05, |
| "loss": 4.8508, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890180120980396e-05, |
| "loss": 4.8343, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889341526229344e-05, |
| "loss": 4.8247, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888502931478292e-05, |
| "loss": 4.8251, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8876659746076134e-05, |
| "loss": 4.8201, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8868273798565614e-05, |
| "loss": 4.822, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8859887851055094e-05, |
| "loss": 4.8172, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8851501903544574e-05, |
| "loss": 4.8133, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8843115956034054e-05, |
| "loss": 4.8159, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8834730008523534e-05, |
| "loss": 4.8135, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8826344061013014e-05, |
| "loss": 4.8021, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8817958113502494e-05, |
| "loss": 4.7927, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880960492359943e-05, |
| "loss": 4.8037, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880123535489264e-05, |
| "loss": 4.7904, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879286578618586e-05, |
| "loss": 4.793, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878447983867534e-05, |
| "loss": 4.7851, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877609389116482e-05, |
| "loss": 4.7825, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87677079436543e-05, |
| "loss": 4.787, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875932199614378e-05, |
| "loss": 4.7761, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875093604863326e-05, |
| "loss": 4.7721, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.7402849197387695, |
| "eval_runtime": 293.2378, |
| "eval_samples_per_second": 1301.302, |
| "eval_steps_per_second": 40.667, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874255010112274e-05, |
| "loss": 4.7586, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873416415361222e-05, |
| "loss": 4.759, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.87257782061017e-05, |
| "loss": 4.7823, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.871739225859118e-05, |
| "loss": 4.7667, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709006311080657e-05, |
| "loss": 4.7598, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700620363570137e-05, |
| "loss": 4.756, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692234416059616e-05, |
| "loss": 4.7551, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683848468549096e-05, |
| "loss": 4.7379, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8675462521038576e-05, |
| "loss": 4.7479, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8667076573528056e-05, |
| "loss": 4.7477, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865870700482127e-05, |
| "loss": 4.7495, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865032105731075e-05, |
| "loss": 4.7478, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864195148860396e-05, |
| "loss": 4.7303, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.863356554109344e-05, |
| "loss": 4.7299, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862517959358292e-05, |
| "loss": 4.7187, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.86167936460724e-05, |
| "loss": 4.7209, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8608407698561874e-05, |
| "loss": 4.7267, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8600021751051354e-05, |
| "loss": 4.7171, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8591635803540834e-05, |
| "loss": 4.7229, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858326623483405e-05, |
| "loss": 4.7366, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.857488028732352e-05, |
| "loss": 4.7178, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.856649433981301e-05, |
| "loss": 4.7206, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.855810839230249e-05, |
| "loss": 4.704, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.85497388235957e-05, |
| "loss": 4.724, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541369254888915e-05, |
| "loss": 4.7004, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532983307378395e-05, |
| "loss": 4.7044, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524597359867875e-05, |
| "loss": 4.6906, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851621141235735e-05, |
| "loss": 4.6998, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.850782546484683e-05, |
| "loss": 4.6908, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8499455896140044e-05, |
| "loss": 4.6874, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8491069948629524e-05, |
| "loss": 4.6928, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8482684001119e-05, |
| "loss": 4.6957, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847429805360848e-05, |
| "loss": 4.6893, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8465912106097964e-05, |
| "loss": 4.6908, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845754253739117e-05, |
| "loss": 4.68, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844915658988065e-05, |
| "loss": 4.6897, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844077064237013e-05, |
| "loss": 4.6626, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843238469485961e-05, |
| "loss": 4.6769, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842399874734909e-05, |
| "loss": 4.6532, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.841561279983857e-05, |
| "loss": 4.6755, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840724323113178e-05, |
| "loss": 4.6572, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839885728362126e-05, |
| "loss": 4.6693, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839047133611074e-05, |
| "loss": 4.6497, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838210176740395e-05, |
| "loss": 4.6681, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.837371581989343e-05, |
| "loss": 4.6645, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836532987238292e-05, |
| "loss": 4.6615, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83569439248724e-05, |
| "loss": 4.6675, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834855797736188e-05, |
| "loss": 4.6374, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834017202985136e-05, |
| "loss": 4.6415, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.833178608234084e-05, |
| "loss": 4.655, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.832340013483032e-05, |
| "loss": 4.6475, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83150141873198e-05, |
| "loss": 4.645, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.830662823980928e-05, |
| "loss": 4.6372, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298258671102486e-05, |
| "loss": 4.6328, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289872723591966e-05, |
| "loss": 4.6271, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281486776081446e-05, |
| "loss": 4.6424, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273100828570926e-05, |
| "loss": 4.6192, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8264714881060406e-05, |
| "loss": 4.6311, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8256328933549886e-05, |
| "loss": 4.635, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8247942986039366e-05, |
| "loss": 4.6181, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8239557038528846e-05, |
| "loss": 4.6194, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823120384862579e-05, |
| "loss": 4.6182, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.822281790111527e-05, |
| "loss": 4.6053, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821443195360475e-05, |
| "loss": 4.6096, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820604600609423e-05, |
| "loss": 4.6276, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819766005858371e-05, |
| "loss": 4.609, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8189274111073184e-05, |
| "loss": 4.6016, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8180888163562664e-05, |
| "loss": 4.6028, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817251859485588e-05, |
| "loss": 4.6033, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816413264734535e-05, |
| "loss": 4.6169, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.815574669983483e-05, |
| "loss": 4.6107, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.814736075232432e-05, |
| "loss": 4.5989, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.81389748048138e-05, |
| "loss": 4.6144, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.813060523610701e-05, |
| "loss": 4.6132, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.812221928859649e-05, |
| "loss": 4.5955, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811383334108597e-05, |
| "loss": 4.5952, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810544739357545e-05, |
| "loss": 4.5939, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809706144606493e-05, |
| "loss": 4.5882, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808869187735814e-05, |
| "loss": 4.5865, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808030592984762e-05, |
| "loss": 4.5895, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80719199823371e-05, |
| "loss": 4.596, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806353403482658e-05, |
| "loss": 4.5971, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.805514808731606e-05, |
| "loss": 4.5855, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804676213980554e-05, |
| "loss": 4.5624, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803837619229502e-05, |
| "loss": 4.5784, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8029990244784504e-05, |
| "loss": 4.583, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802162067607771e-05, |
| "loss": 4.5864, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801323472856719e-05, |
| "loss": 4.5765, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80048651598604e-05, |
| "loss": 4.5728, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.799647921234988e-05, |
| "loss": 4.5787, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.798809326483936e-05, |
| "loss": 4.5711, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797970731732884e-05, |
| "loss": 4.5665, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797132136981832e-05, |
| "loss": 4.5731, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79629354223078e-05, |
| "loss": 4.5579, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795454947479728e-05, |
| "loss": 4.576, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794617990609049e-05, |
| "loss": 4.5685, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.793779395857997e-05, |
| "loss": 4.5468, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792940801106946e-05, |
| "loss": 4.5608, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792102206355894e-05, |
| "loss": 4.5584, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791263611604842e-05, |
| "loss": 4.5696, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790426654734163e-05, |
| "loss": 4.5567, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7895880599831107e-05, |
| "loss": 4.5517, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7887494652320587e-05, |
| "loss": 4.5448, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7879108704810067e-05, |
| "loss": 4.5547, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7870722757299546e-05, |
| "loss": 4.5367, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7862353188592756e-05, |
| "loss": 4.5446, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7853967241082236e-05, |
| "loss": 4.5442, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845581293571715e-05, |
| "loss": 4.5443, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837195346061195e-05, |
| "loss": 4.5342, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7828809398550675e-05, |
| "loss": 4.5442, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7820423451040155e-05, |
| "loss": 4.5361, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7812037503529635e-05, |
| "loss": 4.5378, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7803651556019115e-05, |
| "loss": 4.5388, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7795281987312324e-05, |
| "loss": 4.5295, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7786896039801804e-05, |
| "loss": 4.5529, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7778510092291284e-05, |
| "loss": 4.5383, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777014052358449e-05, |
| "loss": 4.5356, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.776175457607397e-05, |
| "loss": 4.5252, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.775336862856345e-05, |
| "loss": 4.5332, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.774498268105293e-05, |
| "loss": 4.5307, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773659673354241e-05, |
| "loss": 4.5222, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772821078603189e-05, |
| "loss": 4.5387, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771984121732511e-05, |
| "loss": 4.5258, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771145526981459e-05, |
| "loss": 4.5203, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770306932230407e-05, |
| "loss": 4.5165, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769468337479355e-05, |
| "loss": 4.5162, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768629742728303e-05, |
| "loss": 4.5163, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767791147977251e-05, |
| "loss": 4.5189, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766952553226199e-05, |
| "loss": 4.5055, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766113958475147e-05, |
| "loss": 4.5329, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765277001604468e-05, |
| "loss": 4.5228, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764440044733789e-05, |
| "loss": 4.5119, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.763601449982737e-05, |
| "loss": 4.511, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762762855231685e-05, |
| "loss": 4.5062, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761924260480633e-05, |
| "loss": 4.5181, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7610856657295813e-05, |
| "loss": 4.5148, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7602470709785293e-05, |
| "loss": 4.5101, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75941011410785e-05, |
| "loss": 4.5149, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758571519356798e-05, |
| "loss": 4.5104, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757734562486119e-05, |
| "loss": 4.5089, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756895967735067e-05, |
| "loss": 4.4995, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756057372984015e-05, |
| "loss": 4.5121, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.755218778232963e-05, |
| "loss": 4.501, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754380183481911e-05, |
| "loss": 4.504, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753541588730859e-05, |
| "loss": 4.499, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.752702993979807e-05, |
| "loss": 4.5013, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751864399228755e-05, |
| "loss": 4.5006, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751025804477703e-05, |
| "loss": 4.4889, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.750188847607025e-05, |
| "loss": 4.4938, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.468594074249268, |
| "eval_runtime": 293.1863, |
| "eval_samples_per_second": 1301.531, |
| "eval_steps_per_second": 40.674, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.749350252855973e-05, |
| "loss": 4.4813, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748511658104921e-05, |
| "loss": 4.4803, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747673063353868e-05, |
| "loss": 4.5069, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746834468602816e-05, |
| "loss": 4.4979, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745995873851764e-05, |
| "loss": 4.4866, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745157279100712e-05, |
| "loss": 4.486, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74431868434966e-05, |
| "loss": 4.488, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.743480089598608e-05, |
| "loss": 4.4733, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.742641494847556e-05, |
| "loss": 4.4907, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741802900096504e-05, |
| "loss": 4.4829, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740964305345452e-05, |
| "loss": 4.4869, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401257105944e-05, |
| "loss": 4.4882, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7392887537237216e-05, |
| "loss": 4.4709, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384501589726696e-05, |
| "loss": 4.4747, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7376115642216176e-05, |
| "loss": 4.4685, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7367729694705656e-05, |
| "loss": 4.4692, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7359343747195135e-05, |
| "loss": 4.473, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7350957799684615e-05, |
| "loss": 4.4655, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7342571852174095e-05, |
| "loss": 4.4732, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733418590466357e-05, |
| "loss": 4.4934, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732579995715305e-05, |
| "loss": 4.4702, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7317430388446264e-05, |
| "loss": 4.4742, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730904444093574e-05, |
| "loss": 4.468, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730065849342522e-05, |
| "loss": 4.4812, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7292272545914704e-05, |
| "loss": 4.4606, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7283886598404184e-05, |
| "loss": 4.4703, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7275500650893664e-05, |
| "loss": 4.4543, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7267114703383144e-05, |
| "loss": 4.4588, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7258728755872624e-05, |
| "loss": 4.4567, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725035918716583e-05, |
| "loss": 4.4534, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724197323965531e-05, |
| "loss": 4.4594, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723358729214479e-05, |
| "loss": 4.4657, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722520134463427e-05, |
| "loss": 4.4593, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721683177592748e-05, |
| "loss": 4.4649, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720846220722069e-05, |
| "loss": 4.4488, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720007625971017e-05, |
| "loss": 4.4673, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719169031219965e-05, |
| "loss": 4.4346, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718330436468914e-05, |
| "loss": 4.4572, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717491841717862e-05, |
| "loss": 4.4322, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71665324696681e-05, |
| "loss": 4.4539, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715814652215758e-05, |
| "loss": 4.4405, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714976057464706e-05, |
| "loss": 4.4541, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714139100594027e-05, |
| "loss": 4.4352, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7133021437233476e-05, |
| "loss": 4.4473, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7124635489722956e-05, |
| "loss": 4.4495, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7116249542212436e-05, |
| "loss": 4.4501, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7107863594701916e-05, |
| "loss": 4.4512, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099477647191396e-05, |
| "loss": 4.4295, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091091699680876e-05, |
| "loss": 4.4266, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7082705752170356e-05, |
| "loss": 4.4488, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7074319804659836e-05, |
| "loss": 4.4405, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706595023595305e-05, |
| "loss": 4.4344, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705756428844253e-05, |
| "loss": 4.4331, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704917834093201e-05, |
| "loss": 4.429, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704079239342149e-05, |
| "loss": 4.4185, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70324228247147e-05, |
| "loss": 4.4373, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702405325600791e-05, |
| "loss": 4.4161, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701566730849739e-05, |
| "loss": 4.4264, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700728136098687e-05, |
| "loss": 4.4377, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699889541347635e-05, |
| "loss": 4.4191, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699050946596583e-05, |
| "loss": 4.4202, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698212351845531e-05, |
| "loss": 4.4228, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697373757094479e-05, |
| "loss": 4.4064, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6965351623434276e-05, |
| "loss": 4.41, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6956982054727485e-05, |
| "loss": 4.4309, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6948596107216965e-05, |
| "loss": 4.4157, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6940210159706445e-05, |
| "loss": 4.4091, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6931824212195925e-05, |
| "loss": 4.4105, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69234382646854e-05, |
| "loss": 4.4147, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691505231717488e-05, |
| "loss": 4.4253, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690666636966436e-05, |
| "loss": 4.4222, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689828042215384e-05, |
| "loss": 4.4104, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688989447464332e-05, |
| "loss": 4.4255, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68815085271328e-05, |
| "loss": 4.4213, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687312257962228e-05, |
| "loss": 4.4096, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686473663211176e-05, |
| "loss": 4.4071, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856367063404974e-05, |
| "loss": 4.4138, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6847981115894454e-05, |
| "loss": 4.406, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6839595168383934e-05, |
| "loss": 4.4043, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6831209220873414e-05, |
| "loss": 4.406, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682283965216662e-05, |
| "loss": 4.4132, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68144537046561e-05, |
| "loss": 4.4174, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680606775714558e-05, |
| "loss": 4.4032, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679768180963506e-05, |
| "loss": 4.389, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678929586212454e-05, |
| "loss": 4.3971, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678090991461402e-05, |
| "loss": 4.4031, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677254034590723e-05, |
| "loss": 4.4097, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676415439839671e-05, |
| "loss": 4.3995, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675576845088619e-05, |
| "loss": 4.3979, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674738250337568e-05, |
| "loss": 4.4082, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673899655586516e-05, |
| "loss": 4.3933, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673062698715837e-05, |
| "loss": 4.3946, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.672224103964785e-05, |
| "loss": 4.3985, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.671385509213733e-05, |
| "loss": 4.3899, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.670546914462681e-05, |
| "loss": 4.4069, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.669708319711629e-05, |
| "loss": 4.3936, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668869724960576e-05, |
| "loss": 4.3809, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668031130209524e-05, |
| "loss": 4.3924, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6671941733388456e-05, |
| "loss": 4.3895, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663555785877936e-05, |
| "loss": 4.4059, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6655186217171145e-05, |
| "loss": 4.3932, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664680026966063e-05, |
| "loss": 4.3851, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663841432215011e-05, |
| "loss": 4.3777, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663002837463959e-05, |
| "loss": 4.3908, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6621642427129065e-05, |
| "loss": 4.3738, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6613256479618545e-05, |
| "loss": 4.3838, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6604870532108025e-05, |
| "loss": 4.3826, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6596484584597505e-05, |
| "loss": 4.3815, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6588098637086985e-05, |
| "loss": 4.3703, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6579712689576465e-05, |
| "loss": 4.3896, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6571326742065945e-05, |
| "loss": 4.3745, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6562940794555425e-05, |
| "loss": 4.3746, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6554554847044905e-05, |
| "loss": 4.3826, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6546185278338114e-05, |
| "loss": 4.3708, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653781570963133e-05, |
| "loss": 4.3925, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652942976212081e-05, |
| "loss": 4.3814, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652104381461029e-05, |
| "loss": 4.3816, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651265786709977e-05, |
| "loss": 4.3709, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650427191958925e-05, |
| "loss": 4.3764, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649590235088246e-05, |
| "loss": 4.3765, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648751640337194e-05, |
| "loss": 4.3663, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647913045586142e-05, |
| "loss": 4.3814, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64707445083509e-05, |
| "loss": 4.3756, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.646235856084038e-05, |
| "loss": 4.3677, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645397261332986e-05, |
| "loss": 4.3699, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644558666581934e-05, |
| "loss": 4.3683, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643720071830882e-05, |
| "loss": 4.3599, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6428831149602034e-05, |
| "loss": 4.3733, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642046158089524e-05, |
| "loss": 4.3581, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641207563338472e-05, |
| "loss": 4.3795, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64036896858742e-05, |
| "loss": 4.3767, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639530373836368e-05, |
| "loss": 4.3633, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638693416965689e-05, |
| "loss": 4.3616, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637854822214637e-05, |
| "loss": 4.362, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637016227463585e-05, |
| "loss": 4.3713, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636177632712533e-05, |
| "loss": 4.3675, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635339037961481e-05, |
| "loss": 4.3657, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634502081090802e-05, |
| "loss": 4.3682, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633663486339751e-05, |
| "loss": 4.3681, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632824891588699e-05, |
| "loss": 4.3675, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631986296837647e-05, |
| "loss": 4.3531, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631147702086595e-05, |
| "loss": 4.3731, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630310745215916e-05, |
| "loss": 4.3616, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6294737883452366e-05, |
| "loss": 4.3583, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6286351935941846e-05, |
| "loss": 4.3602, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6277965988431326e-05, |
| "loss": 4.3614, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6269580040920806e-05, |
| "loss": 4.3575, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6261194093410286e-05, |
| "loss": 4.3473, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6252808145899766e-05, |
| "loss": 4.3497, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.337387561798096, |
| "eval_runtime": 294.5321, |
| "eval_samples_per_second": 1295.584, |
| "eval_steps_per_second": 40.488, |
| "step": 228960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6244422198389246e-05, |
| "loss": 4.3456, |
| "step": 229376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6236036250878726e-05, |
| "loss": 4.3415, |
| "step": 229888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6227650303368205e-05, |
| "loss": 4.3677, |
| "step": 230400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6219264355857685e-05, |
| "loss": 4.3592, |
| "step": 230912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6210878408347165e-05, |
| "loss": 4.3566, |
| "step": 231424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6202492460836645e-05, |
| "loss": 4.3438, |
| "step": 231936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6194106513326125e-05, |
| "loss": 4.3543, |
| "step": 232448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6185720565815605e-05, |
| "loss": 4.3369, |
| "step": 232960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6177334618305085e-05, |
| "loss": 4.3582, |
| "step": 233472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6168948670794565e-05, |
| "loss": 4.3503, |
| "step": 233984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6160562723284045e-05, |
| "loss": 4.3497, |
| "step": 234496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6152176775773525e-05, |
| "loss": 4.3589, |
| "step": 235008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6143807207066734e-05, |
| "loss": 4.337, |
| "step": 235520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6135421259556214e-05, |
| "loss": 4.3454, |
| "step": 236032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6127035312045694e-05, |
| "loss": 4.334, |
| "step": 236544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6118649364535174e-05, |
| "loss": 4.3405, |
| "step": 237056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6110263417024654e-05, |
| "loss": 4.3385, |
| "step": 237568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.610187746951414e-05, |
| "loss": 4.3373, |
| "step": 238080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6093491522003614e-05, |
| "loss": 4.3406, |
| "step": 238592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.608512195329683e-05, |
| "loss": 4.3606, |
| "step": 239104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.607673600578631e-05, |
| "loss": 4.3408, |
| "step": 239616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.606835005827578e-05, |
| "loss": 4.3454, |
| "step": 240128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.605996411076526e-05, |
| "loss": 4.3405, |
| "step": 240640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.605159454205848e-05, |
| "loss": 4.3532, |
| "step": 241152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.604320859454796e-05, |
| "loss": 4.3331, |
| "step": 241664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.603482264703743e-05, |
| "loss": 4.3407, |
| "step": 242176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.602643669952691e-05, |
| "loss": 4.3299, |
| "step": 242688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.601805075201639e-05, |
| "loss": 4.33, |
| "step": 243200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.600966480450588e-05, |
| "loss": 4.3291, |
| "step": 243712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.600127885699536e-05, |
| "loss": 4.3346, |
| "step": 244224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.599289290948484e-05, |
| "loss": 4.3326, |
| "step": 244736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.598450696197432e-05, |
| "loss": 4.3404, |
| "step": 245248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.597613739326753e-05, |
| "loss": 4.3351, |
| "step": 245760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.596775144575701e-05, |
| "loss": 4.337, |
| "step": 246272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.595936549824649e-05, |
| "loss": 4.331, |
| "step": 246784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.595097955073597e-05, |
| "loss": 4.3413, |
| "step": 247296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5942609982029176e-05, |
| "loss": 4.3191, |
| "step": 247808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5934224034518656e-05, |
| "loss": 4.332, |
| "step": 248320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5925838087008136e-05, |
| "loss": 4.3097, |
| "step": 248832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5917452139497616e-05, |
| "loss": 4.329, |
| "step": 249344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5909066191987096e-05, |
| "loss": 4.3225, |
| "step": 249856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5900680244476576e-05, |
| "loss": 4.3361, |
| "step": 250368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.589229429696606e-05, |
| "loss": 4.3157, |
| "step": 250880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.588390834945554e-05, |
| "loss": 4.3273, |
| "step": 251392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.587555515955248e-05, |
| "loss": 4.3298, |
| "step": 251904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.586716921204196e-05, |
| "loss": 4.3327, |
| "step": 252416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585879964333517e-05, |
| "loss": 4.3274, |
| "step": 252928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585041369582465e-05, |
| "loss": 4.3155, |
| "step": 253440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.584202774831413e-05, |
| "loss": 4.3055, |
| "step": 253952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.583364180080361e-05, |
| "loss": 4.3294, |
| "step": 254464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.582525585329309e-05, |
| "loss": 4.3249, |
| "step": 254976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.581686990578257e-05, |
| "loss": 4.3175, |
| "step": 255488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.580848395827205e-05, |
| "loss": 4.314, |
| "step": 256000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.580009801076153e-05, |
| "loss": 4.3152, |
| "step": 256512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5791712063251017e-05, |
| "loss": 4.2995, |
| "step": 257024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5783342494544226e-05, |
| "loss": 4.3246, |
| "step": 257536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5774956547033706e-05, |
| "loss": 4.2991, |
| "step": 258048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5766570599523186e-05, |
| "loss": 4.3125, |
| "step": 258560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5758184652012666e-05, |
| "loss": 4.3201, |
| "step": 259072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5749798704502146e-05, |
| "loss": 4.3085, |
| "step": 259584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5741429135795355e-05, |
| "loss": 4.3036, |
| "step": 260096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5733043188284835e-05, |
| "loss": 4.3106, |
| "step": 260608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5724657240774315e-05, |
| "loss": 4.2951, |
| "step": 261120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5716271293263794e-05, |
| "loss": 4.3025, |
| "step": 261632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.570788534575327e-05, |
| "loss": 4.314, |
| "step": 262144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5699515777046484e-05, |
| "loss": 4.3045, |
| "step": 262656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5691129829535963e-05, |
| "loss": 4.2974, |
| "step": 263168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5682743882025443e-05, |
| "loss": 4.2966, |
| "step": 263680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5674357934514923e-05, |
| "loss": 4.3046, |
| "step": 264192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.56659719870044e-05, |
| "loss": 4.3094, |
| "step": 264704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.565758603949388e-05, |
| "loss": 4.3111, |
| "step": 265216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564921647078709e-05, |
| "loss": 4.3035, |
| "step": 265728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564083052327657e-05, |
| "loss": 4.3132, |
| "step": 266240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.563244457576605e-05, |
| "loss": 4.3137, |
| "step": 266752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.562405862825553e-05, |
| "loss": 4.2984, |
| "step": 267264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.561568905954874e-05, |
| "loss": 4.2969, |
| "step": 267776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.560730311203822e-05, |
| "loss": 4.3066, |
| "step": 268288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559891716452771e-05, |
| "loss": 4.3015, |
| "step": 268800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559053121701719e-05, |
| "loss": 4.2992, |
| "step": 269312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.55821616483104e-05, |
| "loss": 4.2977, |
| "step": 269824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.557377570079988e-05, |
| "loss": 4.3051, |
| "step": 270336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.556538975328936e-05, |
| "loss": 4.3084, |
| "step": 270848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.555700380577884e-05, |
| "loss": 4.2993, |
| "step": 271360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554861785826832e-05, |
| "loss": 4.2852, |
| "step": 271872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5540248289561526e-05, |
| "loss": 4.287, |
| "step": 272384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5531862342051006e-05, |
| "loss": 4.2956, |
| "step": 272896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5523476394540486e-05, |
| "loss": 4.3041, |
| "step": 273408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5515090447029966e-05, |
| "loss": 4.2966, |
| "step": 273920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5506720878323175e-05, |
| "loss": 4.2875, |
| "step": 274432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5498334930812655e-05, |
| "loss": 4.3015, |
| "step": 274944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.548994898330214e-05, |
| "loss": 4.2926, |
| "step": 275456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.548156303579162e-05, |
| "loss": 4.2889, |
| "step": 275968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.54731770882811e-05, |
| "loss": 4.2903, |
| "step": 276480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.546480751957431e-05, |
| "loss": 4.295, |
| "step": 276992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.545642157206379e-05, |
| "loss": 4.3046, |
| "step": 277504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.544803562455327e-05, |
| "loss": 4.2842, |
| "step": 278016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.543964967704275e-05, |
| "loss": 4.2767, |
| "step": 278528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.543126372953223e-05, |
| "loss": 4.2932, |
| "step": 279040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.542287778202171e-05, |
| "loss": 4.2885, |
| "step": 279552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.541449183451119e-05, |
| "loss": 4.3016, |
| "step": 280064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.54061222658044e-05, |
| "loss": 4.2949, |
| "step": 280576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.539773631829388e-05, |
| "loss": 4.2814, |
| "step": 281088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538935037078336e-05, |
| "loss": 4.2765, |
| "step": 281600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538096442327284e-05, |
| "loss": 4.2919, |
| "step": 282112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5372578475762326e-05, |
| "loss": 4.2723, |
| "step": 282624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5364192528251806e-05, |
| "loss": 4.2831, |
| "step": 283136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.535580658074128e-05, |
| "loss": 4.2785, |
| "step": 283648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5347437012034495e-05, |
| "loss": 4.2865, |
| "step": 284160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5339051064523975e-05, |
| "loss": 4.2703, |
| "step": 284672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5330665117013455e-05, |
| "loss": 4.2896, |
| "step": 285184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5322295548306664e-05, |
| "loss": 4.2745, |
| "step": 285696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5313909600796144e-05, |
| "loss": 4.2729, |
| "step": 286208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5305523653285624e-05, |
| "loss": 4.2861, |
| "step": 286720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5297137705775104e-05, |
| "loss": 4.2715, |
| "step": 287232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.528875175826458e-05, |
| "loss": 4.2964, |
| "step": 287744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5280365810754064e-05, |
| "loss": 4.2857, |
| "step": 288256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5271979863243544e-05, |
| "loss": 4.282, |
| "step": 288768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5263593915733024e-05, |
| "loss": 4.2743, |
| "step": 289280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5255207968222504e-05, |
| "loss": 4.2772, |
| "step": 289792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5246822020711984e-05, |
| "loss": 4.2796, |
| "step": 290304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5238436073201464e-05, |
| "loss": 4.2711, |
| "step": 290816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5230050125690944e-05, |
| "loss": 4.285, |
| "step": 291328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.522168055698415e-05, |
| "loss": 4.2775, |
| "step": 291840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.521329460947363e-05, |
| "loss": 4.272, |
| "step": 292352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.520492504076684e-05, |
| "loss": 4.2733, |
| "step": 292864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.519653909325632e-05, |
| "loss": 4.2738, |
| "step": 293376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.51881531457458e-05, |
| "loss": 4.267, |
| "step": 293888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.517976719823528e-05, |
| "loss": 4.2818, |
| "step": 294400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.51713976295285e-05, |
| "loss": 4.2616, |
| "step": 294912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.516301168201798e-05, |
| "loss": 4.284, |
| "step": 295424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.515462573450746e-05, |
| "loss": 4.2771, |
| "step": 295936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.514623978699694e-05, |
| "loss": 4.2774, |
| "step": 296448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5137870218290147e-05, |
| "loss": 4.2674, |
| "step": 296960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5129484270779626e-05, |
| "loss": 4.2707, |
| "step": 297472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5121098323269106e-05, |
| "loss": 4.2796, |
| "step": 297984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5112712375758586e-05, |
| "loss": 4.2771, |
| "step": 298496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5104326428248066e-05, |
| "loss": 4.2686, |
| "step": 299008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5095940480737546e-05, |
| "loss": 4.2748, |
| "step": 299520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5087554533227026e-05, |
| "loss": 4.2728, |
| "step": 300032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5079168585716506e-05, |
| "loss": 4.2783, |
| "step": 300544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5070782638205986e-05, |
| "loss": 4.2637, |
| "step": 301056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5062396690695466e-05, |
| "loss": 4.282, |
| "step": 301568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5054010743184946e-05, |
| "loss": 4.2726, |
| "step": 302080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.504564117447816e-05, |
| "loss": 4.2688, |
| "step": 302592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.503725522696764e-05, |
| "loss": 4.2677, |
| "step": 303104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5028869279457115e-05, |
| "loss": 4.2746, |
| "step": 303616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.502049971075033e-05, |
| "loss": 4.2644, |
| "step": 304128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.501211376323981e-05, |
| "loss": 4.2543, |
| "step": 304640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.500372781572929e-05, |
| "loss": 4.2699, |
| "step": 305152 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.256018161773682, |
| "eval_runtime": 291.5284, |
| "eval_samples_per_second": 1308.933, |
| "eval_steps_per_second": 40.905, |
| "step": 305280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.4995341868218764e-05, |
| "loss": 4.2624, |
| "step": 305664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.4986955920708244e-05, |
| "loss": 4.2517, |
| "step": 306176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.4978569973197724e-05, |
| "loss": 4.2775, |
| "step": 306688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.497020040449093e-05, |
| "loss": 4.2703, |
| "step": 307200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.496181445698042e-05, |
| "loss": 4.269, |
| "step": 307712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.49534285094699e-05, |
| "loss": 4.2614, |
| "step": 308224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.494504256195938e-05, |
| "loss": 4.2643, |
| "step": 308736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.493667299325259e-05, |
| "loss": 4.245, |
| "step": 309248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.492828704574207e-05, |
| "loss": 4.2699, |
| "step": 309760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.491990109823155e-05, |
| "loss": 4.265, |
| "step": 310272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.491151515072103e-05, |
| "loss": 4.2611, |
| "step": 310784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.490312920321051e-05, |
| "loss": 4.2689, |
| "step": 311296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.489475963450372e-05, |
| "loss": 4.2513, |
| "step": 311808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.48863736869932e-05, |
| "loss": 4.258, |
| "step": 312320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.487798773948268e-05, |
| "loss": 4.2515, |
| "step": 312832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.486960179197216e-05, |
| "loss": 4.2492, |
| "step": 313344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.486121584446164e-05, |
| "loss": 4.2536, |
| "step": 313856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.485282989695112e-05, |
| "loss": 4.252, |
| "step": 314368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.4844443949440604e-05, |
| "loss": 4.2547, |
| "step": 314880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.4836058001930084e-05, |
| "loss": 4.2733, |
| "step": 315392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.482768843322329e-05, |
| "loss": 4.2594, |
| "step": 315904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.481930248571277e-05, |
| "loss": 4.2636, |
| "step": 316416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.481091653820225e-05, |
| "loss": 4.2544, |
| "step": 316928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.480254696949546e-05, |
| "loss": 4.2641, |
| "step": 317440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.479416102198494e-05, |
| "loss": 4.2509, |
| "step": 317952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.478577507447442e-05, |
| "loss": 4.2562, |
| "step": 318464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.47773891269639e-05, |
| "loss": 4.2499, |
| "step": 318976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.476900317945338e-05, |
| "loss": 4.2432, |
| "step": 319488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.476063361074659e-05, |
| "loss": 4.2428, |
| "step": 320000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.475224766323607e-05, |
| "loss": 4.2518, |
| "step": 320512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.474386171572556e-05, |
| "loss": 4.2502, |
| "step": 321024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.473547576821504e-05, |
| "loss": 4.2576, |
| "step": 321536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.472708982070452e-05, |
| "loss": 4.2546, |
| "step": 322048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4718703873194e-05, |
| "loss": 4.2558, |
| "step": 322560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.471033430448721e-05, |
| "loss": 4.2459, |
| "step": 323072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.470194835697669e-05, |
| "loss": 4.2598, |
| "step": 323584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4693578788269896e-05, |
| "loss": 4.2405, |
| "step": 324096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4685192840759376e-05, |
| "loss": 4.2466, |
| "step": 324608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4676806893248856e-05, |
| "loss": 4.2316, |
| "step": 325120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4668420945738336e-05, |
| "loss": 4.2477, |
| "step": 325632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4660034998227816e-05, |
| "loss": 4.2439, |
| "step": 326144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4651649050717296e-05, |
| "loss": 4.2523, |
| "step": 326656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4643263103206776e-05, |
| "loss": 4.2367, |
| "step": 327168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4634877155696256e-05, |
| "loss": 4.2473, |
| "step": 327680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4626491208185736e-05, |
| "loss": 4.2493, |
| "step": 328192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4618105260675215e-05, |
| "loss": 4.2517, |
| "step": 328704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4609719313164695e-05, |
| "loss": 4.2482, |
| "step": 329216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4601349744457905e-05, |
| "loss": 4.2353, |
| "step": 329728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4592963796947384e-05, |
| "loss": 4.2249, |
| "step": 330240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4584577849436864e-05, |
| "loss": 4.2531, |
| "step": 330752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4576191901926344e-05, |
| "loss": 4.2457, |
| "step": 331264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4567805954415824e-05, |
| "loss": 4.2382, |
| "step": 331776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4559420006905304e-05, |
| "loss": 4.2378, |
| "step": 332288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4551034059394784e-05, |
| "loss": 4.2371, |
| "step": 332800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4542648111884264e-05, |
| "loss": 4.2224, |
| "step": 333312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.453427854317748e-05, |
| "loss": 4.2402, |
| "step": 333824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.452589259566696e-05, |
| "loss": 4.2252, |
| "step": 334336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.451750664815644e-05, |
| "loss": 4.2323, |
| "step": 334848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.450912070064592e-05, |
| "loss": 4.2451, |
| "step": 335360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.450075113193913e-05, |
| "loss": 4.2303, |
| "step": 335872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.449236518442861e-05, |
| "loss": 4.2259, |
| "step": 336384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.448397923691809e-05, |
| "loss": 4.2393, |
| "step": 336896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.447559328940757e-05, |
| "loss": 4.2169, |
| "step": 337408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.446720734189705e-05, |
| "loss": 4.2264, |
| "step": 337920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.445882139438653e-05, |
| "loss": 4.2354, |
| "step": 338432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.445043544687601e-05, |
| "loss": 4.2309, |
| "step": 338944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.444204949936548e-05, |
| "loss": 4.2222, |
| "step": 339456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.443366355185496e-05, |
| "loss": 4.2231, |
| "step": 339968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.442529398314818e-05, |
| "loss": 4.2249, |
| "step": 340480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.441690803563766e-05, |
| "loss": 4.2335, |
| "step": 340992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.440852208812714e-05, |
| "loss": 4.2343, |
| "step": 341504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.440013614061662e-05, |
| "loss": 4.2284, |
| "step": 342016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4391766571909834e-05, |
| "loss": 4.238, |
| "step": 342528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.4383380624399314e-05, |
| "loss": 4.2404, |
| "step": 343040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.437499467688879e-05, |
| "loss": 4.225, |
| "step": 343552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.436664148698573e-05, |
| "loss": 4.2183, |
| "step": 344064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.435825553947521e-05, |
| "loss": 4.237, |
| "step": 344576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.434986959196469e-05, |
| "loss": 4.2296, |
| "step": 345088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.434148364445417e-05, |
| "loss": 4.2204, |
| "step": 345600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.433309769694365e-05, |
| "loss": 4.2254, |
| "step": 346112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.432471174943313e-05, |
| "loss": 4.2303, |
| "step": 346624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.431634218072635e-05, |
| "loss": 4.2391, |
| "step": 347136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.430795623321583e-05, |
| "loss": 4.2207, |
| "step": 347648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.429957028570531e-05, |
| "loss": 4.2117, |
| "step": 348160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.429118433819479e-05, |
| "loss": 4.2216, |
| "step": 348672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.428279839068426e-05, |
| "loss": 4.2176, |
| "step": 349184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.427441244317374e-05, |
| "loss": 4.2306, |
| "step": 349696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.426602649566322e-05, |
| "loss": 4.2267, |
| "step": 350208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.42576405481527e-05, |
| "loss": 4.2138, |
| "step": 350720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.424925460064218e-05, |
| "loss": 4.2309, |
| "step": 351232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.424086865313166e-05, |
| "loss": 4.2201, |
| "step": 351744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.423248270562114e-05, |
| "loss": 4.2223, |
| "step": 352256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.422409675811062e-05, |
| "loss": 4.2152, |
| "step": 352768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4215727189403836e-05, |
| "loss": 4.2225, |
| "step": 353280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4207341241893316e-05, |
| "loss": 4.2342, |
| "step": 353792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4198955294382796e-05, |
| "loss": 4.2192, |
| "step": 354304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4190569346872276e-05, |
| "loss": 4.2028, |
| "step": 354816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4182199778165485e-05, |
| "loss": 4.2181, |
| "step": 355328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4173813830654965e-05, |
| "loss": 4.2198, |
| "step": 355840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4165427883144445e-05, |
| "loss": 4.2254, |
| "step": 356352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4157041935633925e-05, |
| "loss": 4.2287, |
| "step": 356864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4148655988123405e-05, |
| "loss": 4.2125, |
| "step": 357376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4140270040612885e-05, |
| "loss": 4.2086, |
| "step": 357888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4131884093102365e-05, |
| "loss": 4.2207, |
| "step": 358400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4123514524395574e-05, |
| "loss": 4.1986, |
| "step": 358912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4115128576885054e-05, |
| "loss": 4.215, |
| "step": 359424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4106742629374534e-05, |
| "loss": 4.2089, |
| "step": 359936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.409835668186402e-05, |
| "loss": 4.2173, |
| "step": 360448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.40899707343535e-05, |
| "loss": 4.2046, |
| "step": 360960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4081584786842974e-05, |
| "loss": 4.2201, |
| "step": 361472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.4073198839332453e-05, |
| "loss": 4.2048, |
| "step": 361984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.406482927062567e-05, |
| "loss": 4.2035, |
| "step": 362496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.405644332311514e-05, |
| "loss": 4.2185, |
| "step": 363008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.404805737560462e-05, |
| "loss": 4.2076, |
| "step": 363520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.40396714280941e-05, |
| "loss": 4.2269, |
| "step": 364032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.403128548058358e-05, |
| "loss": 4.2169, |
| "step": 364544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.402293229068053e-05, |
| "loss": 4.2101, |
| "step": 365056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.401454634317001e-05, |
| "loss": 4.2159, |
| "step": 365568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.400616039565949e-05, |
| "loss": 4.2031, |
| "step": 366080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.399777444814897e-05, |
| "loss": 4.2133, |
| "step": 366592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.398938850063845e-05, |
| "loss": 4.2043, |
| "step": 367104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.398100255312793e-05, |
| "loss": 4.2203, |
| "step": 367616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.397263298442114e-05, |
| "loss": 4.2063, |
| "step": 368128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3964247036910616e-05, |
| "loss": 4.2041, |
| "step": 368640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3955861089400096e-05, |
| "loss": 4.205, |
| "step": 369152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3947475141889576e-05, |
| "loss": 4.2093, |
| "step": 369664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3939089194379056e-05, |
| "loss": 4.1999, |
| "step": 370176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3930703246868536e-05, |
| "loss": 4.2149, |
| "step": 370688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3922317299358016e-05, |
| "loss": 4.1964, |
| "step": 371200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3913931351847496e-05, |
| "loss": 4.2167, |
| "step": 371712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3905545404336976e-05, |
| "loss": 4.21, |
| "step": 372224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.389717583563019e-05, |
| "loss": 4.2113, |
| "step": 372736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.38888062669234e-05, |
| "loss": 4.2027, |
| "step": 373248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.388042031941288e-05, |
| "loss": 4.2042, |
| "step": 373760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.387203437190236e-05, |
| "loss": 4.2159, |
| "step": 374272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.386364842439184e-05, |
| "loss": 4.2077, |
| "step": 374784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.385526247688132e-05, |
| "loss": 4.2003, |
| "step": 375296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.38468765293708e-05, |
| "loss": 4.2116, |
| "step": 375808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.383849058186028e-05, |
| "loss": 4.2067, |
| "step": 376320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.383010463434976e-05, |
| "loss": 4.2158, |
| "step": 376832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.382171868683924e-05, |
| "loss": 4.2013, |
| "step": 377344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.381333273932872e-05, |
| "loss": 4.2138, |
| "step": 377856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.38049467918182e-05, |
| "loss": 4.206, |
| "step": 378368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.379657722311141e-05, |
| "loss": 4.2031, |
| "step": 378880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3788207654404625e-05, |
| "loss": 4.2048, |
| "step": 379392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3779821706894105e-05, |
| "loss": 4.2052, |
| "step": 379904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3771435759383585e-05, |
| "loss": 4.2049, |
| "step": 380416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3763066190676794e-05, |
| "loss": 4.1903, |
| "step": 380928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3754680243166274e-05, |
| "loss": 4.2042, |
| "step": 381440 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.200494289398193, |
| "eval_runtime": 292.0312, |
| "eval_samples_per_second": 1306.679, |
| "eval_steps_per_second": 40.835, |
| "step": 381600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3746294295655754e-05, |
| "loss": 4.1936, |
| "step": 381952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3737908348145234e-05, |
| "loss": 4.189, |
| "step": 382464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3729522400634714e-05, |
| "loss": 4.2106, |
| "step": 382976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3721136453124194e-05, |
| "loss": 4.209, |
| "step": 383488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3712750505613674e-05, |
| "loss": 4.2037, |
| "step": 384000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3704364558103154e-05, |
| "loss": 4.1986, |
| "step": 384512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.369597861059263e-05, |
| "loss": 4.2005, |
| "step": 385024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3687592663082114e-05, |
| "loss": 4.1861, |
| "step": 385536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3679206715571594e-05, |
| "loss": 4.2029, |
| "step": 386048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3670820768061074e-05, |
| "loss": 4.2053, |
| "step": 386560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3662434820550554e-05, |
| "loss": 4.1968, |
| "step": 387072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3654048873040034e-05, |
| "loss": 4.2032, |
| "step": 387584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.364567930433324e-05, |
| "loss": 4.1919, |
| "step": 388096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.363729335682272e-05, |
| "loss": 4.198, |
| "step": 388608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.36289074093122e-05, |
| "loss": 4.188, |
| "step": 389120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362052146180168e-05, |
| "loss": 4.1863, |
| "step": 389632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.361213551429116e-05, |
| "loss": 4.1914, |
| "step": 390144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.360374956678064e-05, |
| "loss": 4.1911, |
| "step": 390656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.359536361927012e-05, |
| "loss": 4.1931, |
| "step": 391168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.35869776717596e-05, |
| "loss": 4.2056, |
| "step": 391680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.357860810305281e-05, |
| "loss": 4.204, |
| "step": 392192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.35702221555423e-05, |
| "loss": 4.2017, |
| "step": 392704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.356183620803178e-05, |
| "loss": 4.1911, |
| "step": 393216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.355345026052126e-05, |
| "loss": 4.2046, |
| "step": 393728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.354508069181447e-05, |
| "loss": 4.1876, |
| "step": 394240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.353669474430395e-05, |
| "loss": 4.1978, |
| "step": 394752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.352830879679343e-05, |
| "loss": 4.1869, |
| "step": 395264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.351992284928291e-05, |
| "loss": 4.18, |
| "step": 395776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.351153690177239e-05, |
| "loss": 4.1872, |
| "step": 396288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.350315095426187e-05, |
| "loss": 4.1872, |
| "step": 396800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.349476500675134e-05, |
| "loss": 4.1906, |
| "step": 397312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3486395438044556e-05, |
| "loss": 4.1927, |
| "step": 397824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3478009490534036e-05, |
| "loss": 4.197, |
| "step": 398336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3469623543023516e-05, |
| "loss": 4.1953, |
| "step": 398848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3461237595512996e-05, |
| "loss": 4.1861, |
| "step": 399360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3452851648002476e-05, |
| "loss": 4.1971, |
| "step": 399872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3444465700491956e-05, |
| "loss": 4.1842, |
| "step": 400384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3436079752981436e-05, |
| "loss": 4.1825, |
| "step": 400896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3427693805470916e-05, |
| "loss": 4.177, |
| "step": 401408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3419324236764125e-05, |
| "loss": 4.1849, |
| "step": 401920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3410938289253605e-05, |
| "loss": 4.1883, |
| "step": 402432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3402552341743085e-05, |
| "loss": 4.1914, |
| "step": 402944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3394166394232565e-05, |
| "loss": 4.1791, |
| "step": 403456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3385796825525774e-05, |
| "loss": 4.1857, |
| "step": 403968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3377410878015254e-05, |
| "loss": 4.1917, |
| "step": 404480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3369024930504734e-05, |
| "loss": 4.1903, |
| "step": 404992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3360638982994214e-05, |
| "loss": 4.1925, |
| "step": 405504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.335226941428743e-05, |
| "loss": 4.1772, |
| "step": 406016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.334388346677691e-05, |
| "loss": 4.1632, |
| "step": 406528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.333549751926639e-05, |
| "loss": 4.1947, |
| "step": 407040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.332711157175587e-05, |
| "loss": 4.1869, |
| "step": 407552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331874200304908e-05, |
| "loss": 4.1837, |
| "step": 408064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331037243434229e-05, |
| "loss": 4.1821, |
| "step": 408576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.330198648683177e-05, |
| "loss": 4.1736, |
| "step": 409088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.329360053932125e-05, |
| "loss": 4.1675, |
| "step": 409600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.328521459181073e-05, |
| "loss": 4.1798, |
| "step": 410112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.327682864430021e-05, |
| "loss": 4.1681, |
| "step": 410624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326844269678969e-05, |
| "loss": 4.1767, |
| "step": 411136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326005674927917e-05, |
| "loss": 4.1865, |
| "step": 411648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3251670801768654e-05, |
| "loss": 4.1707, |
| "step": 412160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3243284854258134e-05, |
| "loss": 4.1673, |
| "step": 412672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3234898906747614e-05, |
| "loss": 4.184, |
| "step": 413184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.322652933804082e-05, |
| "loss": 4.1631, |
| "step": 413696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.32181433905303e-05, |
| "loss": 4.1676, |
| "step": 414208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.320975744301978e-05, |
| "loss": 4.1786, |
| "step": 414720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.320137149550926e-05, |
| "loss": 4.1762, |
| "step": 415232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.319300192680247e-05, |
| "loss": 4.165, |
| "step": 415744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.318461597929195e-05, |
| "loss": 4.1677, |
| "step": 416256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.317623003178143e-05, |
| "loss": 4.1656, |
| "step": 416768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.316784408427091e-05, |
| "loss": 4.1739, |
| "step": 417280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.315945813676039e-05, |
| "loss": 4.1803, |
| "step": 417792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.315107218924987e-05, |
| "loss": 4.1702, |
| "step": 418304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.314270262054309e-05, |
| "loss": 4.1801, |
| "step": 418816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.313431667303257e-05, |
| "loss": 4.1832, |
| "step": 419328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.312593072552205e-05, |
| "loss": 4.1792, |
| "step": 419840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.311754477801153e-05, |
| "loss": 4.1589, |
| "step": 420352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3109158830501e-05, |
| "loss": 4.179, |
| "step": 420864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.310077288299048e-05, |
| "loss": 4.1775, |
| "step": 421376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.309238693547996e-05, |
| "loss": 4.1607, |
| "step": 421888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.308400098796944e-05, |
| "loss": 4.1763, |
| "step": 422400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.307563141926265e-05, |
| "loss": 4.1712, |
| "step": 422912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3067278229359595e-05, |
| "loss": 4.1831, |
| "step": 423424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3058892281849075e-05, |
| "loss": 4.1698, |
| "step": 423936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.305050633433856e-05, |
| "loss": 4.1521, |
| "step": 424448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.304212038682804e-05, |
| "loss": 4.1699, |
| "step": 424960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.303373443931752e-05, |
| "loss": 4.1646, |
| "step": 425472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3025348491807e-05, |
| "loss": 4.1727, |
| "step": 425984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3016962544296475e-05, |
| "loss": 4.1715, |
| "step": 426496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3008576596785955e-05, |
| "loss": 4.1641, |
| "step": 427008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.300020702807917e-05, |
| "loss": 4.1715, |
| "step": 427520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.299182108056865e-05, |
| "loss": 4.1682, |
| "step": 428032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.298345151186186e-05, |
| "loss": 4.1613, |
| "step": 428544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.297506556435134e-05, |
| "loss": 4.1643, |
| "step": 429056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.296667961684082e-05, |
| "loss": 4.1719, |
| "step": 429568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29582936693303e-05, |
| "loss": 4.1795, |
| "step": 430080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.294990772181978e-05, |
| "loss": 4.1625, |
| "step": 430592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.294152177430926e-05, |
| "loss": 4.1518, |
| "step": 431104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.293313582679874e-05, |
| "loss": 4.1653, |
| "step": 431616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.292474987928822e-05, |
| "loss": 4.1653, |
| "step": 432128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29163639317777e-05, |
| "loss": 4.174, |
| "step": 432640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.290797798426718e-05, |
| "loss": 4.1753, |
| "step": 433152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.289959203675666e-05, |
| "loss": 4.1559, |
| "step": 433664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.289120608924614e-05, |
| "loss": 4.1567, |
| "step": 434176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.288285289934308e-05, |
| "loss": 4.1684, |
| "step": 434688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.287446695183256e-05, |
| "loss": 4.1476, |
| "step": 435200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.286608100432204e-05, |
| "loss": 4.1641, |
| "step": 435712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.285769505681152e-05, |
| "loss": 4.1508, |
| "step": 436224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.284932548810473e-05, |
| "loss": 4.1666, |
| "step": 436736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.284093954059421e-05, |
| "loss": 4.1524, |
| "step": 437248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.283255359308369e-05, |
| "loss": 4.1665, |
| "step": 437760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.282416764557317e-05, |
| "loss": 4.1587, |
| "step": 438272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.281578169806265e-05, |
| "loss": 4.1478, |
| "step": 438784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.280739575055213e-05, |
| "loss": 4.1675, |
| "step": 439296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.279900980304161e-05, |
| "loss": 4.1519, |
| "step": 439808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.279062385553109e-05, |
| "loss": 4.175, |
| "step": 440320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.27822542868243e-05, |
| "loss": 4.1696, |
| "step": 440832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.277386833931378e-05, |
| "loss": 4.1545, |
| "step": 441344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.276548239180326e-05, |
| "loss": 4.1679, |
| "step": 441856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.275709644429274e-05, |
| "loss": 4.1509, |
| "step": 442368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.274871049678222e-05, |
| "loss": 4.1597, |
| "step": 442880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.27403245492717e-05, |
| "loss": 4.1556, |
| "step": 443392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.273193860176118e-05, |
| "loss": 4.1663, |
| "step": 443904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.272355265425066e-05, |
| "loss": 4.1554, |
| "step": 444416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.271516670674014e-05, |
| "loss": 4.1533, |
| "step": 444928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.270679713803336e-05, |
| "loss": 4.1582, |
| "step": 445440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.269841119052284e-05, |
| "loss": 4.1568, |
| "step": 445952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.269002524301231e-05, |
| "loss": 4.1459, |
| "step": 446464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.268163929550179e-05, |
| "loss": 4.164, |
| "step": 446976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.267325334799127e-05, |
| "loss": 4.1448, |
| "step": 447488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.266486740048075e-05, |
| "loss": 4.173, |
| "step": 448000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.265648145297023e-05, |
| "loss": 4.1516, |
| "step": 448512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.264811188426344e-05, |
| "loss": 4.1648, |
| "step": 449024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.263972593675292e-05, |
| "loss": 4.1496, |
| "step": 449536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.26313399892424e-05, |
| "loss": 4.157, |
| "step": 450048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2622954041731886e-05, |
| "loss": 4.1642, |
| "step": 450560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2614568094221366e-05, |
| "loss": 4.1556, |
| "step": 451072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2606182146710846e-05, |
| "loss": 4.1521, |
| "step": 451584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2597796199200326e-05, |
| "loss": 4.163, |
| "step": 452096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2589410251689806e-05, |
| "loss": 4.1591, |
| "step": 452608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2581040682983015e-05, |
| "loss": 4.1643, |
| "step": 453120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2572671114276224e-05, |
| "loss": 4.1482, |
| "step": 453632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2564285166765704e-05, |
| "loss": 4.1633, |
| "step": 454144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2555899219255184e-05, |
| "loss": 4.1566, |
| "step": 454656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2547513271744664e-05, |
| "loss": 4.1566, |
| "step": 455168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253914370303787e-05, |
| "loss": 4.1563, |
| "step": 455680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253075775552735e-05, |
| "loss": 4.1552, |
| "step": 456192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.252237180801684e-05, |
| "loss": 4.1618, |
| "step": 456704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.251400223931005e-05, |
| "loss": 4.1337, |
| "step": 457216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.250561629179953e-05, |
| "loss": 4.1597, |
| "step": 457728 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.1602396965026855, |
| "eval_runtime": 292.8831, |
| "eval_samples_per_second": 1302.878, |
| "eval_steps_per_second": 40.716, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249723034428901e-05, |
| "loss": 4.1501, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248884439677849e-05, |
| "loss": 4.1398, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248045844926797e-05, |
| "loss": 4.1579, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247207250175745e-05, |
| "loss": 4.1619, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246368655424693e-05, |
| "loss": 4.1545, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245530060673641e-05, |
| "loss": 4.1491, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244691465922589e-05, |
| "loss": 4.1521, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243852871171537e-05, |
| "loss": 4.1406, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243014276420485e-05, |
| "loss": 4.1539, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242175681669432e-05, |
| "loss": 4.1544, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.241337086918381e-05, |
| "loss": 4.1556, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.240498492167329e-05, |
| "loss": 4.153, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23966153529665e-05, |
| "loss": 4.149, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238822940545598e-05, |
| "loss": 4.1482, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237984345794546e-05, |
| "loss": 4.1409, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237145751043494e-05, |
| "loss": 4.1363, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236307156292442e-05, |
| "loss": 4.1463, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23546856154139e-05, |
| "loss": 4.1417, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234629966790338e-05, |
| "loss": 4.1475, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233791372039286e-05, |
| "loss": 4.1589, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232952777288234e-05, |
| "loss": 4.1597, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2321158204175546e-05, |
| "loss": 4.1517, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2312772256665026e-05, |
| "loss": 4.1449, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2304386309154506e-05, |
| "loss": 4.156, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2296000361643986e-05, |
| "loss": 4.1407, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.22876307929372e-05, |
| "loss": 4.1533, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227924484542668e-05, |
| "loss": 4.142, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227085889791616e-05, |
| "loss": 4.13, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226247295040564e-05, |
| "loss": 4.1432, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.225408700289512e-05, |
| "loss": 4.1404, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.22457010553846e-05, |
| "loss": 4.1425, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.223731510787408e-05, |
| "loss": 4.1502, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222892916036356e-05, |
| "loss": 4.1491, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.22205759704605e-05, |
| "loss": 4.1479, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221220640175371e-05, |
| "loss": 4.1388, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2203820454243196e-05, |
| "loss": 4.156, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2195434506732676e-05, |
| "loss": 4.1374, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2187048559222156e-05, |
| "loss": 4.1313, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2178662611711635e-05, |
| "loss": 4.1365, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2170276664201115e-05, |
| "loss": 4.1336, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2161890716690595e-05, |
| "loss": 4.1418, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2153504769180075e-05, |
| "loss": 4.1498, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2145135200473284e-05, |
| "loss": 4.1321, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2136765631766494e-05, |
| "loss": 4.139, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2128379684255973e-05, |
| "loss": 4.1484, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2119993736745453e-05, |
| "loss": 4.1488, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211160778923493e-05, |
| "loss": 4.1472, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.210322184172441e-05, |
| "loss": 4.1351, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.209483589421389e-05, |
| "loss": 4.1117, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208644994670338e-05, |
| "loss": 4.1539, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207806399919286e-05, |
| "loss": 4.1435, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206967805168233e-05, |
| "loss": 4.1424, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206130848297555e-05, |
| "loss": 4.1342, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.205292253546503e-05, |
| "loss": 4.1295, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204455296675824e-05, |
| "loss": 4.1234, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.203616701924772e-05, |
| "loss": 4.1319, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20277810717372e-05, |
| "loss": 4.1271, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201939512422668e-05, |
| "loss": 4.131, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201100917671615e-05, |
| "loss": 4.1437, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200262322920563e-05, |
| "loss": 4.1241, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.199423728169512e-05, |
| "loss": 4.1232, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19858513341846e-05, |
| "loss": 4.1392, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197746538667408e-05, |
| "loss": 4.1229, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196909581796729e-05, |
| "loss": 4.1243, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196070987045677e-05, |
| "loss": 4.1325, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195232392294625e-05, |
| "loss": 4.1327, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194393797543573e-05, |
| "loss": 4.1212, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1935568406728936e-05, |
| "loss": 4.1198, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1927182459218416e-05, |
| "loss": 4.1214, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1918796511707896e-05, |
| "loss": 4.1353, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1910410564197376e-05, |
| "loss": 4.132, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1902040995490585e-05, |
| "loss": 4.131, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189365504798007e-05, |
| "loss": 4.132, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188526910046955e-05, |
| "loss": 4.1398, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.187688315295903e-05, |
| "loss": 4.1362, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186849720544851e-05, |
| "loss": 4.1187, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186011125793799e-05, |
| "loss": 4.132, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.185172531042747e-05, |
| "loss": 4.1385, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.184335574172068e-05, |
| "loss": 4.1145, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.183496979421016e-05, |
| "loss": 4.1323, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182658384669964e-05, |
| "loss": 4.131, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181819789918912e-05, |
| "loss": 4.1368, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180982833048233e-05, |
| "loss": 4.1292, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180144238297181e-05, |
| "loss": 4.1115, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.179305643546129e-05, |
| "loss": 4.1247, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178467048795077e-05, |
| "loss": 4.1233, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177628454044025e-05, |
| "loss": 4.1335, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1767914971733465e-05, |
| "loss": 4.1249, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1759529024222945e-05, |
| "loss": 4.1212, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1751143076712425e-05, |
| "loss": 4.1269, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1742757129201905e-05, |
| "loss": 4.1323, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1734371181691385e-05, |
| "loss": 4.1159, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1726001612984594e-05, |
| "loss": 4.122, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1717615665474074e-05, |
| "loss": 4.1321, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1709229717963554e-05, |
| "loss": 4.1347, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1700843770453034e-05, |
| "loss": 4.1216, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1692457822942514e-05, |
| "loss": 4.1111, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.168407187543199e-05, |
| "loss": 4.1207, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16757023067252e-05, |
| "loss": 4.1196, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166731635921469e-05, |
| "loss": 4.1323, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165893041170417e-05, |
| "loss": 4.134, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165054446419364e-05, |
| "loss": 4.1176, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.164217489548686e-05, |
| "loss": 4.1156, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163378894797634e-05, |
| "loss": 4.1231, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162540300046581e-05, |
| "loss": 4.1059, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.161701705295529e-05, |
| "loss": 4.1231, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160863110544477e-05, |
| "loss": 4.1046, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160024515793425e-05, |
| "loss": 4.1245, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.159185921042373e-05, |
| "loss": 4.1129, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158347326291321e-05, |
| "loss": 4.1226, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.157510369420643e-05, |
| "loss": 4.1179, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156671774669591e-05, |
| "loss": 4.1113, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1558348177989116e-05, |
| "loss": 4.1234, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1549962230478596e-05, |
| "loss": 4.1105, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1541576282968076e-05, |
| "loss": 4.1349, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1533190335457556e-05, |
| "loss": 4.1273, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1524804387947036e-05, |
| "loss": 4.1157, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1516434819240245e-05, |
| "loss": 4.126, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1508048871729725e-05, |
| "loss": 4.1095, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1499662924219205e-05, |
| "loss": 4.1194, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1491276976708685e-05, |
| "loss": 4.1179, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1482891029198165e-05, |
| "loss": 4.1214, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147452146049138e-05, |
| "loss": 4.116, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.146613551298086e-05, |
| "loss": 4.115, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.145774956547034e-05, |
| "loss": 4.1173, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144936361795982e-05, |
| "loss": 4.1146, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14409776704493e-05, |
| "loss": 4.11, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.143260810174251e-05, |
| "loss": 4.121, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.142422215423199e-05, |
| "loss": 4.1023, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.141583620672147e-05, |
| "loss": 4.1308, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.140745025921095e-05, |
| "loss": 4.1162, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139906431170043e-05, |
| "loss": 4.1242, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139069474299364e-05, |
| "loss": 4.105, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138232517428685e-05, |
| "loss": 4.1206, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1373939226776335e-05, |
| "loss": 4.1231, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1365553279265815e-05, |
| "loss": 4.119, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1357167331755295e-05, |
| "loss": 4.1094, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1348781384244775e-05, |
| "loss": 4.1226, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1340395436734254e-05, |
| "loss": 4.1176, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1332009489223734e-05, |
| "loss": 4.1268, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1323623541713214e-05, |
| "loss": 4.1076, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1315253973006424e-05, |
| "loss": 4.1251, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1306868025495903e-05, |
| "loss": 4.1171, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1298482077985383e-05, |
| "loss": 4.1145, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129009613047486e-05, |
| "loss": 4.1165, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.128171018296434e-05, |
| "loss": 4.1156, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.127332423545382e-05, |
| "loss": 4.1178, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.12649382879433e-05, |
| "loss": 4.1004, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125655234043278e-05, |
| "loss": 4.1213, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.128416061401367, |
| "eval_runtime": 288.7842, |
| "eval_samples_per_second": 1321.371, |
| "eval_steps_per_second": 41.294, |
| "step": 534240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.124816639292226e-05, |
| "loss": 4.1117, |
| "step": 534528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.123978044541174e-05, |
| "loss": 4.1021, |
| "step": 535040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.123141087670495e-05, |
| "loss": 4.1178, |
| "step": 535552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.122302492919443e-05, |
| "loss": 4.1207, |
| "step": 536064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.121463898168391e-05, |
| "loss": 4.1135, |
| "step": 536576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.120625303417339e-05, |
| "loss": 4.1123, |
| "step": 537088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.119786708666287e-05, |
| "loss": 4.1178, |
| "step": 537600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118948113915235e-05, |
| "loss": 4.101, |
| "step": 538112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118109519164183e-05, |
| "loss": 4.1143, |
| "step": 538624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.117272562293504e-05, |
| "loss": 4.1155, |
| "step": 539136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.116433967542452e-05, |
| "loss": 4.1138, |
| "step": 539648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1155953727914e-05, |
| "loss": 4.117, |
| "step": 540160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.114758415920722e-05, |
| "loss": 4.1072, |
| "step": 540672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.11391982116967e-05, |
| "loss": 4.1079, |
| "step": 541184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.113081226418618e-05, |
| "loss": 4.1042, |
| "step": 541696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.112242631667566e-05, |
| "loss": 4.0962, |
| "step": 542208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.111404036916514e-05, |
| "loss": 4.1087, |
| "step": 542720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1105654421654617e-05, |
| "loss": 4.1033, |
| "step": 543232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1097268474144097e-05, |
| "loss": 4.1086, |
| "step": 543744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1088882526633577e-05, |
| "loss": 4.1183, |
| "step": 544256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1080496579123056e-05, |
| "loss": 4.1232, |
| "step": 544768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1072110631612536e-05, |
| "loss": 4.1195, |
| "step": 545280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1063741062905746e-05, |
| "loss": 4.104, |
| "step": 545792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1055355115395225e-05, |
| "loss": 4.1136, |
| "step": 546304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1046969167884705e-05, |
| "loss": 4.1054, |
| "step": 546816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103859959917792e-05, |
| "loss": 4.1128, |
| "step": 547328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.10302136516674e-05, |
| "loss": 4.1069, |
| "step": 547840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.102182770415688e-05, |
| "loss": 4.0942, |
| "step": 548352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.101344175664636e-05, |
| "loss": 4.109, |
| "step": 548864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1005055809135834e-05, |
| "loss": 4.099, |
| "step": 549376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0996669861625314e-05, |
| "loss": 4.1056, |
| "step": 549888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0988283914114794e-05, |
| "loss": 4.1128, |
| "step": 550400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0979897966604274e-05, |
| "loss": 4.1087, |
| "step": 550912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0971512019093754e-05, |
| "loss": 4.1086, |
| "step": 551424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.096314245038696e-05, |
| "loss": 4.1007, |
| "step": 551936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.095475650287644e-05, |
| "loss": 4.1197, |
| "step": 552448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.094638693416966e-05, |
| "loss": 4.1061, |
| "step": 552960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.093800098665914e-05, |
| "loss": 4.092, |
| "step": 553472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.092961503914862e-05, |
| "loss": 4.1034, |
| "step": 553984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.09212290916381e-05, |
| "loss": 4.0941, |
| "step": 554496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.091284314412758e-05, |
| "loss": 4.1024, |
| "step": 555008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.090445719661706e-05, |
| "loss": 4.1081, |
| "step": 555520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.089607124910654e-05, |
| "loss": 4.1015, |
| "step": 556032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.088768530159602e-05, |
| "loss": 4.0979, |
| "step": 556544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.08792993540855e-05, |
| "loss": 4.1105, |
| "step": 557056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.087092978537871e-05, |
| "loss": 4.111, |
| "step": 557568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.086254383786819e-05, |
| "loss": 4.1116, |
| "step": 558080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.085415789035767e-05, |
| "loss": 4.0957, |
| "step": 558592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.084577194284715e-05, |
| "loss": 4.0725, |
| "step": 559104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.083738599533663e-05, |
| "loss": 4.1203, |
| "step": 559616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0829016426629844e-05, |
| "loss": 4.1014, |
| "step": 560128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0820630479119323e-05, |
| "loss": 4.1069, |
| "step": 560640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.081226091041253e-05, |
| "loss": 4.0985, |
| "step": 561152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.080387496290201e-05, |
| "loss": 4.0911, |
| "step": 561664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.079548901539149e-05, |
| "loss": 4.0896, |
| "step": 562176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.078710306788097e-05, |
| "loss": 4.0955, |
| "step": 562688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077871712037045e-05, |
| "loss": 4.0892, |
| "step": 563200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077033117285993e-05, |
| "loss": 4.0923, |
| "step": 563712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.076194522534941e-05, |
| "loss": 4.1124, |
| "step": 564224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.075355927783889e-05, |
| "loss": 4.0869, |
| "step": 564736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.074517333032837e-05, |
| "loss": 4.0858, |
| "step": 565248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0736787382817845e-05, |
| "loss": 4.1077, |
| "step": 565760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0728401435307325e-05, |
| "loss": 4.0826, |
| "step": 566272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072003186660054e-05, |
| "loss": 4.087, |
| "step": 566784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.071166229789376e-05, |
| "loss": 4.0967, |
| "step": 567296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.070327635038324e-05, |
| "loss": 4.0974, |
| "step": 567808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.069489040287272e-05, |
| "loss": 4.0854, |
| "step": 568320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.06865044553622e-05, |
| "loss": 4.0937, |
| "step": 568832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0678134886655406e-05, |
| "loss": 4.0809, |
| "step": 569344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0669748939144886e-05, |
| "loss": 4.0991, |
| "step": 569856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0661362991634366e-05, |
| "loss": 4.0974, |
| "step": 570368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0652977044123846e-05, |
| "loss": 4.0937, |
| "step": 570880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.064459109661332e-05, |
| "loss": 4.0988, |
| "step": 571392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0636221527906535e-05, |
| "loss": 4.1009, |
| "step": 571904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0627835580396015e-05, |
| "loss": 4.1065, |
| "step": 572416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0619449632885495e-05, |
| "loss": 4.0833, |
| "step": 572928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0611063685374975e-05, |
| "loss": 4.0974, |
| "step": 573440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0602677737864455e-05, |
| "loss": 4.1036, |
| "step": 573952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0594291790353935e-05, |
| "loss": 4.0772, |
| "step": 574464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0585905842843415e-05, |
| "loss": 4.1018, |
| "step": 574976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0577519895332895e-05, |
| "loss": 4.0929, |
| "step": 575488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056916670542984e-05, |
| "loss": 4.1033, |
| "step": 576000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056078075791932e-05, |
| "loss": 4.0972, |
| "step": 576512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.055239481040879e-05, |
| "loss": 4.0796, |
| "step": 577024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.054400886289827e-05, |
| "loss": 4.0823, |
| "step": 577536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.053562291538775e-05, |
| "loss": 4.0897, |
| "step": 578048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.052723696787723e-05, |
| "loss": 4.1001, |
| "step": 578560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051886739917045e-05, |
| "loss": 4.0868, |
| "step": 579072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051048145165993e-05, |
| "loss": 4.0887, |
| "step": 579584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.050209550414941e-05, |
| "loss": 4.0921, |
| "step": 580096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.049370955663889e-05, |
| "loss": 4.0966, |
| "step": 580608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.04853399879321e-05, |
| "loss": 4.0858, |
| "step": 581120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.047695404042158e-05, |
| "loss": 4.0869, |
| "step": 581632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.046858447171479e-05, |
| "loss": 4.0966, |
| "step": 582144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0460198524204267e-05, |
| "loss": 4.099, |
| "step": 582656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0451812576693746e-05, |
| "loss": 4.0891, |
| "step": 583168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0443426629183226e-05, |
| "loss": 4.0776, |
| "step": 583680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0435040681672706e-05, |
| "loss": 4.0872, |
| "step": 584192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0426654734162186e-05, |
| "loss": 4.0848, |
| "step": 584704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0418268786651666e-05, |
| "loss": 4.1002, |
| "step": 585216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.040988283914115e-05, |
| "loss": 4.0929, |
| "step": 585728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.040151327043436e-05, |
| "loss": 4.0893, |
| "step": 586240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.039312732292384e-05, |
| "loss": 4.0849, |
| "step": 586752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.038474137541332e-05, |
| "loss": 4.0847, |
| "step": 587264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.03763554279028e-05, |
| "loss": 4.0768, |
| "step": 587776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036796948039228e-05, |
| "loss": 4.0859, |
| "step": 588288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035958353288176e-05, |
| "loss": 4.0752, |
| "step": 588800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035119758537124e-05, |
| "loss": 4.0914, |
| "step": 589312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.034282801666445e-05, |
| "loss": 4.0821, |
| "step": 589824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.033444206915393e-05, |
| "loss": 4.0849, |
| "step": 590336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.032605612164341e-05, |
| "loss": 4.082, |
| "step": 590848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.031767017413289e-05, |
| "loss": 4.0764, |
| "step": 591360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030928422662237e-05, |
| "loss": 4.0919, |
| "step": 591872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030089827911185e-05, |
| "loss": 4.082, |
| "step": 592384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.029251233160133e-05, |
| "loss": 4.1032, |
| "step": 592896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.028412638409081e-05, |
| "loss": 4.0936, |
| "step": 593408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0275756815384027e-05, |
| "loss": 4.0848, |
| "step": 593920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0267387246677236e-05, |
| "loss": 4.0901, |
| "step": 594432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0259001299166716e-05, |
| "loss": 4.0781, |
| "step": 594944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0250615351656196e-05, |
| "loss": 4.0841, |
| "step": 595456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0242229404145675e-05, |
| "loss": 4.09, |
| "step": 595968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0233859835438885e-05, |
| "loss": 4.0835, |
| "step": 596480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0225473887928365e-05, |
| "loss": 4.083, |
| "step": 596992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0217087940417845e-05, |
| "loss": 4.0838, |
| "step": 597504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.020871837171106e-05, |
| "loss": 4.086, |
| "step": 598016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.020033242420054e-05, |
| "loss": 4.0854, |
| "step": 598528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.019194647669002e-05, |
| "loss": 4.0722, |
| "step": 599040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01835605291795e-05, |
| "loss": 4.0885, |
| "step": 599552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.017517458166898e-05, |
| "loss": 4.0739, |
| "step": 600064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.016678863415845e-05, |
| "loss": 4.0967, |
| "step": 600576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015840268664793e-05, |
| "loss": 4.0844, |
| "step": 601088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015001673913741e-05, |
| "loss": 4.0866, |
| "step": 601600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.014163079162689e-05, |
| "loss": 4.0749, |
| "step": 602112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01332612229201e-05, |
| "loss": 4.0901, |
| "step": 602624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.012487527540958e-05, |
| "loss": 4.0886, |
| "step": 603136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.011648932789906e-05, |
| "loss": 4.0882, |
| "step": 603648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010810338038854e-05, |
| "loss": 4.0814, |
| "step": 604160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009971743287803e-05, |
| "loss": 4.0892, |
| "step": 604672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009133148536751e-05, |
| "loss": 4.0849, |
| "step": 605184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.008294553785699e-05, |
| "loss": 4.0994, |
| "step": 605696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.00745759691502e-05, |
| "loss": 4.0749, |
| "step": 606208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.006619002163968e-05, |
| "loss": 4.093, |
| "step": 606720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.005780407412916e-05, |
| "loss": 4.0845, |
| "step": 607232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.004941812661864e-05, |
| "loss": 4.0838, |
| "step": 607744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.004104855791185e-05, |
| "loss": 4.0829, |
| "step": 608256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.003266261040133e-05, |
| "loss": 4.0861, |
| "step": 608768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.002427666289081e-05, |
| "loss": 4.0848, |
| "step": 609280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0015907094184016e-05, |
| "loss": 4.0691, |
| "step": 609792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0007521146673496e-05, |
| "loss": 4.0881, |
| "step": 610304 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.1050825119018555, |
| "eval_runtime": 296.821, |
| "eval_samples_per_second": 1285.593, |
| "eval_steps_per_second": 40.176, |
| "step": 610560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999913519916298e-05, |
| "loss": 4.0751, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999074925165246e-05, |
| "loss": 4.0707, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998236330414194e-05, |
| "loss": 4.083, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.997397735663142e-05, |
| "loss": 4.0936, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99655914091209e-05, |
| "loss": 4.085, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995720546161038e-05, |
| "loss": 4.0784, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994881951409986e-05, |
| "loss": 4.0897, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994043356658934e-05, |
| "loss": 4.0698, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9932047619078815e-05, |
| "loss": 4.0816, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9923661671568295e-05, |
| "loss": 4.0876, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9915275724057775e-05, |
| "loss": 4.0815, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9906889776547255e-05, |
| "loss": 4.0825, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9898520207840464e-05, |
| "loss": 4.0846, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9890134260329944e-05, |
| "loss": 4.0733, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.988174831281943e-05, |
| "loss": 4.071, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.987336236530891e-05, |
| "loss": 4.0681, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.986497641779839e-05, |
| "loss": 4.076, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.985659047028787e-05, |
| "loss": 4.0768, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.984820452277735e-05, |
| "loss": 4.0743, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983983495407056e-05, |
| "loss": 4.0838, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983144900656004e-05, |
| "loss": 4.0919, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982306305904952e-05, |
| "loss": 4.0885, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9814677111539e-05, |
| "loss": 4.0779, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980629116402848e-05, |
| "loss": 4.0801, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979790521651796e-05, |
| "loss": 4.0816, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978951926900744e-05, |
| "loss": 4.0763, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978113332149692e-05, |
| "loss": 4.0769, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.97727473739864e-05, |
| "loss": 4.0687, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.976436142647588e-05, |
| "loss": 4.0739, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.975597547896536e-05, |
| "loss": 4.0694, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9747605910258575e-05, |
| "loss": 4.0723, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9739219962748055e-05, |
| "loss": 4.0827, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973083401523753e-05, |
| "loss": 4.0814, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.972244806772701e-05, |
| "loss": 4.0782, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.971406212021649e-05, |
| "loss": 4.0744, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9705692551509704e-05, |
| "loss": 4.0829, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9697322982802913e-05, |
| "loss": 4.076, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9688937035292393e-05, |
| "loss": 4.0651, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9680551087781873e-05, |
| "loss": 4.0742, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.967216514027135e-05, |
| "loss": 4.064, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.966377919276083e-05, |
| "loss": 4.0714, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.965539324525031e-05, |
| "loss": 4.0801, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.964700729773979e-05, |
| "loss": 4.0677, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963862135022927e-05, |
| "loss": 4.072, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963025178152248e-05, |
| "loss": 4.0827, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.962186583401196e-05, |
| "loss": 4.0787, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.961347988650144e-05, |
| "loss": 4.0791, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960509393899092e-05, |
| "loss": 4.0714, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95967079914804e-05, |
| "loss": 4.0451, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958832204396988e-05, |
| "loss": 4.0863, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957993609645936e-05, |
| "loss": 4.0719, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957155014894884e-05, |
| "loss": 4.0779, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956318058024205e-05, |
| "loss": 4.0728, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955479463273154e-05, |
| "loss": 4.0623, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.954640868522102e-05, |
| "loss": 4.0585, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95380227377105e-05, |
| "loss": 4.0675, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952963679019998e-05, |
| "loss": 4.0612, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952125084268946e-05, |
| "loss": 4.0601, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.951288127398267e-05, |
| "loss": 4.0828, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.950449532647215e-05, |
| "loss": 4.06, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.949610937896163e-05, |
| "loss": 4.055, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9487723431451107e-05, |
| "loss": 4.0796, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9479337483940587e-05, |
| "loss": 4.0494, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9470967915233796e-05, |
| "loss": 4.0562, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9462581967723276e-05, |
| "loss": 4.0711, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9454196020212756e-05, |
| "loss": 4.0699, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9445810072702235e-05, |
| "loss": 4.0543, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9437424125191715e-05, |
| "loss": 4.0605, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9429038177681195e-05, |
| "loss": 4.0552, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9420652230170675e-05, |
| "loss": 4.0695, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9412266282660155e-05, |
| "loss": 4.066, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9403896713953364e-05, |
| "loss": 4.0651, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9395510766442844e-05, |
| "loss": 4.0699, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9387124818932324e-05, |
| "loss": 4.0697, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9378738871421804e-05, |
| "loss": 4.0788, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9370352923911284e-05, |
| "loss": 4.054, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.936199973400823e-05, |
| "loss": 4.0701, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.935361378649771e-05, |
| "loss": 4.074, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.934522783898719e-05, |
| "loss": 4.0469, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.933684189147667e-05, |
| "loss": 4.0705, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932845594396615e-05, |
| "loss": 4.0691, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932006999645563e-05, |
| "loss": 4.0751, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.931168404894511e-05, |
| "loss": 4.0677, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.930331448023832e-05, |
| "loss": 4.0556, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.92949285327278e-05, |
| "loss": 4.0488, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.928654258521728e-05, |
| "loss": 4.0615, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927815663770676e-05, |
| "loss": 4.0688, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.926977069019624e-05, |
| "loss": 4.0616, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.926138474268572e-05, |
| "loss": 4.0577, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.92529987951752e-05, |
| "loss": 4.0618, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924461284766468e-05, |
| "loss": 4.0706, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9236243278957894e-05, |
| "loss": 4.0582, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9227857331447374e-05, |
| "loss": 4.0562, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9219471383936854e-05, |
| "loss": 4.0661, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.921110181523006e-05, |
| "loss": 4.0719, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.920271586771954e-05, |
| "loss": 4.0602, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.919432992020902e-05, |
| "loss": 4.0525, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.91859439726985e-05, |
| "loss": 4.0555, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.917755802518798e-05, |
| "loss": 4.059, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.916917207767746e-05, |
| "loss": 4.0699, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.916078613016694e-05, |
| "loss": 4.0689, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.915241656146015e-05, |
| "loss": 4.0599, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.914403061394963e-05, |
| "loss": 4.0563, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.913564466643911e-05, |
| "loss": 4.051, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912725871892859e-05, |
| "loss": 4.056, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911888915022181e-05, |
| "loss": 4.0572, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911050320271129e-05, |
| "loss": 4.0462, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910211725520077e-05, |
| "loss": 4.0656, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909373130769025e-05, |
| "loss": 4.0548, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.908534536017973e-05, |
| "loss": 4.0559, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90769594126692e-05, |
| "loss": 4.0507, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906857346515868e-05, |
| "loss": 4.0487, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906018751764816e-05, |
| "loss": 4.0683, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.905181794894137e-05, |
| "loss": 4.0494, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.904343200143085e-05, |
| "loss": 4.0773, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9035062432724065e-05, |
| "loss": 4.0649, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9026676485213545e-05, |
| "loss": 4.0577, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.901830691650676e-05, |
| "loss": 4.0608, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900993734779997e-05, |
| "loss": 4.055, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900155140028945e-05, |
| "loss": 4.0552, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.899316545277893e-05, |
| "loss": 4.0628, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.898477950526841e-05, |
| "loss": 4.0548, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.897639355775789e-05, |
| "loss": 4.0586, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896800761024737e-05, |
| "loss": 4.0516, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895962166273684e-05, |
| "loss": 4.0574, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895123571522632e-05, |
| "loss": 4.057, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894286614651954e-05, |
| "loss": 4.0449, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.893448019900902e-05, |
| "loss": 4.0618, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.89260942514985e-05, |
| "loss": 4.0473, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891770830398798e-05, |
| "loss": 4.0704, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890932235647746e-05, |
| "loss": 4.0579, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890093640896694e-05, |
| "loss": 4.0594, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.889255046145642e-05, |
| "loss": 4.0483, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.88841645139459e-05, |
| "loss": 4.0671, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.887579494523911e-05, |
| "loss": 4.0573, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.886740899772859e-05, |
| "loss": 4.0604, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885902305021807e-05, |
| "loss": 4.0542, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885063710270755e-05, |
| "loss": 4.0625, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.884225115519703e-05, |
| "loss": 4.0579, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.883386520768651e-05, |
| "loss": 4.0748, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.882547926017599e-05, |
| "loss": 4.0475, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.881709331266547e-05, |
| "loss": 4.0649, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880870736515495e-05, |
| "loss": 4.0589, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8800321417644434e-05, |
| "loss": 4.0605, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8791935470133914e-05, |
| "loss": 4.0498, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.878354952262339e-05, |
| "loss": 4.0621, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.87751799539166e-05, |
| "loss": 4.0564, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.876679400640608e-05, |
| "loss": 4.0501, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.875842443769929e-05, |
| "loss": 4.058, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.085964679718018, |
| "eval_runtime": 319.8436, |
| "eval_samples_per_second": 1193.055, |
| "eval_steps_per_second": 37.284, |
| "step": 686880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.875003849018877e-05, |
| "loss": 4.0491, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.874165254267825e-05, |
| "loss": 4.0426, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.873326659516773e-05, |
| "loss": 4.054, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.872489702646094e-05, |
| "loss": 4.0675, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.871651107895042e-05, |
| "loss": 4.0607, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.87081251314399e-05, |
| "loss": 4.0496, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869973918392939e-05, |
| "loss": 4.065, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869135323641886e-05, |
| "loss": 4.0444, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.868298366771208e-05, |
| "loss": 4.0528, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8674597720201557e-05, |
| "loss": 4.0639, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.866621177269103e-05, |
| "loss": 4.052, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8657842203984246e-05, |
| "loss": 4.0568, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8649456256473726e-05, |
| "loss": 4.06, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8641070308963206e-05, |
| "loss": 4.0486, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.863268436145268e-05, |
| "loss": 4.0448, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.862429841394216e-05, |
| "loss": 4.0421, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.861591246643164e-05, |
| "loss": 4.0485, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8607526518921125e-05, |
| "loss": 4.0509, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8599140571410605e-05, |
| "loss": 4.0481, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8590754623900085e-05, |
| "loss": 4.0602, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8582368676389565e-05, |
| "loss": 4.0649, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8573982728879045e-05, |
| "loss": 4.0627, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8565596781368525e-05, |
| "loss": 4.0546, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8557210833858005e-05, |
| "loss": 4.049, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854885764395494e-05, |
| "loss": 4.0576, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854047169644442e-05, |
| "loss": 4.0511, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.85320857489339e-05, |
| "loss": 4.0482, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.852369980142338e-05, |
| "loss": 4.0478, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.851531385391286e-05, |
| "loss": 4.0476, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.850692790640234e-05, |
| "loss": 4.0455, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849855833769556e-05, |
| "loss": 4.0471, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849017239018504e-05, |
| "loss": 4.0539, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.848178644267452e-05, |
| "loss": 4.0612, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8473400495164e-05, |
| "loss": 4.0499, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.846501454765348e-05, |
| "loss": 4.0526, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.845664497894669e-05, |
| "loss": 4.0494, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.844825903143617e-05, |
| "loss": 4.0546, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843988946272938e-05, |
| "loss": 4.0403, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843150351521886e-05, |
| "loss": 4.0477, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.842311756770834e-05, |
| "loss": 4.0364, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.841473162019782e-05, |
| "loss": 4.05, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84063456726873e-05, |
| "loss": 4.0486, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839795972517678e-05, |
| "loss": 4.0451, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838957377766626e-05, |
| "loss": 4.0486, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8381187830155743e-05, |
| "loss": 4.0555, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8372801882645223e-05, |
| "loss": 4.054, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.836443231393843e-05, |
| "loss": 4.054, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.835604636642791e-05, |
| "loss": 4.049, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.834766041891739e-05, |
| "loss": 4.0184, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8339274471406866e-05, |
| "loss": 4.0586, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8330888523896346e-05, |
| "loss": 4.0483, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8322502576385826e-05, |
| "loss": 4.053, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.831413300767904e-05, |
| "loss": 4.0468, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8305747060168515e-05, |
| "loss": 4.0366, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8297361112657995e-05, |
| "loss": 4.034, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.828897516514748e-05, |
| "loss": 4.0407, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.828058921763696e-05, |
| "loss": 4.0395, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.827220327012644e-05, |
| "loss": 4.0335, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.826381732261592e-05, |
| "loss": 4.0584, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.825544775390913e-05, |
| "loss": 4.0395, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.824706180639861e-05, |
| "loss": 4.0262, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823867585888809e-05, |
| "loss": 4.057, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823028991137757e-05, |
| "loss": 4.0279, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822190396386705e-05, |
| "loss": 4.032, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.821351801635653e-05, |
| "loss": 4.0479, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.820513206884601e-05, |
| "loss": 4.0412, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.819676250013922e-05, |
| "loss": 4.0348, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.81883765526287e-05, |
| "loss": 4.0355, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.817999060511818e-05, |
| "loss": 4.031, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8171604657607666e-05, |
| "loss": 4.0439, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8163235088900875e-05, |
| "loss": 4.0423, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8154849141390355e-05, |
| "loss": 4.0436, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8146463193879835e-05, |
| "loss": 4.0426, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8138077246369315e-05, |
| "loss": 4.0467, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8129691298858795e-05, |
| "loss": 4.0563, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8121305351348275e-05, |
| "loss": 4.028, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8112935782641484e-05, |
| "loss": 4.0433, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8104549835130964e-05, |
| "loss": 4.0524, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8096163887620444e-05, |
| "loss": 4.0245, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8087777940109924e-05, |
| "loss": 4.0417, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.807940837140313e-05, |
| "loss": 4.0463, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.807102242389262e-05, |
| "loss": 4.0482, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.80626364763821e-05, |
| "loss": 4.0497, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.805425052887158e-05, |
| "loss": 4.026, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804586458136105e-05, |
| "loss": 4.0235, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.803747863385053e-05, |
| "loss": 4.0426, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802909268634001e-05, |
| "loss": 4.0441, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802070673882949e-05, |
| "loss": 4.0342, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.80123371701227e-05, |
| "loss": 4.0375, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800395122261218e-05, |
| "loss": 4.0348, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799556527510166e-05, |
| "loss": 4.0457, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.798719570639488e-05, |
| "loss": 4.0369, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797880975888436e-05, |
| "loss": 4.034, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797042381137384e-05, |
| "loss": 4.0375, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.796203786386332e-05, |
| "loss": 4.0475, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7953668295156526e-05, |
| "loss": 4.0394, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7945282347646006e-05, |
| "loss": 4.0295, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7936896400135486e-05, |
| "loss": 4.029, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.79285268314287e-05, |
| "loss": 4.0358, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7920140883918175e-05, |
| "loss": 4.045, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7911754936407655e-05, |
| "loss": 4.0426, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7903368988897135e-05, |
| "loss": 4.0361, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7894983041386615e-05, |
| "loss": 4.0356, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7886597093876095e-05, |
| "loss": 4.0272, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7878211146365575e-05, |
| "loss": 4.0361, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7869825198855055e-05, |
| "loss": 4.0283, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.786145563014827e-05, |
| "loss": 4.0245, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.785306968263775e-05, |
| "loss": 4.0421, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.784470011393096e-05, |
| "loss": 4.0292, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.783631416642044e-05, |
| "loss": 4.0338, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782792821890992e-05, |
| "loss": 4.0283, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.78195422713994e-05, |
| "loss": 4.0241, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.781115632388888e-05, |
| "loss": 4.0414, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.780277037637836e-05, |
| "loss": 4.0309, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.779438442886784e-05, |
| "loss": 4.0505, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.778599848135732e-05, |
| "loss": 4.0412, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777762891265053e-05, |
| "loss": 4.0358, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.776924296514001e-05, |
| "loss": 4.039, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7760873396433224e-05, |
| "loss": 4.0326, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7752487448922704e-05, |
| "loss": 4.0288, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7744101501412184e-05, |
| "loss": 4.0416, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7735715553901664e-05, |
| "loss": 4.0374, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7727329606391144e-05, |
| "loss": 4.0317, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7718943658880624e-05, |
| "loss": 4.0283, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7710557711370104e-05, |
| "loss": 4.0334, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7702171763859584e-05, |
| "loss": 4.0333, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7693785816349064e-05, |
| "loss": 4.0228, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.768541624764227e-05, |
| "loss": 4.039, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.767703030013175e-05, |
| "loss": 4.019, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766864435262123e-05, |
| "loss": 4.0524, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766027478391444e-05, |
| "loss": 4.0317, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765188883640393e-05, |
| "loss": 4.0382, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.764350288889341e-05, |
| "loss": 4.0231, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.763511694138289e-05, |
| "loss": 4.0436, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.762673099387236e-05, |
| "loss": 4.0345, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.761834504636184e-05, |
| "loss": 4.0377, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.760995909885132e-05, |
| "loss": 4.0332, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76015731513408e-05, |
| "loss": 4.0344, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.759318720383028e-05, |
| "loss": 4.0359, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.758481763512349e-05, |
| "loss": 4.0507, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.757643168761297e-05, |
| "loss": 4.0242, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756804574010245e-05, |
| "loss": 4.043, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755965979259193e-05, |
| "loss": 4.039, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755127384508141e-05, |
| "loss": 4.0365, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7542904276374627e-05, |
| "loss": 4.0258, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7534518328864107e-05, |
| "loss": 4.0381, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7526132381353586e-05, |
| "loss": 4.0321, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7517746433843066e-05, |
| "loss": 4.0287, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7509360486332546e-05, |
| "loss": 4.0344, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.070601463317871, |
| "eval_runtime": 289.8375, |
| "eval_samples_per_second": 1316.569, |
| "eval_steps_per_second": 41.144, |
| "step": 763200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7500974538822026e-05, |
| "loss": 4.0258, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7492588591311506e-05, |
| "loss": 4.0178, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7484202643800986e-05, |
| "loss": 4.0326, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7475816696290466e-05, |
| "loss": 4.0431, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7467430748779946e-05, |
| "loss": 4.0395, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7459044801269426e-05, |
| "loss": 4.0289, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.74506588537589e-05, |
| "loss": 4.0369, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.744227290624838e-05, |
| "loss": 4.0255, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7433886958737866e-05, |
| "loss": 4.0283, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7425501011227346e-05, |
| "loss": 4.0402, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7417115063716826e-05, |
| "loss": 4.0306, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7408729116206306e-05, |
| "loss": 4.0382, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7400343168695786e-05, |
| "loss": 4.0379, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7391957221185266e-05, |
| "loss": 4.0249, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7383571273674746e-05, |
| "loss": 4.0218, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7375185326164226e-05, |
| "loss": 4.0174, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7366815757457435e-05, |
| "loss": 4.0287, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7358429809946915e-05, |
| "loss": 4.0309, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7350043862436395e-05, |
| "loss": 4.022, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7341657914925875e-05, |
| "loss": 4.0351, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7333271967415355e-05, |
| "loss": 4.0441, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7324902398708564e-05, |
| "loss": 4.0383, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7316516451198044e-05, |
| "loss": 4.0338, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.730813050368753e-05, |
| "loss": 4.0276, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.729974455617701e-05, |
| "loss": 4.0342, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.729135860866649e-05, |
| "loss": 4.0308, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7282972661155963e-05, |
| "loss": 4.0228, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7274586713645443e-05, |
| "loss": 4.0196, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.726620076613492e-05, |
| "loss": 4.0299, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.725783119742814e-05, |
| "loss": 4.0237, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724944524991761e-05, |
| "loss": 4.0257, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724107568121083e-05, |
| "loss": 4.0321, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.723268973370031e-05, |
| "loss": 4.0344, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.722430378618979e-05, |
| "loss": 4.0345, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.721591783867927e-05, |
| "loss": 4.0231, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7207548269972484e-05, |
| "loss": 4.0324, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7199162322461964e-05, |
| "loss": 4.0351, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.719077637495144e-05, |
| "loss": 4.0136, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.718239042744092e-05, |
| "loss": 4.0273, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.71740044799304e-05, |
| "loss": 4.0158, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.716561853241988e-05, |
| "loss": 4.0276, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.715723258490936e-05, |
| "loss": 4.0235, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7148863016202566e-05, |
| "loss": 4.0244, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7140477068692046e-05, |
| "loss": 4.028, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7132091121181526e-05, |
| "loss": 4.0348, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7123705173671006e-05, |
| "loss": 4.0329, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7115319226160486e-05, |
| "loss": 4.0343, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7106933278649966e-05, |
| "loss": 4.0245, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709854733113945e-05, |
| "loss": 4.0001, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709017776243266e-05, |
| "loss": 4.0349, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.708179181492214e-05, |
| "loss": 4.0248, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.707340586741162e-05, |
| "loss": 4.032, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.706503629870483e-05, |
| "loss": 4.0242, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.705665035119431e-05, |
| "loss": 4.0186, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.704826440368379e-05, |
| "loss": 4.0101, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.703987845617327e-05, |
| "loss": 4.0177, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.703149250866275e-05, |
| "loss": 4.0218, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.702310656115223e-05, |
| "loss": 4.0067, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.701472061364171e-05, |
| "loss": 4.0369, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.700633466613119e-05, |
| "loss": 4.0197, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699794871862067e-05, |
| "loss": 4.0063, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6989579149913886e-05, |
| "loss": 4.0323, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6981193202403366e-05, |
| "loss": 4.0081, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6972807254892846e-05, |
| "loss": 4.0111, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6964421307382326e-05, |
| "loss": 4.0253, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69560353598718e-05, |
| "loss": 4.0208, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694764941236128e-05, |
| "loss": 4.0124, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6939279843654495e-05, |
| "loss": 4.0118, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6930893896143975e-05, |
| "loss": 4.011, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.692250794863345e-05, |
| "loss": 4.0188, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.691412200112293e-05, |
| "loss": 4.0266, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.690573605361241e-05, |
| "loss": 4.0202, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6897366484905624e-05, |
| "loss": 4.0208, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6888980537395104e-05, |
| "loss": 4.0194, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6880594589884584e-05, |
| "loss": 4.0365, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6872208642374064e-05, |
| "loss": 4.0082, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6863822694863544e-05, |
| "loss": 4.023, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6855436747353024e-05, |
| "loss": 4.0251, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.684706717864623e-05, |
| "loss": 4.0092, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683868123113571e-05, |
| "loss": 4.0193, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683029528362519e-05, |
| "loss": 4.0252, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.682190933611467e-05, |
| "loss": 4.0267, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.681352338860415e-05, |
| "loss": 4.0314, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.680515381989736e-05, |
| "loss": 4.0011, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.679676787238684e-05, |
| "loss": 4.0058, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678838192487632e-05, |
| "loss": 4.0186, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677999597736581e-05, |
| "loss": 4.0229, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677161002985529e-05, |
| "loss": 4.0109, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.676322408234477e-05, |
| "loss": 4.0184, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.675483813483425e-05, |
| "loss": 4.0155, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.674646856612746e-05, |
| "loss": 4.0248, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673808261861694e-05, |
| "loss": 4.0157, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672969667110642e-05, |
| "loss": 4.0108, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.67213107235959e-05, |
| "loss": 4.0203, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6712941154889106e-05, |
| "loss": 4.0256, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6704555207378586e-05, |
| "loss": 4.0223, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6696169259868066e-05, |
| "loss": 4.0067, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6687783312357546e-05, |
| "loss": 4.007, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.667941374365076e-05, |
| "loss": 4.0176, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.667102779614024e-05, |
| "loss": 4.0237, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.666264184862972e-05, |
| "loss": 4.0235, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.66542559011192e-05, |
| "loss": 4.0127, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.664586995360868e-05, |
| "loss": 4.016, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663748400609816e-05, |
| "loss": 4.0074, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6629098058587635e-05, |
| "loss": 4.0137, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6620712111077115e-05, |
| "loss": 4.0083, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6612326163566595e-05, |
| "loss": 4.0081, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.660395659485981e-05, |
| "loss": 4.0194, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6595570647349284e-05, |
| "loss": 4.0099, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65872010786425e-05, |
| "loss": 4.0112, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657881513113198e-05, |
| "loss": 4.0071, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657042918362146e-05, |
| "loss": 4.0052, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.656204323611094e-05, |
| "loss": 4.0188, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655365728860042e-05, |
| "loss": 4.0138, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65452713410899e-05, |
| "loss": 4.0227, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.653688539357938e-05, |
| "loss": 4.0211, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652849944606886e-05, |
| "loss": 4.0168, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6520146256165805e-05, |
| "loss": 4.0181, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6511760308655285e-05, |
| "loss": 4.0147, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.650337436114476e-05, |
| "loss": 4.0104, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.649498841363424e-05, |
| "loss": 4.0191, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.648660246612372e-05, |
| "loss": 4.014, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.64782165186132e-05, |
| "loss": 4.0177, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6469830571102684e-05, |
| "loss": 4.0053, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6461444623592164e-05, |
| "loss": 4.0177, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6453058676081644e-05, |
| "loss": 4.0099, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.644470548617858e-05, |
| "loss": 4.0056, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.643631953866806e-05, |
| "loss": 4.0154, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.642793359115754e-05, |
| "loss": 3.9971, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.641954764364702e-05, |
| "loss": 4.0348, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.64111616961365e-05, |
| "loss": 4.0092, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.640277574862598e-05, |
| "loss": 4.0192, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.639440617991919e-05, |
| "loss": 4.0045, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.638602023240867e-05, |
| "loss": 4.0211, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637763428489815e-05, |
| "loss": 4.0136, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636924833738764e-05, |
| "loss": 4.0202, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636086238987712e-05, |
| "loss": 4.011, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.635249282117033e-05, |
| "loss": 4.0122, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.634410687365981e-05, |
| "loss": 4.0164, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.633572092614929e-05, |
| "loss": 4.0336, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632733497863877e-05, |
| "loss": 4.0015, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631894903112825e-05, |
| "loss": 4.0246, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6310579462421456e-05, |
| "loss": 4.0225, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6302193514910936e-05, |
| "loss": 4.0136, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6293807567400416e-05, |
| "loss": 4.0071, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6285421619889896e-05, |
| "loss": 4.015, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6277035672379376e-05, |
| "loss": 4.0121, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6268649724868856e-05, |
| "loss": 4.0121, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6260263777358336e-05, |
| "loss": 4.0105, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.057331562042236, |
| "eval_runtime": 302.0736, |
| "eval_samples_per_second": 1263.239, |
| "eval_steps_per_second": 39.477, |
| "step": 839520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6251877829847816e-05, |
| "loss": 4.0066, |
| "step": 839680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.624350826114103e-05, |
| "loss": 4.0001, |
| "step": 840192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.623512231363051e-05, |
| "loss": 4.0112, |
| "step": 840704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.622675274492372e-05, |
| "loss": 4.0213, |
| "step": 841216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.62183667974132e-05, |
| "loss": 4.0201, |
| "step": 841728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.620998084990268e-05, |
| "loss": 4.0113, |
| "step": 842240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.620159490239216e-05, |
| "loss": 4.0192, |
| "step": 842752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.619322533368537e-05, |
| "loss": 4.0078, |
| "step": 843264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.618483938617485e-05, |
| "loss": 4.005, |
| "step": 843776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.617645343866433e-05, |
| "loss": 4.0163, |
| "step": 844288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616806749115381e-05, |
| "loss": 4.0197, |
| "step": 844800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615968154364329e-05, |
| "loss": 4.0169, |
| "step": 845312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6151311974936505e-05, |
| "loss": 4.0146, |
| "step": 845824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6142926027425985e-05, |
| "loss": 4.0045, |
| "step": 846336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6134540079915465e-05, |
| "loss": 4.0066, |
| "step": 846848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6126154132404945e-05, |
| "loss": 3.9931, |
| "step": 847360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6117784563698154e-05, |
| "loss": 4.0133, |
| "step": 847872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6109398616187634e-05, |
| "loss": 4.01, |
| "step": 848384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6101012668677114e-05, |
| "loss": 4.0012, |
| "step": 848896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.609262672116659e-05, |
| "loss": 4.0143, |
| "step": 849408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.608424077365607e-05, |
| "loss": 4.0244, |
| "step": 849920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.607585482614555e-05, |
| "loss": 4.0182, |
| "step": 850432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.606746887863503e-05, |
| "loss": 4.0144, |
| "step": 850944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.605908293112451e-05, |
| "loss": 4.0099, |
| "step": 851456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.605071336241772e-05, |
| "loss": 4.0149, |
| "step": 851968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.60423274149072e-05, |
| "loss": 4.0112, |
| "step": 852480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.603395784620042e-05, |
| "loss": 4.0025, |
| "step": 852992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.602557189868989e-05, |
| "loss": 4.0006, |
| "step": 853504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.601718595117937e-05, |
| "loss": 4.0102, |
| "step": 854016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.600880000366885e-05, |
| "loss": 4.0058, |
| "step": 854528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.600043043496206e-05, |
| "loss": 4.0098, |
| "step": 855040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.599204448745154e-05, |
| "loss": 4.0075, |
| "step": 855552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.598365853994102e-05, |
| "loss": 4.0108, |
| "step": 856064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59752725924305e-05, |
| "loss": 4.0183, |
| "step": 856576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.596688664491998e-05, |
| "loss": 4.0031, |
| "step": 857088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59585170762132e-05, |
| "loss": 4.0134, |
| "step": 857600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595013112870268e-05, |
| "loss": 4.0146, |
| "step": 858112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.594174518119216e-05, |
| "loss": 3.9929, |
| "step": 858624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5933375612485366e-05, |
| "loss": 4.01, |
| "step": 859136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5924989664974846e-05, |
| "loss": 3.9996, |
| "step": 859648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.591662009626806e-05, |
| "loss": 4.003, |
| "step": 860160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5908234148757535e-05, |
| "loss": 4.0067, |
| "step": 860672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5899848201247015e-05, |
| "loss": 4.005, |
| "step": 861184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5891462253736495e-05, |
| "loss": 4.009, |
| "step": 861696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5883076306225975e-05, |
| "loss": 4.0138, |
| "step": 862208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5874690358715455e-05, |
| "loss": 4.0139, |
| "step": 862720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5866304411204935e-05, |
| "loss": 4.0124, |
| "step": 863232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5857918463694414e-05, |
| "loss": 4.0064, |
| "step": 863744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.58495325161839e-05, |
| "loss": 3.9804, |
| "step": 864256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.584114656867338e-05, |
| "loss": 4.015, |
| "step": 864768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.583276062116286e-05, |
| "loss": 4.0063, |
| "step": 865280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.582437467365234e-05, |
| "loss": 4.0143, |
| "step": 865792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.581600510494555e-05, |
| "loss": 4.0027, |
| "step": 866304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.580761915743503e-05, |
| "loss": 3.9959, |
| "step": 866816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579923320992451e-05, |
| "loss": 4.0, |
| "step": 867328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579084726241399e-05, |
| "loss": 3.9925, |
| "step": 867840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.578246131490347e-05, |
| "loss": 4.0046, |
| "step": 868352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.577407536739295e-05, |
| "loss": 3.9833, |
| "step": 868864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.576568941988242e-05, |
| "loss": 4.021, |
| "step": 869376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57573034723719e-05, |
| "loss": 4.0034, |
| "step": 869888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574893390366512e-05, |
| "loss": 3.985, |
| "step": 870400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57405479561546e-05, |
| "loss": 4.0132, |
| "step": 870912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.573216200864408e-05, |
| "loss": 3.991, |
| "step": 871424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5723792439937295e-05, |
| "loss": 3.9928, |
| "step": 871936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5715406492426775e-05, |
| "loss": 4.0025, |
| "step": 872448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.570702054491625e-05, |
| "loss": 4.0005, |
| "step": 872960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569863459740573e-05, |
| "loss": 3.9931, |
| "step": 873472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5690265028698944e-05, |
| "loss": 3.9978, |
| "step": 873984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5681879081188424e-05, |
| "loss": 3.9889, |
| "step": 874496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.56734931336779e-05, |
| "loss": 4.0002, |
| "step": 875008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.566510718616738e-05, |
| "loss": 4.0062, |
| "step": 875520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.565673761746059e-05, |
| "loss": 4.0006, |
| "step": 876032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.564835166995007e-05, |
| "loss": 4.0063, |
| "step": 876544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.563996572243955e-05, |
| "loss": 3.9956, |
| "step": 877056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.563159615373277e-05, |
| "loss": 4.0221, |
| "step": 877568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.562321020622225e-05, |
| "loss": 3.9879, |
| "step": 878080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.561482425871172e-05, |
| "loss": 4.0076, |
| "step": 878592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.56064383112012e-05, |
| "loss": 3.9976, |
| "step": 879104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559805236369068e-05, |
| "loss": 3.9981, |
| "step": 879616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558966641618016e-05, |
| "loss": 3.9982, |
| "step": 880128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558128046866964e-05, |
| "loss": 4.0062, |
| "step": 880640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.557289452115912e-05, |
| "loss": 4.0103, |
| "step": 881152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55645085736486e-05, |
| "loss": 4.0118, |
| "step": 881664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.555612262613808e-05, |
| "loss": 3.9853, |
| "step": 882176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554773667862756e-05, |
| "loss": 3.9871, |
| "step": 882688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553935073111704e-05, |
| "loss": 3.9975, |
| "step": 883200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553098116241026e-05, |
| "loss": 4.004, |
| "step": 883712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.552259521489974e-05, |
| "loss": 3.9934, |
| "step": 884224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.551420926738922e-05, |
| "loss": 3.9968, |
| "step": 884736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55058233198787e-05, |
| "loss": 3.9981, |
| "step": 885248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5497453751171906e-05, |
| "loss": 4.0015, |
| "step": 885760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5489084182465115e-05, |
| "loss": 3.999, |
| "step": 886272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5480698234954595e-05, |
| "loss": 3.9925, |
| "step": 886784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5472312287444075e-05, |
| "loss": 4.0021, |
| "step": 887296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5463926339933555e-05, |
| "loss": 4.0055, |
| "step": 887808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5455540392423035e-05, |
| "loss": 4.0063, |
| "step": 888320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5447170823716244e-05, |
| "loss": 3.9876, |
| "step": 888832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5438784876205724e-05, |
| "loss": 3.9878, |
| "step": 889344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.543039892869521e-05, |
| "loss": 4.0006, |
| "step": 889856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.542201298118469e-05, |
| "loss": 4.0027, |
| "step": 890368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.541362703367417e-05, |
| "loss": 4.0065, |
| "step": 890880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.540525746496738e-05, |
| "loss": 3.996, |
| "step": 891392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.539687151745686e-05, |
| "loss": 3.997, |
| "step": 891904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538848556994634e-05, |
| "loss": 3.9899, |
| "step": 892416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538009962243582e-05, |
| "loss": 3.9946, |
| "step": 892928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.537173005372903e-05, |
| "loss": 3.9924, |
| "step": 893440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.536334410621851e-05, |
| "loss": 3.9897, |
| "step": 893952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.535495815870799e-05, |
| "loss": 3.9977, |
| "step": 894464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.534657221119747e-05, |
| "loss": 3.9942, |
| "step": 894976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533818626368695e-05, |
| "loss": 3.9928, |
| "step": 895488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.532980031617643e-05, |
| "loss": 3.9891, |
| "step": 896000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5321430747469644e-05, |
| "loss": 3.9867, |
| "step": 896512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5313044799959124e-05, |
| "loss": 4.004, |
| "step": 897024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5304658852448604e-05, |
| "loss": 3.996, |
| "step": 897536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5296272904938084e-05, |
| "loss": 4.0042, |
| "step": 898048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528790333623129e-05, |
| "loss": 3.9984, |
| "step": 898560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527951738872077e-05, |
| "loss": 4.0023, |
| "step": 899072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527113144121025e-05, |
| "loss": 4.0013, |
| "step": 899584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.526274549369973e-05, |
| "loss": 3.9914, |
| "step": 900096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5254359546189206e-05, |
| "loss": 4.0004, |
| "step": 900608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.524598997748242e-05, |
| "loss": 3.9985, |
| "step": 901120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.52376040299719e-05, |
| "loss": 3.9949, |
| "step": 901632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.522921808246138e-05, |
| "loss": 4.0011, |
| "step": 902144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.522083213495086e-05, |
| "loss": 3.9919, |
| "step": 902656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.521246256624408e-05, |
| "loss": 3.9962, |
| "step": 903168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.520407661873356e-05, |
| "loss": 3.9904, |
| "step": 903680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.519569067122303e-05, |
| "loss": 3.9898, |
| "step": 904192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.518730472371251e-05, |
| "loss": 3.9987, |
| "step": 904704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.517893515500573e-05, |
| "loss": 3.9853, |
| "step": 905216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.517054920749521e-05, |
| "loss": 4.0126, |
| "step": 905728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.516216325998468e-05, |
| "loss": 3.9949, |
| "step": 906240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.515377731247416e-05, |
| "loss": 3.9998, |
| "step": 906752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5145407743767376e-05, |
| "loss": 3.987, |
| "step": 907264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5137021796256856e-05, |
| "loss": 4.0028, |
| "step": 907776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5128635848746336e-05, |
| "loss": 3.9992, |
| "step": 908288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5120249901235816e-05, |
| "loss": 4.0023, |
| "step": 908800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5111863953725296e-05, |
| "loss": 3.9941, |
| "step": 909312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5103478006214776e-05, |
| "loss": 3.994, |
| "step": 909824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5095092058704256e-05, |
| "loss": 3.9942, |
| "step": 910336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5086706111193736e-05, |
| "loss": 4.0159, |
| "step": 910848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5078320163683216e-05, |
| "loss": 3.9887, |
| "step": 911360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5069934216172696e-05, |
| "loss": 4.0065, |
| "step": 911872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5061548268662175e-05, |
| "loss": 4.0051, |
| "step": 912384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5053178699955385e-05, |
| "loss": 3.9932, |
| "step": 912896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5044809131248594e-05, |
| "loss": 3.9969, |
| "step": 913408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5036423183738074e-05, |
| "loss": 3.9946, |
| "step": 913920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5028037236227554e-05, |
| "loss": 3.9946, |
| "step": 914432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.501965128871704e-05, |
| "loss": 3.9939, |
| "step": 914944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.501126534120652e-05, |
| "loss": 3.9935, |
| "step": 915456 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.046631813049316, |
| "eval_runtime": 309.2483, |
| "eval_samples_per_second": 1233.931, |
| "eval_steps_per_second": 38.561, |
| "step": 915840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.5002879393696e-05, |
| "loss": 4.0069, |
| "step": 915968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.499449344618548e-05, |
| "loss": 3.9787, |
| "step": 916480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.498610749867496e-05, |
| "loss": 3.9898, |
| "step": 916992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.497772155116444e-05, |
| "loss": 4.0085, |
| "step": 917504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.496933560365392e-05, |
| "loss": 3.9993, |
| "step": 918016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.496094965614339e-05, |
| "loss": 3.9942, |
| "step": 918528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.495256370863287e-05, |
| "loss": 4.0075, |
| "step": 919040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.494417776112235e-05, |
| "loss": 3.9882, |
| "step": 919552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.493579181361183e-05, |
| "loss": 3.9865, |
| "step": 920064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.492740586610131e-05, |
| "loss": 4.0021, |
| "step": 920576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.491901991859079e-05, |
| "loss": 4.0019, |
| "step": 921088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.491063397108027e-05, |
| "loss": 3.9965, |
| "step": 921600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.490226440237349e-05, |
| "loss": 4.006, |
| "step": 922112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.489387845486297e-05, |
| "loss": 3.985, |
| "step": 922624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.488549250735245e-05, |
| "loss": 3.9858, |
| "step": 923136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.487710655984193e-05, |
| "loss": 3.9775, |
| "step": 923648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.486872061233141e-05, |
| "loss": 3.9958, |
| "step": 924160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.486033466482089e-05, |
| "loss": 3.9932, |
| "step": 924672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.485194871731037e-05, |
| "loss": 3.9878, |
| "step": 925184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.484356276979985e-05, |
| "loss": 3.9903, |
| "step": 925696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.483517682228933e-05, |
| "loss": 4.0146, |
| "step": 926208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.482679087477881e-05, |
| "loss": 3.9948, |
| "step": 926720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.481840492726828e-05, |
| "loss": 3.9979, |
| "step": 927232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.481001897975776e-05, |
| "loss": 3.9929, |
| "step": 927744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4801665789854707e-05, |
| "loss": 4.0001, |
| "step": 928256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4793279842344187e-05, |
| "loss": 3.9916, |
| "step": 928768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.47849102736374e-05, |
| "loss": 3.9912, |
| "step": 929280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.477652432612688e-05, |
| "loss": 3.9814, |
| "step": 929792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.476813837861636e-05, |
| "loss": 3.9954, |
| "step": 930304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.475975243110584e-05, |
| "loss": 3.9844, |
| "step": 930816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.475136648359532e-05, |
| "loss": 3.9935, |
| "step": 931328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.47429805360848e-05, |
| "loss": 3.9909, |
| "step": 931840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.473459458857428e-05, |
| "loss": 3.9972, |
| "step": 932352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4726208641063755e-05, |
| "loss": 4.0, |
| "step": 932864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4717822693553235e-05, |
| "loss": 3.9892, |
| "step": 933376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.470946950365018e-05, |
| "loss": 3.9962, |
| "step": 933888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.470108355613966e-05, |
| "loss": 3.9995, |
| "step": 934400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.469269760862914e-05, |
| "loss": 3.9738, |
| "step": 934912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.468431166111863e-05, |
| "loss": 3.994, |
| "step": 935424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.467592571360811e-05, |
| "loss": 3.9842, |
| "step": 935936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.466753976609758e-05, |
| "loss": 3.9845, |
| "step": 936448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.465915381858706e-05, |
| "loss": 3.9952, |
| "step": 936960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.465076787107654e-05, |
| "loss": 3.9888, |
| "step": 937472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.464238192356602e-05, |
| "loss": 3.9897, |
| "step": 937984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.46339959760555e-05, |
| "loss": 3.9948, |
| "step": 938496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.462561002854498e-05, |
| "loss": 3.9982, |
| "step": 939008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.461724045983819e-05, |
| "loss": 3.9976, |
| "step": 939520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.460885451232767e-05, |
| "loss": 3.9927, |
| "step": 940032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.460046856481715e-05, |
| "loss": 3.9656, |
| "step": 940544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.459208261730663e-05, |
| "loss": 3.9953, |
| "step": 941056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.458369666979611e-05, |
| "loss": 3.9927, |
| "step": 941568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4575310722285595e-05, |
| "loss": 3.9987, |
| "step": 942080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4566941153578805e-05, |
| "loss": 3.9905, |
| "step": 942592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4558555206068285e-05, |
| "loss": 3.9822, |
| "step": 943104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4550169258557764e-05, |
| "loss": 3.9766, |
| "step": 943616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4541783311047244e-05, |
| "loss": 3.9793, |
| "step": 944128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4533397363536724e-05, |
| "loss": 3.9858, |
| "step": 944640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4525011416026204e-05, |
| "loss": 3.9676, |
| "step": 945152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4516625468515684e-05, |
| "loss": 4.0055, |
| "step": 945664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4508239521005164e-05, |
| "loss": 3.9879, |
| "step": 946176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.449986995229837e-05, |
| "loss": 3.9702, |
| "step": 946688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.449148400478785e-05, |
| "loss": 3.9943, |
| "step": 947200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.448309805727733e-05, |
| "loss": 3.9751, |
| "step": 947712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.447471210976681e-05, |
| "loss": 3.9753, |
| "step": 948224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.446634254106003e-05, |
| "loss": 3.9861, |
| "step": 948736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.445795659354951e-05, |
| "loss": 3.9883, |
| "step": 949248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.444958702484272e-05, |
| "loss": 3.972, |
| "step": 949760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.44412010773322e-05, |
| "loss": 3.9881, |
| "step": 950272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.443281512982168e-05, |
| "loss": 3.9714, |
| "step": 950784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.442442918231116e-05, |
| "loss": 3.9825, |
| "step": 951296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.441604323480064e-05, |
| "loss": 3.9922, |
| "step": 951808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.440765728729012e-05, |
| "loss": 3.9874, |
| "step": 952320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.439928771858333e-05, |
| "loss": 3.9847, |
| "step": 952832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.439090177107281e-05, |
| "loss": 3.9824, |
| "step": 953344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.438251582356229e-05, |
| "loss": 4.0068, |
| "step": 953856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4374146254855496e-05, |
| "loss": 3.9691, |
| "step": 954368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.436576030734498e-05, |
| "loss": 3.9936, |
| "step": 954880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.435737435983446e-05, |
| "loss": 3.9808, |
| "step": 955392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.434898841232394e-05, |
| "loss": 3.9808, |
| "step": 955904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4340602464813416e-05, |
| "loss": 3.9852, |
| "step": 956416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4332216517302896e-05, |
| "loss": 3.9848, |
| "step": 956928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4323830569792376e-05, |
| "loss": 3.9978, |
| "step": 957440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4315444622281856e-05, |
| "loss": 3.9948, |
| "step": 957952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4307058674771336e-05, |
| "loss": 3.9701, |
| "step": 958464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4298672727260816e-05, |
| "loss": 3.9707, |
| "step": 958976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4290286779750296e-05, |
| "loss": 3.9773, |
| "step": 959488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4281900832239776e-05, |
| "loss": 3.9904, |
| "step": 960000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4273531263532985e-05, |
| "loss": 3.9798, |
| "step": 960512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4265145316022465e-05, |
| "loss": 3.9785, |
| "step": 961024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.425677574731568e-05, |
| "loss": 3.9902, |
| "step": 961536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.424838979980516e-05, |
| "loss": 3.9831, |
| "step": 962048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.424000385229464e-05, |
| "loss": 3.9871, |
| "step": 962560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.423163428358785e-05, |
| "loss": 3.9751, |
| "step": 963072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.422324833607733e-05, |
| "loss": 3.9853, |
| "step": 963584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.421486238856681e-05, |
| "loss": 3.9866, |
| "step": 964096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.420647644105629e-05, |
| "loss": 3.9922, |
| "step": 964608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.419809049354577e-05, |
| "loss": 3.9717, |
| "step": 965120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.418972092483898e-05, |
| "loss": 3.9726, |
| "step": 965632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.418133497732846e-05, |
| "loss": 3.985, |
| "step": 966144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.417294902981794e-05, |
| "loss": 3.9845, |
| "step": 966656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.416456308230742e-05, |
| "loss": 3.9953, |
| "step": 967168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4156177134796905e-05, |
| "loss": 3.9772, |
| "step": 967680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4147791187286385e-05, |
| "loss": 3.9795, |
| "step": 968192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4139405239775865e-05, |
| "loss": 3.9769, |
| "step": 968704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4131019292265345e-05, |
| "loss": 3.9746, |
| "step": 969216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4122633344754825e-05, |
| "loss": 3.9777, |
| "step": 969728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4114263776048034e-05, |
| "loss": 3.9781, |
| "step": 970240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4105877828537514e-05, |
| "loss": 3.9814, |
| "step": 970752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4097491881026994e-05, |
| "loss": 3.9781, |
| "step": 971264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4089105933516474e-05, |
| "loss": 3.9759, |
| "step": 971776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.408071998600595e-05, |
| "loss": 3.9767, |
| "step": 972288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.407233403849543e-05, |
| "loss": 3.9683, |
| "step": 972800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.406396446978864e-05, |
| "loss": 3.9877, |
| "step": 973312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.405557852227812e-05, |
| "loss": 3.986, |
| "step": 973824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.40471925747676e-05, |
| "loss": 3.9846, |
| "step": 974336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.403880662725708e-05, |
| "loss": 3.9888, |
| "step": 974848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.403042067974656e-05, |
| "loss": 3.9869, |
| "step": 975360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.402205111103978e-05, |
| "loss": 3.9827, |
| "step": 975872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.401366516352925e-05, |
| "loss": 3.9755, |
| "step": 976384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.400527921601873e-05, |
| "loss": 3.9858, |
| "step": 976896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.399689326850821e-05, |
| "loss": 3.9787, |
| "step": 977408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.398852369980142e-05, |
| "loss": 3.9801, |
| "step": 977920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.39801377522909e-05, |
| "loss": 3.985, |
| "step": 978432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.397175180478038e-05, |
| "loss": 3.9739, |
| "step": 978944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.396336585726986e-05, |
| "loss": 3.9822, |
| "step": 979456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.395497990975934e-05, |
| "loss": 3.9708, |
| "step": 979968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.394659396224882e-05, |
| "loss": 3.9753, |
| "step": 980480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.393820801473831e-05, |
| "loss": 3.9823, |
| "step": 980992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3929838446031516e-05, |
| "loss": 3.9733, |
| "step": 981504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3921452498520996e-05, |
| "loss": 3.9923, |
| "step": 982016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3913066551010476e-05, |
| "loss": 3.9816, |
| "step": 982528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3904680603499956e-05, |
| "loss": 3.9829, |
| "step": 983040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3896294655989436e-05, |
| "loss": 3.9753, |
| "step": 983552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3887908708478916e-05, |
| "loss": 3.9826, |
| "step": 984064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3879522760968396e-05, |
| "loss": 3.9878, |
| "step": 984576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3871153192261605e-05, |
| "loss": 3.9867, |
| "step": 985088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3862767244751085e-05, |
| "loss": 3.9804, |
| "step": 985600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3854397676044294e-05, |
| "loss": 3.9757, |
| "step": 986112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3846011728533774e-05, |
| "loss": 3.9817, |
| "step": 986624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.383762578102326e-05, |
| "loss": 3.9998, |
| "step": 987136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.382923983351274e-05, |
| "loss": 3.9781, |
| "step": 987648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.382085388600222e-05, |
| "loss": 3.988, |
| "step": 988160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.38124679384917e-05, |
| "loss": 3.9903, |
| "step": 988672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.380408199098118e-05, |
| "loss": 3.9813, |
| "step": 989184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.379569604347066e-05, |
| "loss": 3.9816, |
| "step": 989696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.378732647476387e-05, |
| "loss": 3.978, |
| "step": 990208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.377894052725335e-05, |
| "loss": 3.9792, |
| "step": 990720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.377057095854656e-05, |
| "loss": 3.9825, |
| "step": 991232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.376218501103604e-05, |
| "loss": 3.9739, |
| "step": 991744 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.037889003753662, |
| "eval_runtime": 314.8622, |
| "eval_samples_per_second": 1211.93, |
| "eval_steps_per_second": 37.874, |
| "step": 992160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.375379906352552e-05, |
| "loss": 3.9785, |
| "step": 992256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3745413116015e-05, |
| "loss": 3.9631, |
| "step": 992768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.373702716850448e-05, |
| "loss": 3.9756, |
| "step": 993280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372864122099396e-05, |
| "loss": 3.9961, |
| "step": 993792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372025527348344e-05, |
| "loss": 3.983, |
| "step": 994304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.371186932597292e-05, |
| "loss": 3.9793, |
| "step": 994816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.37034833784624e-05, |
| "loss": 3.9862, |
| "step": 995328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.369509743095188e-05, |
| "loss": 3.9767, |
| "step": 995840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.368672786224509e-05, |
| "loss": 3.971, |
| "step": 996352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.367834191473457e-05, |
| "loss": 3.9896, |
| "step": 996864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.366995596722405e-05, |
| "loss": 3.9801, |
| "step": 997376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.366157001971353e-05, |
| "loss": 3.9836, |
| "step": 997888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.365321682981047e-05, |
| "loss": 3.9894, |
| "step": 998400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.364483088229995e-05, |
| "loss": 3.9682, |
| "step": 998912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.363644493478943e-05, |
| "loss": 3.9728, |
| "step": 999424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.362805898727891e-05, |
| "loss": 3.9628, |
| "step": 999936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.361967303976839e-05, |
| "loss": 3.9815, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.361130347106161e-05, |
| "loss": 3.9777, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.360291752355109e-05, |
| "loss": 3.9727, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.359453157604056e-05, |
| "loss": 3.9732, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.358614562853004e-05, |
| "loss": 3.999, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.357775968101952e-05, |
| "loss": 3.979, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3569373733509e-05, |
| "loss": 3.9837, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.356098778599848e-05, |
| "loss": 3.9818, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.355260183848796e-05, |
| "loss": 3.9848, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.354421589097744e-05, |
| "loss": 3.9757, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.353582994346692e-05, |
| "loss": 3.9737, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.35274439959564e-05, |
| "loss": 3.9724, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351905804844588e-05, |
| "loss": 3.9727, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3510704858542826e-05, |
| "loss": 3.9719, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3502318911032306e-05, |
| "loss": 3.9787, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3493932963521786e-05, |
| "loss": 3.9717, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3485547016011266e-05, |
| "loss": 3.988, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3477161068500746e-05, |
| "loss": 3.9807, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3468775120990226e-05, |
| "loss": 3.9774, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3460405552283435e-05, |
| "loss": 3.9844, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3452035983576644e-05, |
| "loss": 3.9809, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3443650036066124e-05, |
| "loss": 3.9567, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3435264088555604e-05, |
| "loss": 3.9794, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.342687814104509e-05, |
| "loss": 3.9694, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.341849219353457e-05, |
| "loss": 3.9695, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.341010624602405e-05, |
| "loss": 3.9793, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.340172029851353e-05, |
| "loss": 3.9776, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.339333435100301e-05, |
| "loss": 3.9742, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.338494840349249e-05, |
| "loss": 3.9838, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.337656245598197e-05, |
| "loss": 3.9808, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.336817650847144e-05, |
| "loss": 3.9805, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335979056096092e-05, |
| "loss": 3.9795, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335142099225414e-05, |
| "loss": 3.954, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.334303504474362e-05, |
| "loss": 3.9762, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.333464909723309e-05, |
| "loss": 3.9826, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.332626314972257e-05, |
| "loss": 3.9796, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.331787720221205e-05, |
| "loss": 3.9741, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330950763350527e-05, |
| "loss": 3.9678, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330112168599475e-05, |
| "loss": 3.9631, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.329273573848423e-05, |
| "loss": 3.9616, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.328434979097371e-05, |
| "loss": 3.9769, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.327598022226692e-05, |
| "loss": 3.954, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.32675942747564e-05, |
| "loss": 3.9836, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325922470604961e-05, |
| "loss": 3.9758, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325083875853909e-05, |
| "loss": 3.9594, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3242452811028566e-05, |
| "loss": 3.9771, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3234066863518046e-05, |
| "loss": 3.9594, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.322569729481126e-05, |
| "loss": 3.9638, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321731134730074e-05, |
| "loss": 3.9675, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.320892539979022e-05, |
| "loss": 3.9779, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.32005394522797e-05, |
| "loss": 3.9603, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.319215350476918e-05, |
| "loss": 3.9719, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.318376755725866e-05, |
| "loss": 3.9554, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.317538160974814e-05, |
| "loss": 3.9687, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.316699566223762e-05, |
| "loss": 3.9753, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.315862609353083e-05, |
| "loss": 3.9745, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.315024014602031e-05, |
| "loss": 3.9681, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.314185419850979e-05, |
| "loss": 3.9706, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.313346825099927e-05, |
| "loss": 3.9898, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.312509868229248e-05, |
| "loss": 3.9566, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.311671273478196e-05, |
| "loss": 3.981, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3108326787271446e-05, |
| "loss": 3.9678, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3099940839760926e-05, |
| "loss": 3.9639, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3091554892250406e-05, |
| "loss": 3.9676, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3083168944739886e-05, |
| "loss": 3.972, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3074799376033095e-05, |
| "loss": 3.9815, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3066413428522575e-05, |
| "loss": 3.9843, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3058027481012055e-05, |
| "loss": 3.9585, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3049657912305264e-05, |
| "loss": 3.9565, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3041271964794744e-05, |
| "loss": 3.9639, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3032886017284224e-05, |
| "loss": 3.9746, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3024500069773704e-05, |
| "loss": 3.9661, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.301613050106691e-05, |
| "loss": 3.9651, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.30077445535564e-05, |
| "loss": 3.9745, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.299935860604588e-05, |
| "loss": 3.9664, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.299097265853536e-05, |
| "loss": 3.9763, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.298258671102484e-05, |
| "loss": 3.9597, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.297420076351432e-05, |
| "loss": 3.973, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.29658148160038e-05, |
| "loss": 3.9684, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.295744524729701e-05, |
| "loss": 3.9824, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.294905929978649e-05, |
| "loss": 3.9555, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.294067335227597e-05, |
| "loss": 3.9588, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.293228740476545e-05, |
| "loss": 3.9754, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.292390145725493e-05, |
| "loss": 3.9682, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.29155155097444e-05, |
| "loss": 3.9786, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.290712956223388e-05, |
| "loss": 3.9657, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.289874361472337e-05, |
| "loss": 3.9644, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.289035766721285e-05, |
| "loss": 3.9625, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.288197171970233e-05, |
| "loss": 3.9657, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.287360215099554e-05, |
| "loss": 3.9642, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.286523258228875e-05, |
| "loss": 3.9623, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2856846634778227e-05, |
| "loss": 3.9684, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2848460687267707e-05, |
| "loss": 3.9618, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2840074739757186e-05, |
| "loss": 3.9647, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2831688792246666e-05, |
| "loss": 3.9626, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2823302844736146e-05, |
| "loss": 3.9524, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2814933276029355e-05, |
| "loss": 3.9747, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2806547328518835e-05, |
| "loss": 3.9676, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.279816138100832e-05, |
| "loss": 3.9711, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.27897754334978e-05, |
| "loss": 3.9769, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.278138948598728e-05, |
| "loss": 3.9727, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.277300353847676e-05, |
| "loss": 3.967, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.276461759096624e-05, |
| "loss": 3.9615, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.275623164345572e-05, |
| "loss": 3.9699, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.27478456959452e-05, |
| "loss": 3.9642, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.273945974843468e-05, |
| "loss": 3.9628, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.273107380092416e-05, |
| "loss": 3.972, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.272268785341364e-05, |
| "loss": 3.965, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.271431828470685e-05, |
| "loss": 3.9661, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.270593233719633e-05, |
| "loss": 3.9603, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.269754638968581e-05, |
| "loss": 3.9585, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2689193199782756e-05, |
| "loss": 3.9708, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2680807252272236e-05, |
| "loss": 3.958, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2672421304761716e-05, |
| "loss": 3.9704, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2664035357251196e-05, |
| "loss": 3.9751, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2655649409740676e-05, |
| "loss": 3.967, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2647263462230156e-05, |
| "loss": 3.9595, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2638877514719636e-05, |
| "loss": 3.9729, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2630491567209115e-05, |
| "loss": 3.9717, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2622121998502325e-05, |
| "loss": 3.9707, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2613736050991805e-05, |
| "loss": 3.9628, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2605350103481284e-05, |
| "loss": 3.9627, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2596964155970764e-05, |
| "loss": 3.9739, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.258857820846024e-05, |
| "loss": 3.9852, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2580192260949724e-05, |
| "loss": 3.9607, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2571806313439204e-05, |
| "loss": 3.9727, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2563420365928684e-05, |
| "loss": 3.9762, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.255505079722189e-05, |
| "loss": 3.9659, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.254668122851511e-05, |
| "loss": 3.9693, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.253829528100459e-05, |
| "loss": 3.9643, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252990933349406e-05, |
| "loss": 3.9638, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252152338598354e-05, |
| "loss": 3.9763, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.251313743847302e-05, |
| "loss": 3.9532, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.0302228927612305, |
| "eval_runtime": 317.25, |
| "eval_samples_per_second": 1202.808, |
| "eval_steps_per_second": 37.589, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.250476786976624e-05, |
| "loss": 3.9771, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.249638192225571e-05, |
| "loss": 3.9528, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.248799597474519e-05, |
| "loss": 3.9601, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247961002723468e-05, |
| "loss": 3.9806, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247122407972416e-05, |
| "loss": 3.9734, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.246283813221364e-05, |
| "loss": 3.9638, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.245445218470312e-05, |
| "loss": 3.9706, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.24460662371926e-05, |
| "loss": 3.96, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.243768028968208e-05, |
| "loss": 3.9583, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.242929434217156e-05, |
| "loss": 3.973, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.242090839466104e-05, |
| "loss": 3.9696, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.241252244715052e-05, |
| "loss": 3.9713, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.240415287844373e-05, |
| "loss": 3.9764, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.239576693093321e-05, |
| "loss": 3.956, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.238738098342269e-05, |
| "loss": 3.9556, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237899503591217e-05, |
| "loss": 3.9504, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2370609088401647e-05, |
| "loss": 3.9685, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.236223951969486e-05, |
| "loss": 3.9618, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.235385357218434e-05, |
| "loss": 3.9618, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.234546762467382e-05, |
| "loss": 3.9642, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.23370816771633e-05, |
| "loss": 3.9819, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2328695729652775e-05, |
| "loss": 3.9712, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2320309782142255e-05, |
| "loss": 3.9638, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2311923834631735e-05, |
| "loss": 3.9686, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2303537887121215e-05, |
| "loss": 3.9722, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2295151939610695e-05, |
| "loss": 3.9648, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2286782370903904e-05, |
| "loss": 3.9577, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.227841280219712e-05, |
| "loss": 3.9586, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.22700268546866e-05, |
| "loss": 3.9568, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.226164090717608e-05, |
| "loss": 3.9608, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.225325495966556e-05, |
| "loss": 3.9654, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.224486901215504e-05, |
| "loss": 3.9629, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.223648306464452e-05, |
| "loss": 3.9678, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2228097117134e-05, |
| "loss": 3.9675, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.221971116962348e-05, |
| "loss": 3.9668, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.221134160091669e-05, |
| "loss": 3.97, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.220295565340617e-05, |
| "loss": 3.9714, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.219456970589565e-05, |
| "loss": 3.9443, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.218618375838513e-05, |
| "loss": 3.9705, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.217781418967834e-05, |
| "loss": 3.949, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.216942824216782e-05, |
| "loss": 3.9598, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.21610422946573e-05, |
| "loss": 3.9624, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.215265634714678e-05, |
| "loss": 3.9697, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2144270399636265e-05, |
| "loss": 3.9593, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2135900830929474e-05, |
| "loss": 3.9664, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2127514883418954e-05, |
| "loss": 3.9732, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2119128935908434e-05, |
| "loss": 3.9644, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2110742988397914e-05, |
| "loss": 3.9691, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.210237341969112e-05, |
| "loss": 3.9389, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.20939874721806e-05, |
| "loss": 3.9605, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.208560152467008e-05, |
| "loss": 3.9705, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.207721557715956e-05, |
| "loss": 3.9673, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.206884600845277e-05, |
| "loss": 3.962, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.206046006094225e-05, |
| "loss": 3.9512, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.205207411343173e-05, |
| "loss": 3.9559, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.204368816592122e-05, |
| "loss": 3.9424, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.20353022184107e-05, |
| "loss": 3.9686, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.202691627090018e-05, |
| "loss": 3.9404, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.201853032338966e-05, |
| "loss": 3.9679, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2010177133486596e-05, |
| "loss": 3.9631, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2001791185976076e-05, |
| "loss": 3.9486, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1993405238465556e-05, |
| "loss": 3.9645, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1985019290955036e-05, |
| "loss": 3.9463, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1976633343444516e-05, |
| "loss": 3.9498, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1968247395933996e-05, |
| "loss": 3.9521, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1959861448423476e-05, |
| "loss": 3.9629, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1951475500912956e-05, |
| "loss": 3.946, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1943089553402436e-05, |
| "loss": 3.9562, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1934703605891916e-05, |
| "loss": 3.943, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.192633403718513e-05, |
| "loss": 3.9516, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.191794808967461e-05, |
| "loss": 3.9651, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1909562142164085e-05, |
| "loss": 3.9628, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1901176194653565e-05, |
| "loss": 3.9544, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.189280662594678e-05, |
| "loss": 3.9605, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.188443705723999e-05, |
| "loss": 3.9724, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.187605110972947e-05, |
| "loss": 3.9479, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.186766516221895e-05, |
| "loss": 3.9648, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.185927921470843e-05, |
| "loss": 3.9542, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.185089326719791e-05, |
| "loss": 3.9548, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.184250731968739e-05, |
| "loss": 3.9531, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.183412137217687e-05, |
| "loss": 3.9577, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.182573542466635e-05, |
| "loss": 3.9707, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.181734947715583e-05, |
| "loss": 3.967, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.180896352964531e-05, |
| "loss": 3.9551, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.180057758213479e-05, |
| "loss": 3.9383, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.179219163462427e-05, |
| "loss": 3.9493, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.178382206591748e-05, |
| "loss": 3.9615, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.177545249721069e-05, |
| "loss": 3.9535, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.176706654970017e-05, |
| "loss": 3.9525, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.175868060218965e-05, |
| "loss": 3.9629, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1750311033482863e-05, |
| "loss": 3.9546, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.174192508597234e-05, |
| "loss": 3.959, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.173353913846182e-05, |
| "loss": 3.9487, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.17251531909513e-05, |
| "loss": 3.9611, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.171676724344078e-05, |
| "loss": 3.9518, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.170838129593026e-05, |
| "loss": 3.9695, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.170001172722347e-05, |
| "loss": 3.9464, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.169162577971295e-05, |
| "loss": 3.9457, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.168323983220243e-05, |
| "loss": 3.9633, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.167485388469191e-05, |
| "loss": 3.9554, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.166648431598512e-05, |
| "loss": 3.9641, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.16580983684746e-05, |
| "loss": 3.9535, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.164971242096408e-05, |
| "loss": 3.953, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.164132647345356e-05, |
| "loss": 3.9492, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.163294052594304e-05, |
| "loss": 3.9532, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.162455457843253e-05, |
| "loss": 3.946, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.161616863092201e-05, |
| "loss": 3.952, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.160778268341149e-05, |
| "loss": 3.9552, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.159939673590097e-05, |
| "loss": 3.9486, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.159102716719418e-05, |
| "loss": 3.9481, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.158264121968366e-05, |
| "loss": 3.9558, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.157425527217314e-05, |
| "loss": 3.9389, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.156586932466262e-05, |
| "loss": 3.9597, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.155748337715209e-05, |
| "loss": 3.9535, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.154909742964157e-05, |
| "loss": 3.9592, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.154071148213105e-05, |
| "loss": 3.9617, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.153232553462053e-05, |
| "loss": 3.9604, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.152397234471748e-05, |
| "loss": 3.9541, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.151558639720696e-05, |
| "loss": 3.9517, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.150720044969644e-05, |
| "loss": 3.9549, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149881450218592e-05, |
| "loss": 3.953, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1490428554675395e-05, |
| "loss": 3.9458, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1482042607164874e-05, |
| "loss": 3.9628, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1473656659654354e-05, |
| "loss": 3.9483, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1465270712143834e-05, |
| "loss": 3.9569, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1456884764633314e-05, |
| "loss": 3.9458, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1448498817122794e-05, |
| "loss": 3.9444, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1440129248416003e-05, |
| "loss": 3.9611, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.143174330090548e-05, |
| "loss": 3.9493, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.142335735339496e-05, |
| "loss": 3.9531, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.141497140588445e-05, |
| "loss": 3.9644, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.140658545837393e-05, |
| "loss": 3.9511, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.139819951086341e-05, |
| "loss": 3.9486, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.138981356335289e-05, |
| "loss": 3.9601, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.13814439946461e-05, |
| "loss": 3.9593, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.137305804713558e-05, |
| "loss": 3.9586, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.136467209962506e-05, |
| "loss": 3.949, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.135628615211454e-05, |
| "loss": 3.953, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.134790020460402e-05, |
| "loss": 3.9649, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.13395142570935e-05, |
| "loss": 3.9675, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.133112830958298e-05, |
| "loss": 3.9535, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.132274236207245e-05, |
| "loss": 3.9545, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.131437279336567e-05, |
| "loss": 3.9644, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.130598684585515e-05, |
| "loss": 3.9562, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1297600898344634e-05, |
| "loss": 3.9548, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128921495083411e-05, |
| "loss": 3.9515, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1280845382127324e-05, |
| "loss": 3.9502, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.127247581342053e-05, |
| "loss": 3.9636, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.126408986591001e-05, |
| "loss": 3.9432, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.023078918457031, |
| "eval_runtime": 294.5903, |
| "eval_samples_per_second": 1295.328, |
| "eval_steps_per_second": 40.48, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.125570391839949e-05, |
| "loss": 3.951, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.124731797088897e-05, |
| "loss": 3.9443, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.123893202337845e-05, |
| "loss": 3.9452, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1230546075867926e-05, |
| "loss": 3.9676, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1222160128357406e-05, |
| "loss": 3.9638, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1213774180846886e-05, |
| "loss": 3.95, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.120538823333637e-05, |
| "loss": 3.9584, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.119701866462958e-05, |
| "loss": 3.9498, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.11886490959228e-05, |
| "loss": 3.9504, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.118026314841228e-05, |
| "loss": 3.9588, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.117187720090176e-05, |
| "loss": 3.9534, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.116349125339123e-05, |
| "loss": 3.9581, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1155138063488175e-05, |
| "loss": 3.9668, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1146752115977655e-05, |
| "loss": 3.9458, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1138366168467135e-05, |
| "loss": 3.9438, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1129980220956615e-05, |
| "loss": 3.9371, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1121594273446095e-05, |
| "loss": 3.9543, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1113208325935575e-05, |
| "loss": 3.9537, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1104822378425055e-05, |
| "loss": 3.9483, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1096436430914535e-05, |
| "loss": 3.9507, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1088050483404015e-05, |
| "loss": 3.9672, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1079664535893495e-05, |
| "loss": 3.9546, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1071278588382975e-05, |
| "loss": 3.9572, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1062909019676184e-05, |
| "loss": 3.9524, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1054523072165664e-05, |
| "loss": 3.9632, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1046137124655144e-05, |
| "loss": 3.9486, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1037751177144624e-05, |
| "loss": 3.9497, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1029365229634104e-05, |
| "loss": 3.9461, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.102099566092731e-05, |
| "loss": 3.9451, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.101260971341679e-05, |
| "loss": 3.9489, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.100424014471001e-05, |
| "loss": 3.9487, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.099585419719949e-05, |
| "loss": 3.9503, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.098746824968897e-05, |
| "loss": 3.9542, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.097908230217845e-05, |
| "loss": 3.9542, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.097069635466793e-05, |
| "loss": 3.9592, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.096232678596114e-05, |
| "loss": 3.9549, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.095394083845062e-05, |
| "loss": 3.9579, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.09455548909401e-05, |
| "loss": 3.9338, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.093716894342958e-05, |
| "loss": 3.9593, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.092878299591906e-05, |
| "loss": 3.9335, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.092039704840854e-05, |
| "loss": 3.9499, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.091201110089802e-05, |
| "loss": 3.9508, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.09036251533875e-05, |
| "loss": 3.9585, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.089523920587698e-05, |
| "loss": 3.9443, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.088686963717019e-05, |
| "loss": 3.9576, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.087848368965967e-05, |
| "loss": 3.9578, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.087009774214915e-05, |
| "loss": 3.9558, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.086171179463863e-05, |
| "loss": 3.9547, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.085334222593184e-05, |
| "loss": 3.9285, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.084497265722505e-05, |
| "loss": 3.9492, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.083658670971453e-05, |
| "loss": 3.9602, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.082820076220401e-05, |
| "loss": 3.9536, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.081981481469349e-05, |
| "loss": 3.9494, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.081142886718297e-05, |
| "loss": 3.9396, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.080304291967245e-05, |
| "loss": 3.9434, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.079467335096567e-05, |
| "loss": 3.9325, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.078628740345515e-05, |
| "loss": 3.9597, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.077790145594463e-05, |
| "loss": 3.9305, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.076951550843411e-05, |
| "loss": 3.9542, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.076112956092359e-05, |
| "loss": 3.9527, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.075274361341306e-05, |
| "loss": 3.9408, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.074435766590254e-05, |
| "loss": 3.9469, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.073597171839202e-05, |
| "loss": 3.936, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.07275857708815e-05, |
| "loss": 3.94, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.071919982337098e-05, |
| "loss": 3.936, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.071081387586046e-05, |
| "loss": 3.9568, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.070242792834994e-05, |
| "loss": 3.9391, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.069405835964315e-05, |
| "loss": 3.9382, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0685672412132635e-05, |
| "loss": 3.9335, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0677286464622115e-05, |
| "loss": 3.943, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0668900517111595e-05, |
| "loss": 3.9558, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0660514569601075e-05, |
| "loss": 3.9493, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0652161379698014e-05, |
| "loss": 3.9405, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0643775432187494e-05, |
| "loss": 3.951, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0635389484676973e-05, |
| "loss": 3.9604, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0627003537166453e-05, |
| "loss": 3.9358, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0618617589655933e-05, |
| "loss": 3.9553, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.061023164214541e-05, |
| "loss": 3.9437, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.060186207343862e-05, |
| "loss": 3.9432, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.05934761259281e-05, |
| "loss": 3.939, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.058509017841759e-05, |
| "loss": 3.9467, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.057670423090707e-05, |
| "loss": 3.9577, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.056833466220028e-05, |
| "loss": 3.9545, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.055994871468976e-05, |
| "loss": 3.9441, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.055156276717924e-05, |
| "loss": 3.9231, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.054317681966872e-05, |
| "loss": 3.9427, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.05347908721582e-05, |
| "loss": 3.9463, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.052640492464768e-05, |
| "loss": 3.9479, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.051801897713716e-05, |
| "loss": 3.9364, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0509633029626638e-05, |
| "loss": 3.9505, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0501247082116118e-05, |
| "loss": 3.947, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.049287751340933e-05, |
| "loss": 3.9485, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.048449156589881e-05, |
| "loss": 3.9355, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.047610561838829e-05, |
| "loss": 3.95, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.046771967087777e-05, |
| "loss": 3.9378, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.045935010217098e-05, |
| "loss": 3.9566, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.045096415466046e-05, |
| "loss": 3.9407, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0442578207149943e-05, |
| "loss": 3.9317, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0434192259639423e-05, |
| "loss": 3.9472, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0425806312128896e-05, |
| "loss": 3.947, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.041743674342211e-05, |
| "loss": 3.9524, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.040905079591159e-05, |
| "loss": 3.9448, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.040066484840107e-05, |
| "loss": 3.9406, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0392278900890548e-05, |
| "loss": 3.9365, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0383892953380028e-05, |
| "loss": 3.9467, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0375507005869508e-05, |
| "loss": 3.9312, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0367137437162717e-05, |
| "loss": 3.9449, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0358751489652197e-05, |
| "loss": 3.9427, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.035036554214168e-05, |
| "loss": 3.9413, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.034197959463116e-05, |
| "loss": 3.9323, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.033361002592437e-05, |
| "loss": 3.9436, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.032522407841385e-05, |
| "loss": 3.9324, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.031683813090333e-05, |
| "loss": 3.9471, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.030845218339281e-05, |
| "loss": 3.9414, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.030006623588229e-05, |
| "loss": 3.943, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0291680288371773e-05, |
| "loss": 3.95, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.028331071966498e-05, |
| "loss": 3.9501, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.027492477215446e-05, |
| "loss": 3.946, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.026653882464394e-05, |
| "loss": 3.9374, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.025815287713342e-05, |
| "loss": 3.9448, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.02497669296229e-05, |
| "loss": 3.9434, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.024138098211238e-05, |
| "loss": 3.9362, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.023299503460186e-05, |
| "loss": 3.9481, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0224609087091345e-05, |
| "loss": 3.939, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0216223139580825e-05, |
| "loss": 3.9467, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0207853570874034e-05, |
| "loss": 3.9351, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0199484002167243e-05, |
| "loss": 3.9362, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0191098054656726e-05, |
| "loss": 3.944, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0182712107146206e-05, |
| "loss": 3.9382, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0174326159635686e-05, |
| "loss": 3.9371, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0165940212125166e-05, |
| "loss": 3.9571, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0157570643418375e-05, |
| "loss": 3.944, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0149184695907855e-05, |
| "loss": 3.9377, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0140798748397335e-05, |
| "loss": 3.948, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0132412800886815e-05, |
| "loss": 3.946, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.01240268533763e-05, |
| "loss": 3.9493, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0115657284669508e-05, |
| "loss": 3.9416, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0107271337158987e-05, |
| "loss": 3.9388, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0098885389648467e-05, |
| "loss": 3.9521, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0090499442137947e-05, |
| "loss": 3.9553, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0082113494627427e-05, |
| "loss": 3.9436, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.007374392592064e-05, |
| "loss": 3.9422, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.006535797841012e-05, |
| "loss": 3.956, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.00569720308996e-05, |
| "loss": 3.9408, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.004860246219281e-05, |
| "loss": 3.9446, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.004021651468229e-05, |
| "loss": 3.9412, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.003183056717177e-05, |
| "loss": 3.9397, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0023444619661252e-05, |
| "loss": 3.9521, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0015058672150732e-05, |
| "loss": 3.9342, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 3.0006672724640205e-05, |
| "loss": 3.939, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.017744541168213, |
| "eval_runtime": 295.6323, |
| "eval_samples_per_second": 1290.762, |
| "eval_steps_per_second": 40.337, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9998286777129685e-05, |
| "loss": 3.9309, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9989900829619165e-05, |
| "loss": 3.9332, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9981514882108645e-05, |
| "loss": 3.9562, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.997312893459813e-05, |
| "loss": 3.9532, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.996474298708761e-05, |
| "loss": 3.9406, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.995635703957709e-05, |
| "loss": 3.9477, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9947971092066568e-05, |
| "loss": 3.9378, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9939585144556048e-05, |
| "loss": 3.9345, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9931199197045528e-05, |
| "loss": 3.9511, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9922813249535008e-05, |
| "loss": 3.9419, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9914427302024488e-05, |
| "loss": 3.9503, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9906041354513968e-05, |
| "loss": 3.9498, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.989767178580718e-05, |
| "loss": 3.9414, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.988928583829666e-05, |
| "loss": 3.9346, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.988089989078614e-05, |
| "loss": 3.9251, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.987251394327562e-05, |
| "loss": 3.9393, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9864127995765097e-05, |
| "loss": 3.9434, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9855742048254577e-05, |
| "loss": 3.9392, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9847372479547793e-05, |
| "loss": 3.9391, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9838986532037266e-05, |
| "loss": 3.9604, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9830600584526746e-05, |
| "loss": 3.9435, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9822214637016226e-05, |
| "loss": 3.9444, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9813828689505706e-05, |
| "loss": 3.9417, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.980544274199519e-05, |
| "loss": 3.9576, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.979705679448467e-05, |
| "loss": 3.9327, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.978867084697415e-05, |
| "loss": 3.9389, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.978028489946363e-05, |
| "loss": 3.934, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9771915330756838e-05, |
| "loss": 3.9384, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9763529383246318e-05, |
| "loss": 3.9386, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9755143435735798e-05, |
| "loss": 3.9373, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.974675748822528e-05, |
| "loss": 3.9406, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.973837154071476e-05, |
| "loss": 3.9451, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.972998559320424e-05, |
| "loss": 3.9407, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.972159964569372e-05, |
| "loss": 3.9512, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.971323007698693e-05, |
| "loss": 3.9407, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.970484412947641e-05, |
| "loss": 3.9491, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9696474560769623e-05, |
| "loss": 3.9245, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9688088613259103e-05, |
| "loss": 3.9495, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9679702665748583e-05, |
| "loss": 3.9248, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9671316718238063e-05, |
| "loss": 3.9417, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9662930770727543e-05, |
| "loss": 3.9352, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9654544823217023e-05, |
| "loss": 3.9467, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9646158875706503e-05, |
| "loss": 3.9339, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9637772928195983e-05, |
| "loss": 3.9462, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.962938698068546e-05, |
| "loss": 3.9445, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9621017411978675e-05, |
| "loss": 3.9442, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9612631464468155e-05, |
| "loss": 3.9451, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9604261895761364e-05, |
| "loss": 3.9201, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9595875948250844e-05, |
| "loss": 3.9388, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9587490000740324e-05, |
| "loss": 3.9477, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9579104053229807e-05, |
| "loss": 3.9415, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.957071810571928e-05, |
| "loss": 3.9422, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.956233215820876e-05, |
| "loss": 3.9281, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.955394621069824e-05, |
| "loss": 3.9344, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.954556026318772e-05, |
| "loss": 3.9203, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.95371743156772e-05, |
| "loss": 3.9507, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9528788368166684e-05, |
| "loss": 3.9174, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9520402420656164e-05, |
| "loss": 3.9447, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9512016473145644e-05, |
| "loss": 3.9398, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9503663283242582e-05, |
| "loss": 3.9293, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9495277335732062e-05, |
| "loss": 3.9338, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9486891388221545e-05, |
| "loss": 3.9308, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9478505440711025e-05, |
| "loss": 3.93, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9470119493200505e-05, |
| "loss": 3.9239, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9461733545689985e-05, |
| "loss": 3.9459, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9453347598179465e-05, |
| "loss": 3.9312, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9444961650668945e-05, |
| "loss": 3.9273, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9436592081962154e-05, |
| "loss": 3.9244, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9428222513255366e-05, |
| "loss": 3.9302, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9419836565744846e-05, |
| "loss": 3.9434, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9411450618234326e-05, |
| "loss": 3.9428, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9403064670723806e-05, |
| "loss": 3.9293, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9394695102017015e-05, |
| "loss": 3.9424, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.93863091545065e-05, |
| "loss": 3.9464, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9377939585799708e-05, |
| "loss": 3.9263, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9369553638289188e-05, |
| "loss": 3.9412, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9361167690778668e-05, |
| "loss": 3.9359, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9352781743268148e-05, |
| "loss": 3.9356, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9344395795757628e-05, |
| "loss": 3.9301, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9336009848247108e-05, |
| "loss": 3.9352, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.932762390073659e-05, |
| "loss": 3.9444, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.931923795322607e-05, |
| "loss": 3.9512, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.931085200571555e-05, |
| "loss": 3.9289, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.930246605820503e-05, |
| "loss": 3.9119, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.929409648949824e-05, |
| "loss": 3.9318, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.928571054198772e-05, |
| "loss": 3.936, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.92773245944772e-05, |
| "loss": 3.9368, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9268938646966683e-05, |
| "loss": 3.9315, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9260552699456163e-05, |
| "loss": 3.9373, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9252166751945643e-05, |
| "loss": 3.9376, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9243797183238852e-05, |
| "loss": 3.9364, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9235411235728332e-05, |
| "loss": 3.9248, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9227025288217812e-05, |
| "loss": 3.9386, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.921863934070729e-05, |
| "loss": 3.9287, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.921025339319677e-05, |
| "loss": 3.9445, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9201883824489984e-05, |
| "loss": 3.9332, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9193497876979464e-05, |
| "loss": 3.9184, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9185111929468938e-05, |
| "loss": 3.9371, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.917672598195842e-05, |
| "loss": 3.9377, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.91683400344479e-05, |
| "loss": 3.9441, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.915995408693738e-05, |
| "loss": 3.9316, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.915156813942686e-05, |
| "loss": 3.9331, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.914318219191634e-05, |
| "loss": 3.924, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.913481262320955e-05, |
| "loss": 3.9383, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9126443054502762e-05, |
| "loss": 3.9198, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9118057106992242e-05, |
| "loss": 3.9308, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9109671159481722e-05, |
| "loss": 3.9337, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9101285211971202e-05, |
| "loss": 3.932, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9092899264460682e-05, |
| "loss": 3.924, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.908452969575389e-05, |
| "loss": 3.93, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9076143748243375e-05, |
| "loss": 3.9268, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9067757800732855e-05, |
| "loss": 3.9368, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9059371853222335e-05, |
| "loss": 3.9293, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9050985905711815e-05, |
| "loss": 3.9315, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9042599958201294e-05, |
| "loss": 3.9455, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9034214010690774e-05, |
| "loss": 3.9411, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9025828063180254e-05, |
| "loss": 3.9337, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9017442115669734e-05, |
| "loss": 3.9277, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9009072546962947e-05, |
| "loss": 3.9346, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9000686599452427e-05, |
| "loss": 3.9283, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8992317030745636e-05, |
| "loss": 3.9279, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8983931083235116e-05, |
| "loss": 3.9424, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8975545135724596e-05, |
| "loss": 3.9262, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8967159188214076e-05, |
| "loss": 3.9373, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8958773240703556e-05, |
| "loss": 3.9238, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.895038729319304e-05, |
| "loss": 3.9265, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.894200134568252e-05, |
| "loss": 3.9347, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8933615398172e-05, |
| "loss": 3.9288, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8925262208268937e-05, |
| "loss": 3.9223, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8916876260758417e-05, |
| "loss": 3.9488, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.89084903132479e-05, |
| "loss": 3.9344, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.890010436573738e-05, |
| "loss": 3.9312, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.889171841822686e-05, |
| "loss": 3.9319, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.888333247071634e-05, |
| "loss": 3.9335, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.887496290200955e-05, |
| "loss": 3.9392, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.886657695449903e-05, |
| "loss": 3.9335, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8858207385792242e-05, |
| "loss": 3.9286, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8849821438281722e-05, |
| "loss": 3.944, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8841435490771202e-05, |
| "loss": 3.9388, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8833049543260682e-05, |
| "loss": 3.9373, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8824663595750162e-05, |
| "loss": 3.931, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.881627764823964e-05, |
| "loss": 3.9389, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.880789170072912e-05, |
| "loss": 3.9352, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8799505753218598e-05, |
| "loss": 3.9341, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8791119805708078e-05, |
| "loss": 3.9335, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8782733858197558e-05, |
| "loss": 3.9315, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8774364289490774e-05, |
| "loss": 3.9394, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8765978341980247e-05, |
| "loss": 3.9233, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.875759239446973e-05, |
| "loss": 3.9282, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.013113498687744, |
| "eval_runtime": 297.6157, |
| "eval_samples_per_second": 1282.16, |
| "eval_steps_per_second": 40.068, |
| "step": 1297440 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 5.3610476916921645e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |