| { |
| "best_metric": 4.260278701782227, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-reconstruction/lstm/3/checkpoints/checkpoint-305280", |
| "epoch": 1.0250006060157382, |
| "eval_steps": 10, |
| "global_step": 305280, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.8204, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 7.5463, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 7.0572, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 6.992, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 6.9466, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 6.8848, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 6.7333, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 6.6274, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 6.5278, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 6.4492, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 6.3879, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 6.3324, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 6.2709, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989098268236324e-05, |
| "loss": 6.2013, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988259673485272e-05, |
| "loss": 6.1486, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.98742107873422e-05, |
| "loss": 6.086, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986582483983168e-05, |
| "loss": 6.0514, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985743889232116e-05, |
| "loss": 6.0111, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984905294481064e-05, |
| "loss": 5.9594, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984066699730012e-05, |
| "loss": 5.9279, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.8891, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.982391148108281e-05, |
| "loss": 5.8571, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.981552553357229e-05, |
| "loss": 5.828, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.980713958606178e-05, |
| "loss": 5.7935, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.979875363855125e-05, |
| "loss": 5.776, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.979036769104073e-05, |
| "loss": 5.7382, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.978198174353021e-05, |
| "loss": 5.7199, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773612174823426e-05, |
| "loss": 5.6947, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.97652262273129e-05, |
| "loss": 5.6663, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.975684027980238e-05, |
| "loss": 5.6387, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.974845433229186e-05, |
| "loss": 5.6242, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.974006838478134e-05, |
| "loss": 5.612, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.973168243727082e-05, |
| "loss": 5.5866, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.972331286856403e-05, |
| "loss": 5.5653, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714926921053515e-05, |
| "loss": 5.5573, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9706540973542995e-05, |
| "loss": 5.5454, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9698155026032475e-05, |
| "loss": 5.5198, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9689769078521955e-05, |
| "loss": 5.4864, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9681399509815164e-05, |
| "loss": 5.4823, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9673013562304644e-05, |
| "loss": 5.4489, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9664627614794124e-05, |
| "loss": 5.4544, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9656241667283604e-05, |
| "loss": 5.4369, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964787209857681e-05, |
| "loss": 5.4347, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963948615106629e-05, |
| "loss": 5.4063, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963110020355577e-05, |
| "loss": 5.4035, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962271425604525e-05, |
| "loss": 5.3896, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.961432830853473e-05, |
| "loss": 5.3818, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960595873982795e-05, |
| "loss": 5.3775, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959757279231743e-05, |
| "loss": 5.343, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958918684480691e-05, |
| "loss": 5.3416, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958080089729639e-05, |
| "loss": 5.3449, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95724313285896e-05, |
| "loss": 5.3357, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956404538107908e-05, |
| "loss": 5.3099, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955565943356856e-05, |
| "loss": 5.297, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954727348605804e-05, |
| "loss": 5.2922, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953890391735125e-05, |
| "loss": 5.2822, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530517969840727e-05, |
| "loss": 5.2933, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522132022330207e-05, |
| "loss": 5.2534, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9513746074819686e-05, |
| "loss": 5.2642, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9505360127309166e-05, |
| "loss": 5.2538, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949699055860238e-05, |
| "loss": 5.2382, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948860461109186e-05, |
| "loss": 5.2402, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948021866358134e-05, |
| "loss": 5.2133, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947183271607082e-05, |
| "loss": 5.2084, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94634467685603e-05, |
| "loss": 5.2065, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945507719985351e-05, |
| "loss": 5.218, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944669125234299e-05, |
| "loss": 5.1881, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94383216836362e-05, |
| "loss": 5.1832, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942993573612568e-05, |
| "loss": 5.1637, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942154978861516e-05, |
| "loss": 5.1706, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941316384110464e-05, |
| "loss": 5.1721, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.940477789359412e-05, |
| "loss": 5.1709, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396408324887336e-05, |
| "loss": 5.1466, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388022377376816e-05, |
| "loss": 5.1601, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379636429866296e-05, |
| "loss": 5.1599, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371250482355776e-05, |
| "loss": 5.1345, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362864534845256e-05, |
| "loss": 5.1218, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935447858733473e-05, |
| "loss": 5.114, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.934609263982421e-05, |
| "loss": 5.1038, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337723071117425e-05, |
| "loss": 5.1139, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329337123606905e-05, |
| "loss": 5.0934, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932095117609638e-05, |
| "loss": 5.1094, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931256522858586e-05, |
| "loss": 5.1001, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930417928107534e-05, |
| "loss": 5.0839, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 5.0688, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 5.0705, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 5.0713, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 5.0748, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92622823011302e-05, |
| "loss": 5.0549, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925389635361968e-05, |
| "loss": 5.0539, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924551040610916e-05, |
| "loss": 5.0498, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923712445859864e-05, |
| "loss": 5.0342, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922873851108812e-05, |
| "loss": 5.0315, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.92203525635776e-05, |
| "loss": 5.03, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921196661606708e-05, |
| "loss": 5.0216, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920359704736029e-05, |
| "loss": 5.0207, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919521109984978e-05, |
| "loss": 5.0093, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918682515233926e-05, |
| "loss": 5.0018, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917843920482874e-05, |
| "loss": 5.0083, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917005325731822e-05, |
| "loss": 5.003, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916168368861143e-05, |
| "loss": 4.9973, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.915329774110091e-05, |
| "loss": 4.9909, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914491179359039e-05, |
| "loss": 4.9808, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913652584607987e-05, |
| "loss": 4.9752, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.912813989856935e-05, |
| "loss": 4.9725, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911975395105883e-05, |
| "loss": 4.9504, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111384382352036e-05, |
| "loss": 4.9604, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9102998434841516e-05, |
| "loss": 4.9562, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9094612487330996e-05, |
| "loss": 4.959, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9086226539820476e-05, |
| "loss": 4.9439, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907784059230996e-05, |
| "loss": 4.9504, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906945464479944e-05, |
| "loss": 4.9265, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906106869728892e-05, |
| "loss": 4.9478, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9052682749778396e-05, |
| "loss": 4.9173, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9044296802267876e-05, |
| "loss": 4.9268, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903592723356109e-05, |
| "loss": 4.9273, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9027541286050565e-05, |
| "loss": 4.9219, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9019155338540045e-05, |
| "loss": 4.9183, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9010769391029525e-05, |
| "loss": 4.9056, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9002383443519005e-05, |
| "loss": 4.9103, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8993997496008485e-05, |
| "loss": 4.9138, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985611548497965e-05, |
| "loss": 4.8967, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977225600987445e-05, |
| "loss": 4.8921, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.896885603228066e-05, |
| "loss": 4.8895, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.896047008477014e-05, |
| "loss": 4.9014, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.895210051606335e-05, |
| "loss": 4.8686, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.894371456855283e-05, |
| "loss": 4.8732, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.893534499984604e-05, |
| "loss": 4.8641, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892695905233552e-05, |
| "loss": 4.8732, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918573104825e-05, |
| "loss": 4.8603, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891018715731448e-05, |
| "loss": 4.8786, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890180120980396e-05, |
| "loss": 4.8672, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889341526229344e-05, |
| "loss": 4.8588, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888502931478292e-05, |
| "loss": 4.8499, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88766433672724e-05, |
| "loss": 4.8562, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886825741976188e-05, |
| "loss": 4.8531, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8859871472251365e-05, |
| "loss": 4.8521, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8851485524740845e-05, |
| "loss": 4.8394, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8843099577230325e-05, |
| "loss": 4.8502, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8834730008523534e-05, |
| "loss": 4.8381, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8826344061013014e-05, |
| "loss": 4.8359, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8817958113502494e-05, |
| "loss": 4.8194, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88095885447957e-05, |
| "loss": 4.8344, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880120259728518e-05, |
| "loss": 4.8183, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879281664977466e-05, |
| "loss": 4.8001, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878443070226414e-05, |
| "loss": 4.8267, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877606113355735e-05, |
| "loss": 4.8134, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876767518604683e-05, |
| "loss": 4.8117, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875928923853632e-05, |
| "loss": 4.8098, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87509032910258e-05, |
| "loss": 4.7957, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.76328706741333, |
| "eval_runtime": 292.912, |
| "eval_samples_per_second": 1302.75, |
| "eval_steps_per_second": 40.712, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874251734351528e-05, |
| "loss": 4.7803, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873413139600475e-05, |
| "loss": 4.7865, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.872576182729797e-05, |
| "loss": 4.8035, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.871737587978745e-05, |
| "loss": 4.7888, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.870898993227693e-05, |
| "loss": 4.793, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.87006039847664e-05, |
| "loss": 4.7814, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.869221803725588e-05, |
| "loss": 4.7816, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683848468549096e-05, |
| "loss": 4.7666, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8675462521038576e-05, |
| "loss": 4.7698, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8667076573528056e-05, |
| "loss": 4.7717, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8658690626017536e-05, |
| "loss": 4.7832, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8650304678507016e-05, |
| "loss": 4.7748, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8641935109800225e-05, |
| "loss": 4.7599, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8633549162289705e-05, |
| "loss": 4.7534, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8625163214779185e-05, |
| "loss": 4.7428, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8616777267268665e-05, |
| "loss": 4.7446, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8608391319758145e-05, |
| "loss": 4.7568, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8600005372247625e-05, |
| "loss": 4.7431, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8591619424737105e-05, |
| "loss": 4.7391, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8583233477226585e-05, |
| "loss": 4.7551, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8574863908519794e-05, |
| "loss": 4.7432, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8566477961009274e-05, |
| "loss": 4.736, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.855810839230249e-05, |
| "loss": 4.7376, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.854972244479197e-05, |
| "loss": 4.7488, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.854133649728145e-05, |
| "loss": 4.7214, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.853295054977093e-05, |
| "loss": 4.7251, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.852458098106414e-05, |
| "loss": 4.7337, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851619503355362e-05, |
| "loss": 4.7228, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.85078090860431e-05, |
| "loss": 4.7081, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849942313853258e-05, |
| "loss": 4.7107, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849103719102206e-05, |
| "loss": 4.7248, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.848265124351154e-05, |
| "loss": 4.7158, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847426529600102e-05, |
| "loss": 4.7116, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84658793484905e-05, |
| "loss": 4.7159, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8457526158587444e-05, |
| "loss": 4.7116, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8449140211076924e-05, |
| "loss": 4.709, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8440754263566404e-05, |
| "loss": 4.691, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8432368316055884e-05, |
| "loss": 4.696, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8423982368545363e-05, |
| "loss": 4.68, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8415596421034843e-05, |
| "loss": 4.694, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840721047352432e-05, |
| "loss": 4.69, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83988245260138e-05, |
| "loss": 4.6986, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839047133611074e-05, |
| "loss": 4.6796, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838208538860022e-05, |
| "loss": 4.6909, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83736994410897e-05, |
| "loss": 4.6812, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836531349357918e-05, |
| "loss": 4.6834, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83569439248724e-05, |
| "loss": 4.6892, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834855797736188e-05, |
| "loss": 4.6591, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834017202985136e-05, |
| "loss": 4.6642, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.833178608234084e-05, |
| "loss": 4.6829, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.832340013483032e-05, |
| "loss": 4.6742, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83150141873198e-05, |
| "loss": 4.6642, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.830662823980928e-05, |
| "loss": 4.6549, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.829824229229876e-05, |
| "loss": 4.6576, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289872723591966e-05, |
| "loss": 4.6468, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281486776081446e-05, |
| "loss": 4.6732, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273100828570926e-05, |
| "loss": 4.6411, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8264714881060406e-05, |
| "loss": 4.6603, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8256345312353615e-05, |
| "loss": 4.6497, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8247959364843095e-05, |
| "loss": 4.6426, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823957341733258e-05, |
| "loss": 4.6466, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823120384862579e-05, |
| "loss": 4.6363, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.822281790111527e-05, |
| "loss": 4.6291, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821443195360475e-05, |
| "loss": 4.6366, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820604600609423e-05, |
| "loss": 4.6525, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819766005858371e-05, |
| "loss": 4.6239, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8189274111073184e-05, |
| "loss": 4.6284, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8180888163562664e-05, |
| "loss": 4.614, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817251859485588e-05, |
| "loss": 4.6278, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816413264734535e-05, |
| "loss": 4.6399, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.815574669983483e-05, |
| "loss": 4.6336, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.814736075232432e-05, |
| "loss": 4.6211, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.81389748048138e-05, |
| "loss": 4.6356, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.813058885730328e-05, |
| "loss": 4.6402, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.812220290979276e-05, |
| "loss": 4.6196, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811381696228224e-05, |
| "loss": 4.6139, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810544739357545e-05, |
| "loss": 4.6156, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809706144606493e-05, |
| "loss": 4.6071, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808867549855441e-05, |
| "loss": 4.6165, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808028955104389e-05, |
| "loss": 4.6035, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80719199823371e-05, |
| "loss": 4.617, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8063550413630307e-05, |
| "loss": 4.6154, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8055164466119786e-05, |
| "loss": 4.6105, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804677851860927e-05, |
| "loss": 4.5933, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803839257109875e-05, |
| "loss": 4.6017, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803000662358823e-05, |
| "loss": 4.6047, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802162067607771e-05, |
| "loss": 4.6103, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801323472856719e-05, |
| "loss": 4.6037, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.800484878105667e-05, |
| "loss": 4.5975, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.799647921234988e-05, |
| "loss": 4.5994, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.798809326483936e-05, |
| "loss": 4.5878, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797970731732884e-05, |
| "loss": 4.5856, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797133774862205e-05, |
| "loss": 4.5948, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.796295180111153e-05, |
| "loss": 4.5851, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795456585360101e-05, |
| "loss": 4.5889, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794617990609049e-05, |
| "loss": 4.5877, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.793779395857997e-05, |
| "loss": 4.5735, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792942438987319e-05, |
| "loss": 4.5867, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792103844236267e-05, |
| "loss": 4.5918, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791265249485215e-05, |
| "loss": 4.5825, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790426654734163e-05, |
| "loss": 4.5787, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7895880599831107e-05, |
| "loss": 4.57, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7887494652320587e-05, |
| "loss": 4.5747, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7879125083613796e-05, |
| "loss": 4.5746, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7870739136103276e-05, |
| "loss": 4.5559, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7862369567396485e-05, |
| "loss": 4.5617, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7853983619885965e-05, |
| "loss": 4.5648, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845597672375445e-05, |
| "loss": 4.5735, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837211724864925e-05, |
| "loss": 4.5607, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782882577735441e-05, |
| "loss": 4.5651, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782043982984389e-05, |
| "loss": 4.5494, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.781205388233337e-05, |
| "loss": 4.5728, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7803667934822844e-05, |
| "loss": 4.5506, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7795281987312324e-05, |
| "loss": 4.5542, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.778691241860554e-05, |
| "loss": 4.5644, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777852647109502e-05, |
| "loss": 4.5597, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777014052358449e-05, |
| "loss": 4.5582, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.776177095487771e-05, |
| "loss": 4.5448, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.775338500736719e-05, |
| "loss": 4.555, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.774499905985666e-05, |
| "loss": 4.5614, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773661311234615e-05, |
| "loss": 4.5425, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772822716483563e-05, |
| "loss": 4.5454, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771984121732511e-05, |
| "loss": 4.5422, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771145526981459e-05, |
| "loss": 4.5561, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770306932230407e-05, |
| "loss": 4.5286, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769468337479355e-05, |
| "loss": 4.5389, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768629742728303e-05, |
| "loss": 4.5273, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767791147977251e-05, |
| "loss": 4.5408, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766952553226199e-05, |
| "loss": 4.5286, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76611559635552e-05, |
| "loss": 4.5497, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765277001604468e-05, |
| "loss": 4.5429, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764438406853416e-05, |
| "loss": 4.5343, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.763599812102364e-05, |
| "loss": 4.5265, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762762855231685e-05, |
| "loss": 4.5382, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761924260480633e-05, |
| "loss": 4.5359, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7610856657295813e-05, |
| "loss": 4.541, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7602470709785293e-05, |
| "loss": 4.5203, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75941011410785e-05, |
| "loss": 4.5389, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758571519356798e-05, |
| "loss": 4.5301, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757732924605746e-05, |
| "loss": 4.5318, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756894329854694e-05, |
| "loss": 4.5181, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756057372984015e-05, |
| "loss": 4.5307, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.755218778232963e-05, |
| "loss": 4.5188, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754380183481911e-05, |
| "loss": 4.5094, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753543226611232e-05, |
| "loss": 4.5262, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75270463186018e-05, |
| "loss": 4.523, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751866037109128e-05, |
| "loss": 4.5198, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751027442358077e-05, |
| "loss": 4.5164, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.750188847607025e-05, |
| "loss": 4.5072, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.479551792144775, |
| "eval_runtime": 293.648, |
| "eval_samples_per_second": 1299.484, |
| "eval_steps_per_second": 40.61, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.749350252855973e-05, |
| "loss": 4.4923, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748511658104921e-05, |
| "loss": 4.5003, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747673063353868e-05, |
| "loss": 4.5209, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746834468602816e-05, |
| "loss": 4.5089, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745995873851764e-05, |
| "loss": 4.5136, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745157279100712e-05, |
| "loss": 4.4992, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74431868434966e-05, |
| "loss": 4.5068, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.743480089598608e-05, |
| "loss": 4.4937, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.742641494847556e-05, |
| "loss": 4.4974, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741802900096504e-05, |
| "loss": 4.5044, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740964305345452e-05, |
| "loss": 4.5089, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401257105944e-05, |
| "loss": 4.5105, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7392887537237216e-05, |
| "loss": 4.4918, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384501589726696e-05, |
| "loss": 4.4902, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7376115642216176e-05, |
| "loss": 4.4886, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7367729694705656e-05, |
| "loss": 4.4776, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7359343747195135e-05, |
| "loss": 4.495, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7350957799684615e-05, |
| "loss": 4.4832, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7342571852174095e-05, |
| "loss": 4.4853, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733418590466357e-05, |
| "loss": 4.5042, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732579995715305e-05, |
| "loss": 4.487, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731741400964253e-05, |
| "loss": 4.4865, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730904444093574e-05, |
| "loss": 4.4901, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730065849342522e-05, |
| "loss": 4.5016, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7292272545914704e-05, |
| "loss": 4.4709, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7283886598404184e-05, |
| "loss": 4.478, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7275500650893664e-05, |
| "loss": 4.487, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7267114703383144e-05, |
| "loss": 4.4817, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7258728755872624e-05, |
| "loss": 4.4674, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7250342808362104e-05, |
| "loss": 4.4659, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724197323965531e-05, |
| "loss": 4.4854, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723358729214479e-05, |
| "loss": 4.483, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722520134463427e-05, |
| "loss": 4.4735, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721681539712375e-05, |
| "loss": 4.4804, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720846220722069e-05, |
| "loss": 4.4766, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720007625971017e-05, |
| "loss": 4.473, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719169031219965e-05, |
| "loss": 4.459, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718330436468914e-05, |
| "loss": 4.4702, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717493479598235e-05, |
| "loss": 4.449, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716654884847183e-05, |
| "loss": 4.4668, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715816290096131e-05, |
| "loss": 4.4636, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714977695345079e-05, |
| "loss": 4.4712, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714139100594027e-05, |
| "loss": 4.4588, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.713300505842975e-05, |
| "loss": 4.4657, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712461911091923e-05, |
| "loss": 4.463, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711623316340871e-05, |
| "loss": 4.4588, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7107863594701916e-05, |
| "loss": 4.4719, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099477647191396e-05, |
| "loss": 4.4427, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091091699680876e-05, |
| "loss": 4.4503, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7082705752170356e-05, |
| "loss": 4.4636, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707433618346357e-05, |
| "loss": 4.4602, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706595023595305e-05, |
| "loss": 4.4514, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705756428844253e-05, |
| "loss": 4.4418, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704917834093201e-05, |
| "loss": 4.45, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704080877222522e-05, |
| "loss": 4.4314, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70324228247147e-05, |
| "loss": 4.4624, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702403687720418e-05, |
| "loss": 4.4371, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701565092969366e-05, |
| "loss": 4.4445, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700728136098687e-05, |
| "loss": 4.4453, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699889541347635e-05, |
| "loss": 4.4368, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699050946596583e-05, |
| "loss": 4.442, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698212351845531e-05, |
| "loss": 4.4331, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6973753949748525e-05, |
| "loss": 4.4289, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6965368002238005e-05, |
| "loss": 4.4313, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6956982054727485e-05, |
| "loss": 4.4503, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6948596107216965e-05, |
| "loss": 4.4289, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6940210159706445e-05, |
| "loss": 4.4291, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6931824212195925e-05, |
| "loss": 4.4173, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69234382646854e-05, |
| "loss": 4.4307, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6915068695978614e-05, |
| "loss": 4.4414, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690669912727182e-05, |
| "loss": 4.442, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68983131797613e-05, |
| "loss": 4.4267, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688992723225078e-05, |
| "loss": 4.4432, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688154128474026e-05, |
| "loss": 4.4493, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687315533722974e-05, |
| "loss": 4.427, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686476938971923e-05, |
| "loss": 4.4171, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.685639982101244e-05, |
| "loss": 4.4307, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684801387350192e-05, |
| "loss": 4.4206, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68396279259914e-05, |
| "loss": 4.4275, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683124197848087e-05, |
| "loss": 4.4153, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682285603097035e-05, |
| "loss": 4.4296, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681447008345983e-05, |
| "loss": 4.4343, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680608413594931e-05, |
| "loss": 4.423, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679769818843879e-05, |
| "loss": 4.4085, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6789328619732e-05, |
| "loss": 4.4177, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678094267222148e-05, |
| "loss": 4.4208, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677255672471097e-05, |
| "loss": 4.4291, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676417077720045e-05, |
| "loss": 4.4206, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675578482968993e-05, |
| "loss": 4.4216, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674739888217941e-05, |
| "loss": 4.4166, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673901293466889e-05, |
| "loss": 4.4095, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673062698715837e-05, |
| "loss": 4.4103, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722257418451576e-05, |
| "loss": 4.4116, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713871470941056e-05, |
| "loss": 4.411, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705485523430536e-05, |
| "loss": 4.4158, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697099575920016e-05, |
| "loss": 4.4126, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688746386016954e-05, |
| "loss": 4.3971, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6680360438506434e-05, |
| "loss": 4.4142, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667197449099592e-05, |
| "loss": 4.4195, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66635885434854e-05, |
| "loss": 4.4081, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665520259597488e-05, |
| "loss": 4.4121, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664681664846436e-05, |
| "loss": 4.3982, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663843070095384e-05, |
| "loss": 4.4058, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663004475344332e-05, |
| "loss": 4.4069, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662167518473653e-05, |
| "loss": 4.3852, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661328923722601e-05, |
| "loss": 4.3947, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660491966851922e-05, |
| "loss": 4.4023, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65965337210087e-05, |
| "loss": 4.4029, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658814777349818e-05, |
| "loss": 4.3942, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657976182598766e-05, |
| "loss": 4.4022, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657137587847714e-05, |
| "loss": 4.386, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656298993096662e-05, |
| "loss": 4.4058, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65546039834561e-05, |
| "loss": 4.3896, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6546234414749315e-05, |
| "loss": 4.3886, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6537848467238795e-05, |
| "loss": 4.4033, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529462519728275e-05, |
| "loss": 4.4008, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6521076572217755e-05, |
| "loss": 4.3983, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6512690624707234e-05, |
| "loss": 4.3893, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6504321056000444e-05, |
| "loss": 4.3921, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649595148729365e-05, |
| "loss": 4.3986, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648756553978313e-05, |
| "loss": 4.3874, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647917959227261e-05, |
| "loss": 4.3909, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647079364476209e-05, |
| "loss": 4.3843, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.646240769725157e-05, |
| "loss": 4.3946, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645402174974105e-05, |
| "loss": 4.3822, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644563580223053e-05, |
| "loss": 4.3833, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643724985472001e-05, |
| "loss": 4.3703, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642886390720949e-05, |
| "loss": 4.3903, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642047795969897e-05, |
| "loss": 4.3727, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641209201218845e-05, |
| "loss": 4.3949, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640372244348166e-05, |
| "loss": 4.3916, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639533649597114e-05, |
| "loss": 4.3851, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638695054846062e-05, |
| "loss": 4.3711, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.63785646009501e-05, |
| "loss": 4.3901, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637017865343958e-05, |
| "loss": 4.3862, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636180908473279e-05, |
| "loss": 4.3889, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635342313722228e-05, |
| "loss": 4.3756, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634503718971176e-05, |
| "loss": 4.3829, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633665124220124e-05, |
| "loss": 4.3852, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6328281673494446e-05, |
| "loss": 4.3861, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6319895725983926e-05, |
| "loss": 4.3731, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6311509778473406e-05, |
| "loss": 4.3787, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6303123830962886e-05, |
| "loss": 4.3765, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6294737883452366e-05, |
| "loss": 4.3643, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6286351935941846e-05, |
| "loss": 4.3815, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6277982367235055e-05, |
| "loss": 4.3819, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6269596419724535e-05, |
| "loss": 4.3704, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6261210472214015e-05, |
| "loss": 4.3736, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6252824524703495e-05, |
| "loss": 4.3632, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.343745231628418, |
| "eval_runtime": 328.2346, |
| "eval_samples_per_second": 1162.556, |
| "eval_steps_per_second": 36.331, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6244438577192975e-05, |
| "loss": 4.3582, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.623605262968246e-05, |
| "loss": 4.3545, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622766668217194e-05, |
| "loss": 4.3785, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621928073466142e-05, |
| "loss": 4.3649, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621091116595463e-05, |
| "loss": 4.3757, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620252521844411e-05, |
| "loss": 4.3602, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619413927093359e-05, |
| "loss": 4.3665, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61857697022268e-05, |
| "loss": 4.3528, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617738375471628e-05, |
| "loss": 4.3638, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616899780720576e-05, |
| "loss": 4.3638, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616061185969524e-05, |
| "loss": 4.3742, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615222591218472e-05, |
| "loss": 4.3716, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614385634347793e-05, |
| "loss": 4.3559, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6135470395967415e-05, |
| "loss": 4.3548, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6127084448456895e-05, |
| "loss": 4.3529, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611869850094637e-05, |
| "loss": 4.345, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6110328932239584e-05, |
| "loss": 4.3543, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6101942984729064e-05, |
| "loss": 4.3519, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6093557037218544e-05, |
| "loss": 4.3496, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.608517108970802e-05, |
| "loss": 4.3747, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.60767851421975e-05, |
| "loss": 4.3539, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606839919468698e-05, |
| "loss": 4.3519, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606001324717646e-05, |
| "loss": 4.3597, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.605162729966594e-05, |
| "loss": 4.3659, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.604325773095915e-05, |
| "loss": 4.342, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.603487178344863e-05, |
| "loss": 4.346, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.602648583593811e-05, |
| "loss": 4.3571, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.601809988842759e-05, |
| "loss": 4.354, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.60097303197208e-05, |
| "loss": 4.3363, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600134437221028e-05, |
| "loss": 4.3369, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.599297480350349e-05, |
| "loss": 4.355, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598458885599297e-05, |
| "loss": 4.3561, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597620290848245e-05, |
| "loss": 4.3498, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.596781696097193e-05, |
| "loss": 4.3503, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595944739226514e-05, |
| "loss": 4.3477, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595106144475462e-05, |
| "loss": 4.3481, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.59426754972441e-05, |
| "loss": 4.3318, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5934305928537316e-05, |
| "loss": 4.3479, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5925919981026796e-05, |
| "loss": 4.3261, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5917534033516276e-05, |
| "loss": 4.3407, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5909148086005756e-05, |
| "loss": 4.3355, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5900762138495235e-05, |
| "loss": 4.3502, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5892376190984715e-05, |
| "loss": 4.3379, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5883990243474195e-05, |
| "loss": 4.34, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5875604295963675e-05, |
| "loss": 4.3393, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5867218348453155e-05, |
| "loss": 4.342, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5858832400942635e-05, |
| "loss": 4.3474, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5850446453432115e-05, |
| "loss": 4.3233, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5842060505921595e-05, |
| "loss": 4.3271, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5833690937214804e-05, |
| "loss": 4.3436, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5825304989704284e-05, |
| "loss": 4.3392, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.581691904219377e-05, |
| "loss": 4.3292, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580853309468325e-05, |
| "loss": 4.3238, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580016352597646e-05, |
| "loss": 4.3305, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.579177757846594e-05, |
| "loss": 4.3166, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.578339163095542e-05, |
| "loss": 4.34, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577502206224863e-05, |
| "loss": 4.3169, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.576663611473811e-05, |
| "loss": 4.3267, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575825016722759e-05, |
| "loss": 4.3306, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574986421971707e-05, |
| "loss": 4.3197, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574147827220655e-05, |
| "loss": 4.3219, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.573309232469602e-05, |
| "loss": 4.3198, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.572470637718551e-05, |
| "loss": 4.312, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5716336808478725e-05, |
| "loss": 4.321, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5707950860968205e-05, |
| "loss": 4.3309, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569956491345768e-05, |
| "loss": 4.3162, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569117896594716e-05, |
| "loss": 4.3148, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.568279301843664e-05, |
| "loss": 4.3051, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.567440707092612e-05, |
| "loss": 4.3141, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.56660211234156e-05, |
| "loss": 4.3257, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.565763517590508e-05, |
| "loss": 4.3327, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564926560719829e-05, |
| "loss": 4.3141, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5640879659687767e-05, |
| "loss": 4.327, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5632493712177247e-05, |
| "loss": 4.3327, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.562412414347046e-05, |
| "loss": 4.3246, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.561573819595994e-05, |
| "loss": 4.3012, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.560735224844942e-05, |
| "loss": 4.3215, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.55989663009389e-05, |
| "loss": 4.3146, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559058035342838e-05, |
| "loss": 4.3092, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.558219440591786e-05, |
| "loss": 4.3135, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.557380845840734e-05, |
| "loss": 4.3139, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556543888970055e-05, |
| "loss": 4.324, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.555706932099376e-05, |
| "loss": 4.3156, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554868337348324e-05, |
| "loss": 4.3013, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554029742597272e-05, |
| "loss": 4.3117, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.55319114784622e-05, |
| "loss": 4.3079, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.552352553095168e-05, |
| "loss": 4.3194, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.551513958344116e-05, |
| "loss": 4.3177, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.550675363593064e-05, |
| "loss": 4.3097, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549836768842013e-05, |
| "loss": 4.3114, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5489998119713336e-05, |
| "loss": 4.2999, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5481612172202816e-05, |
| "loss": 4.3035, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5473226224692296e-05, |
| "loss": 4.3032, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5464840277181776e-05, |
| "loss": 4.306, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5456470708474985e-05, |
| "loss": 4.3085, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5448084760964465e-05, |
| "loss": 4.3157, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5439698813453945e-05, |
| "loss": 4.2901, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5431312865943425e-05, |
| "loss": 4.3089, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5422943297236634e-05, |
| "loss": 4.3164, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5414557349726114e-05, |
| "loss": 4.3068, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5406171402215594e-05, |
| "loss": 4.3039, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.539778545470508e-05, |
| "loss": 4.2956, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538939950719456e-05, |
| "loss": 4.3027, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538102993848777e-05, |
| "loss": 4.3035, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.537266036978098e-05, |
| "loss": 4.2835, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.536427442227046e-05, |
| "loss": 4.2941, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535590485356367e-05, |
| "loss": 4.2963, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.534751890605315e-05, |
| "loss": 4.3036, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533913295854263e-05, |
| "loss": 4.2878, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533074701103211e-05, |
| "loss": 4.3073, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.532236106352159e-05, |
| "loss": 4.282, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.531397511601107e-05, |
| "loss": 4.3052, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.530558916850055e-05, |
| "loss": 4.2927, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5297203220990034e-05, |
| "loss": 4.2862, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5288817273479514e-05, |
| "loss": 4.3034, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528043132596899e-05, |
| "loss": 4.3031, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.52720617572622e-05, |
| "loss": 4.2978, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526367580975168e-05, |
| "loss": 4.2921, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.525530624104489e-05, |
| "loss": 4.2948, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.524692029353437e-05, |
| "loss": 4.2978, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.523853434602385e-05, |
| "loss": 4.2866, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.523014839851333e-05, |
| "loss": 4.292, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221762451002805e-05, |
| "loss": 4.2893, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5213376503492285e-05, |
| "loss": 4.2924, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.520499055598177e-05, |
| "loss": 4.2888, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.519660460847125e-05, |
| "loss": 4.2883, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518821866096073e-05, |
| "loss": 4.2688, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517983271345021e-05, |
| "loss": 4.2931, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517144676593969e-05, |
| "loss": 4.2806, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.51630771972329e-05, |
| "loss": 4.2966, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515470762852611e-05, |
| "loss": 4.2939, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514632168101559e-05, |
| "loss": 4.2925, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513793573350507e-05, |
| "loss": 4.2748, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512954978599455e-05, |
| "loss": 4.2949, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512116383848403e-05, |
| "loss": 4.29, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.511277789097351e-05, |
| "loss": 4.2965, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.510439194346299e-05, |
| "loss": 4.2771, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.509600599595247e-05, |
| "loss": 4.2948, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5087636427245685e-05, |
| "loss": 4.2924, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5079250479735165e-05, |
| "loss": 4.2949, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5070864532224645e-05, |
| "loss": 4.2747, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5062478584714125e-05, |
| "loss": 4.2885, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5054109016007334e-05, |
| "loss": 4.2848, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5045723068496814e-05, |
| "loss": 4.276, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5037337120986294e-05, |
| "loss": 4.2854, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5028951173475774e-05, |
| "loss": 4.2895, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5020581604768983e-05, |
| "loss": 4.2807, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.501219565725846e-05, |
| "loss": 4.2798, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.500382608855168e-05, |
| "loss": 4.278, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.260278701782227, |
| "eval_runtime": 328.791, |
| "eval_samples_per_second": 1160.588, |
| "eval_steps_per_second": 36.269, |
| "step": 305280 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 1.2580195707553651e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |
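
The file above is the tail of a Hugging Face Trainer `trainer_state.json`: the `log_history` list ends with one evaluation record at step 305280, followed by trainer-level settings (the int64-max `num_train_epochs` is a sentinel written when run length is governed by `max_steps`, not a real epoch count). As an illustration only, and not part of the original artifact, the sketch below shows one way to pull the loss curve back out of such a file; the path is hypothetical and should point at wherever the checkpoint's state file actually lives.

```python
import json

# Minimal sketch: load a Trainer state file and separate training-loss points
# from evaluation points. Path is hypothetical.
with open("checkpoint-305280/trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; evaluation entries carry "eval_loss".
train_curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_curve = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"{len(train_curve)} training points, {len(eval_curve)} eval points")
print(f"best metric: {state['best_metric']:.4f} at global step {state['global_step']}")
```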