| { |
| "best_metric": 3.8620078563690186, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/transformer/0/checkpoints/checkpoint-915840", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 1984320, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 11.0137, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8341, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.2119, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 6.0082, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8466, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.7464, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.6383, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.578, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4995, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.4352, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3914, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.3525, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 5.3073, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.2526, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.2188, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.1802, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.1439, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.1211, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0914, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0601, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.0483, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.982391148108281e-05, |
| "loss": 5.015, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9955, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9687, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.9574, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790384069844466e-05, |
| "loss": 4.9285, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.9081, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8944, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.8701, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 4.8457, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748470711095595e-05, |
| "loss": 4.842, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.8301, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.8169, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 4.7977, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 4.7949, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9706557352346724e-05, |
| "loss": 4.7684, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969818778363994e-05, |
| "loss": 4.7601, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.732, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.7349, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 4.7198, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.7159, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965627442489107e-05, |
| "loss": 4.7034, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964788847738054e-05, |
| "loss": 4.7005, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963950252987002e-05, |
| "loss": 4.6799, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96311165823595e-05, |
| "loss": 4.675, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 4.6682, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96143610661422e-05, |
| "loss": 4.6537, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960597511863168e-05, |
| "loss": 4.6592, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959758917112116e-05, |
| "loss": 4.6331, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95892359812181e-05, |
| "loss": 4.6197, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958085003370758e-05, |
| "loss": 4.6318, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957246408619706e-05, |
| "loss": 4.6105, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956407813868654e-05, |
| "loss": 4.6035, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9555692191176016e-05, |
| "loss": 4.5842, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.5946, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.5705, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.6013, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5635, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951377883242715e-05, |
| "loss": 4.5753, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.5672, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949700693740611e-05, |
| "loss": 4.5416, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948862098989559e-05, |
| "loss": 4.5364, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94802514211888e-05, |
| "loss": 4.5395, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 4.5214, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 4.5161, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945509357865724e-05, |
| "loss": 4.5337, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 4.5168, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 4.5045, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942995211492941e-05, |
| "loss": 4.4875, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942156616741889e-05, |
| "loss": 4.4875, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9413196598712105e-05, |
| "loss": 4.4968, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404810651201585e-05, |
| "loss": 4.4989, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396424703691065e-05, |
| "loss": 4.488, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388038756180545e-05, |
| "loss": 4.485, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379669187473754e-05, |
| "loss": 4.481, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371283239963234e-05, |
| "loss": 4.4761, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.936291367125644e-05, |
| "loss": 4.4636, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935452772374592e-05, |
| "loss": 4.4592, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93461417762354e-05, |
| "loss": 4.456, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.933775582872488e-05, |
| "loss": 4.4445, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932936988121436e-05, |
| "loss": 4.4466, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932098393370384e-05, |
| "loss": 4.4465, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931259798619332e-05, |
| "loss": 4.4483, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930421203868281e-05, |
| "loss": 4.4316, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929584246997602e-05, |
| "loss": 4.4097, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92874565224655e-05, |
| "loss": 4.4297, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927907057495498e-05, |
| "loss": 4.4242, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927068462744446e-05, |
| "loss": 4.4232, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926231505873767e-05, |
| "loss": 4.415, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925394549003088e-05, |
| "loss": 4.4164, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924555954252036e-05, |
| "loss": 4.4042, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923717359500984e-05, |
| "loss": 4.3954, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922878764749932e-05, |
| "loss": 4.3999, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.92204016999888e-05, |
| "loss": 4.3967, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921201575247828e-05, |
| "loss": 4.4024, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920362980496776e-05, |
| "loss": 4.403, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919524385745724e-05, |
| "loss": 4.3923, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918687428875045e-05, |
| "loss": 4.3752, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917848834123993e-05, |
| "loss": 4.3779, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917010239372941e-05, |
| "loss": 4.3795, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916173282502262e-05, |
| "loss": 4.3769, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91533468775121e-05, |
| "loss": 4.3654, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914496093000158e-05, |
| "loss": 4.3664, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913657498249106e-05, |
| "loss": 4.3544, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.912818903498054e-05, |
| "loss": 4.3701, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911980308747002e-05, |
| "loss": 4.3401, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911143351876323e-05, |
| "loss": 4.3534, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910304757125272e-05, |
| "loss": 4.3541, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90946616237422e-05, |
| "loss": 4.3416, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908627567623168e-05, |
| "loss": 4.341, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907788972872115e-05, |
| "loss": 4.3431, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906950378121063e-05, |
| "loss": 4.3338, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906111783370011e-05, |
| "loss": 4.3426, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905273188618959e-05, |
| "loss": 4.3355, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90443623174828e-05, |
| "loss": 4.3277, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903597636997228e-05, |
| "loss": 4.332, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902759042246176e-05, |
| "loss": 4.3357, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9019220853754975e-05, |
| "loss": 4.3258, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9010834906244455e-05, |
| "loss": 4.3153, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9002448958733935e-05, |
| "loss": 4.3198, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8994063011223415e-05, |
| "loss": 4.32, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985693442516624e-05, |
| "loss": 4.3125, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977307495006104e-05, |
| "loss": 4.3022, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968921547495584e-05, |
| "loss": 4.31, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960535599985064e-05, |
| "loss": 4.3138, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952149652474544e-05, |
| "loss": 4.3042, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943763704964024e-05, |
| "loss": 4.3001, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935377757453504e-05, |
| "loss": 4.2906, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892700818874671e-05, |
| "loss": 4.2939, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891862224123619e-05, |
| "loss": 4.2882, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891023629372567e-05, |
| "loss": 4.3039, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890185034621515e-05, |
| "loss": 4.2945, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889346439870464e-05, |
| "loss": 4.2749, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888507845119412e-05, |
| "loss": 4.2849, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88766925036836e-05, |
| "loss": 4.2871, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886830655617308e-05, |
| "loss": 4.2812, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885993698746629e-05, |
| "loss": 4.288, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88515674187595e-05, |
| "loss": 4.2721, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884318147124898e-05, |
| "loss": 4.2796, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883479552373846e-05, |
| "loss": 4.2846, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882640957622794e-05, |
| "loss": 4.2731, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881802362871742e-05, |
| "loss": 4.2695, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88096376812069e-05, |
| "loss": 4.2705, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880125173369638e-05, |
| "loss": 4.2595, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8792882164989586e-05, |
| "loss": 4.2582, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878449621747907e-05, |
| "loss": 4.2698, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877611026996855e-05, |
| "loss": 4.2492, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876772432245803e-05, |
| "loss": 4.2542, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875935475375124e-05, |
| "loss": 4.2602, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875096880624072e-05, |
| "loss": 4.2546, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.217339515686035, |
| "eval_runtime": 302.5928, |
| "eval_samples_per_second": 1261.071, |
| "eval_steps_per_second": 39.409, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.87425828587302e-05, |
| "loss": 4.2365, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873419691121968e-05, |
| "loss": 4.2396, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.872582734251289e-05, |
| "loss": 4.2572, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.871744139500237e-05, |
| "loss": 4.2404, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.870905544749185e-05, |
| "loss": 4.2518, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.870066949998133e-05, |
| "loss": 4.2248, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.869229993127454e-05, |
| "loss": 4.2424, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683913983764027e-05, |
| "loss": 4.2282, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8675544415057236e-05, |
| "loss": 4.2261, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8667158467546716e-05, |
| "loss": 4.229, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8658772520036196e-05, |
| "loss": 4.23, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8650386572525676e-05, |
| "loss": 4.2323, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8642017003818885e-05, |
| "loss": 4.2172, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8633631056308365e-05, |
| "loss": 4.2147, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8625245108797845e-05, |
| "loss": 4.2094, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8616859161287324e-05, |
| "loss": 4.2088, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8608473213776804e-05, |
| "loss": 4.2064, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860008726626628e-05, |
| "loss": 4.2095, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8591701318755764e-05, |
| "loss": 4.207, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8583315371245244e-05, |
| "loss": 4.2304, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.857494580253846e-05, |
| "loss": 4.2055, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.856655985502793e-05, |
| "loss": 4.2099, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.855817390751741e-05, |
| "loss": 4.2033, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.854978796000689e-05, |
| "loss": 4.2085, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.854141839130011e-05, |
| "loss": 4.1989, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.853303244378958e-05, |
| "loss": 4.1964, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.852464649627906e-05, |
| "loss": 4.1958, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851626054876854e-05, |
| "loss": 4.1888, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.850787460125802e-05, |
| "loss": 4.174, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84994886537475e-05, |
| "loss": 4.1907, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849110270623698e-05, |
| "loss": 4.188, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84827331375302e-05, |
| "loss": 4.1924, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847434719001968e-05, |
| "loss": 4.1838, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.846596124250916e-05, |
| "loss": 4.1868, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845757529499864e-05, |
| "loss": 4.1774, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844918934748812e-05, |
| "loss": 4.1751, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844081977878133e-05, |
| "loss": 4.1624, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843243383127081e-05, |
| "loss": 4.1712, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842404788376029e-05, |
| "loss": 4.1673, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.841566193624977e-05, |
| "loss": 4.1666, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840727598873925e-05, |
| "loss": 4.1693, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8398906420032456e-05, |
| "loss": 4.1732, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8390520472521936e-05, |
| "loss": 4.1628, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8382134525011416e-05, |
| "loss": 4.1645, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83737485775009e-05, |
| "loss": 4.1674, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836536262999038e-05, |
| "loss": 4.1537, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.835699306128359e-05, |
| "loss": 4.1654, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834860711377307e-05, |
| "loss": 4.1507, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834022116626255e-05, |
| "loss": 4.1442, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.833183521875203e-05, |
| "loss": 4.1607, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.832344927124151e-05, |
| "loss": 4.1492, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.831507970253472e-05, |
| "loss": 4.1444, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83066937550242e-05, |
| "loss": 4.1363, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.829830780751368e-05, |
| "loss": 4.1448, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.828992186000316e-05, |
| "loss": 4.1301, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.828155229129637e-05, |
| "loss": 4.1592, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273166343785856e-05, |
| "loss": 4.1345, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8264780396275336e-05, |
| "loss": 4.1497, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8256394448764816e-05, |
| "loss": 4.1489, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8248008501254296e-05, |
| "loss": 4.1292, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8239638932547505e-05, |
| "loss": 4.1241, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8231252985036985e-05, |
| "loss": 4.1357, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8222867037526465e-05, |
| "loss": 4.1229, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8214481090015945e-05, |
| "loss": 4.1193, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820609514250542e-05, |
| "loss": 4.1369, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8197725573798634e-05, |
| "loss": 4.1346, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8189339626288114e-05, |
| "loss": 4.1205, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8180953678777594e-05, |
| "loss": 4.1128, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8172567731267074e-05, |
| "loss": 4.1155, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816419816256029e-05, |
| "loss": 4.1267, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.815581221504977e-05, |
| "loss": 4.1267, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.814742626753924e-05, |
| "loss": 4.1259, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.813904032002872e-05, |
| "loss": 4.1263, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.81306543725182e-05, |
| "loss": 4.1269, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.812226842500768e-05, |
| "loss": 4.1279, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811389885630089e-05, |
| "loss": 4.109, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810551290879037e-05, |
| "loss": 4.1175, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809712696127985e-05, |
| "loss": 4.1149, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808874101376933e-05, |
| "loss": 4.1034, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808035506625881e-05, |
| "loss": 4.1143, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.807198549755203e-05, |
| "loss": 4.1157, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806359955004151e-05, |
| "loss": 4.1173, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.805521360253099e-05, |
| "loss": 4.0993, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804682765502047e-05, |
| "loss": 4.0894, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8038458086313677e-05, |
| "loss": 4.1065, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8030072138803156e-05, |
| "loss": 4.1043, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8021686191292636e-05, |
| "loss": 4.1076, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8013300243782116e-05, |
| "loss": 4.1024, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8004930675075325e-05, |
| "loss": 4.1002, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7996544727564805e-05, |
| "loss": 4.1004, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7988158780054285e-05, |
| "loss": 4.0875, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7979772832543765e-05, |
| "loss": 4.0957, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7971386885033245e-05, |
| "loss": 4.096, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7963000937522725e-05, |
| "loss": 4.1047, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795461499001221e-05, |
| "loss": 4.1083, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794622904250169e-05, |
| "loss": 4.0997, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79378594737949e-05, |
| "loss": 4.0859, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792947352628438e-05, |
| "loss": 4.0889, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792108757877386e-05, |
| "loss": 4.0894, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791270163126334e-05, |
| "loss": 4.0977, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790433206255655e-05, |
| "loss": 4.0841, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.789594611504603e-05, |
| "loss": 4.0849, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.788756016753551e-05, |
| "loss": 4.077, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787917422002499e-05, |
| "loss": 4.0976, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.78708046513182e-05, |
| "loss": 4.0646, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.786241870380768e-05, |
| "loss": 4.0811, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7854049135100895e-05, |
| "loss": 4.0846, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845663187590375e-05, |
| "loss": 4.0732, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837277240079855e-05, |
| "loss": 4.0748, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7828891292569335e-05, |
| "loss": 4.0841, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7820505345058815e-05, |
| "loss": 4.0723, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7812119397548295e-05, |
| "loss": 4.0773, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7803733450037775e-05, |
| "loss": 4.0827, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7795363881330984e-05, |
| "loss": 4.0723, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7786977933820464e-05, |
| "loss": 4.0799, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7778591986309944e-05, |
| "loss": 4.0851, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777022241760315e-05, |
| "loss": 4.0744, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.776183647009263e-05, |
| "loss": 4.0669, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.775345052258212e-05, |
| "loss": 4.0725, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.77450645750716e-05, |
| "loss": 4.0717, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773667862756108e-05, |
| "loss": 4.0652, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772829268005055e-05, |
| "loss": 4.0625, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771990673254003e-05, |
| "loss": 4.064, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771152078502951e-05, |
| "loss": 4.0781, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770315121632272e-05, |
| "loss": 4.0652, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76947652688122e-05, |
| "loss": 4.0667, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768637932130168e-05, |
| "loss": 4.056, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767799337379116e-05, |
| "loss": 4.0581, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766962380508437e-05, |
| "loss": 4.0553, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766123785757386e-05, |
| "loss": 4.0723, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765285191006334e-05, |
| "loss": 4.0691, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764446596255282e-05, |
| "loss": 4.0453, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7636096393846026e-05, |
| "loss": 4.0553, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7627710446335506e-05, |
| "loss": 4.0633, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7619324498824986e-05, |
| "loss": 4.0585, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7610938551314466e-05, |
| "loss": 4.0663, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7602568982607675e-05, |
| "loss": 4.0516, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7594183035097155e-05, |
| "loss": 4.0587, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7585797087586635e-05, |
| "loss": 4.0637, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7577411140076115e-05, |
| "loss": 4.0558, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7569041571369324e-05, |
| "loss": 4.0522, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756065562385881e-05, |
| "loss": 4.0559, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.755226967634829e-05, |
| "loss": 4.0465, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754388372883777e-05, |
| "loss": 4.0463, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753551416013098e-05, |
| "loss": 4.0587, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.752712821262046e-05, |
| "loss": 4.0391, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751874226510994e-05, |
| "loss": 4.0465, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751035631759942e-05, |
| "loss": 4.0503, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.750198674889263e-05, |
| "loss": 4.0456, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.047860622406006, |
| "eval_runtime": 305.9529, |
| "eval_samples_per_second": 1247.221, |
| "eval_steps_per_second": 38.977, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.749360080138211e-05, |
| "loss": 4.0309, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748521485387159e-05, |
| "loss": 4.0364, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747682890636107e-05, |
| "loss": 4.0555, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746844295885055e-05, |
| "loss": 4.0358, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746005701134003e-05, |
| "loss": 4.0543, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745167106382951e-05, |
| "loss": 4.0297, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.744328511631899e-05, |
| "loss": 4.0442, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434899168808475e-05, |
| "loss": 4.0274, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426513221297955e-05, |
| "loss": 4.0388, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7418127273787435e-05, |
| "loss": 4.0343, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740974132627691e-05, |
| "loss": 4.0345, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401371757570124e-05, |
| "loss": 4.0424, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739301856766706e-05, |
| "loss": 4.024, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738463262015654e-05, |
| "loss": 4.0252, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.737624667264602e-05, |
| "loss": 4.0215, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73678607251355e-05, |
| "loss": 4.0194, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735947477762498e-05, |
| "loss": 4.0225, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735108883011446e-05, |
| "loss": 4.0259, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734270288260394e-05, |
| "loss": 4.0231, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733431693509343e-05, |
| "loss": 4.046, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732593098758291e-05, |
| "loss": 4.0243, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731754504007238e-05, |
| "loss": 4.031, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730915909256186e-05, |
| "loss": 4.0231, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730078952385508e-05, |
| "loss": 4.0298, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729240357634456e-05, |
| "loss": 4.0194, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728401762883403e-05, |
| "loss": 4.0189, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727563168132351e-05, |
| "loss": 4.0214, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726724573381299e-05, |
| "loss": 4.0126, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725887616510621e-05, |
| "loss": 3.9998, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725049021759568e-05, |
| "loss": 4.0134, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724210427008517e-05, |
| "loss": 4.0216, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723373470137838e-05, |
| "loss": 4.0177, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7225348753867856e-05, |
| "loss": 4.0177, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7216962806357336e-05, |
| "loss": 4.0183, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7208576858846816e-05, |
| "loss": 4.0071, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7200190911336296e-05, |
| "loss": 4.0068, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7191804963825775e-05, |
| "loss": 4.0014, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7183419016315255e-05, |
| "loss": 4.0025, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7175049447608465e-05, |
| "loss": 3.9989, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7166663500097945e-05, |
| "loss": 4.0029, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7158277552587424e-05, |
| "loss": 4.0071, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7149891605076904e-05, |
| "loss": 4.0133, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714152203637012e-05, |
| "loss": 4.0003, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71331360888596e-05, |
| "loss": 4.0039, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712475014134908e-05, |
| "loss": 4.008, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711636419383856e-05, |
| "loss": 3.9914, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710799462513177e-05, |
| "loss": 4.0046, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709960867762125e-05, |
| "loss": 3.9971, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709122273011073e-05, |
| "loss": 3.9826, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708283678260021e-05, |
| "loss": 4.0033, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707446721389342e-05, |
| "loss": 3.9939, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70660812663829e-05, |
| "loss": 3.9892, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705769531887238e-05, |
| "loss": 3.9868, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704932575016559e-05, |
| "loss": 3.9911, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7040939802655074e-05, |
| "loss": 3.973, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7032553855144554e-05, |
| "loss": 4.0012, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7024167907634034e-05, |
| "loss": 3.9855, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015781960123514e-05, |
| "loss": 3.9992, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7007396012612994e-05, |
| "loss": 3.9998, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6999010065102474e-05, |
| "loss": 3.9824, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6990624117591954e-05, |
| "loss": 3.975, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698225454888516e-05, |
| "loss": 3.989, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697386860137464e-05, |
| "loss": 3.9766, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696548265386412e-05, |
| "loss": 3.9717, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69570967063536e-05, |
| "loss": 3.9926, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694872713764681e-05, |
| "loss": 3.9904, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694034119013629e-05, |
| "loss": 3.9736, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693195524262577e-05, |
| "loss": 3.969, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692356929511526e-05, |
| "loss": 3.9707, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691519972640847e-05, |
| "loss": 3.9804, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690681377889795e-05, |
| "loss": 3.9854, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689842783138743e-05, |
| "loss": 3.9819, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689004188387691e-05, |
| "loss": 3.9889, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6881672315170116e-05, |
| "loss": 3.9838, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6873286367659596e-05, |
| "loss": 3.9881, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6864916798952806e-05, |
| "loss": 3.9703, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856530851442285e-05, |
| "loss": 3.9781, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6848144903931765e-05, |
| "loss": 3.9793, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6839758956421245e-05, |
| "loss": 3.9644, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6831373008910725e-05, |
| "loss": 3.9763, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6822987061400205e-05, |
| "loss": 3.9844, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681460111388969e-05, |
| "loss": 3.9813, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6806215166379165e-05, |
| "loss": 3.9654, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679784559767238e-05, |
| "loss": 3.954, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678945965016186e-05, |
| "loss": 3.9712, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678107370265134e-05, |
| "loss": 3.9642, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6772687755140814e-05, |
| "loss": 3.9774, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676431818643403e-05, |
| "loss": 3.9688, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675594861772724e-05, |
| "loss": 3.9685, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674756267021672e-05, |
| "loss": 3.9683, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.67391767227062e-05, |
| "loss": 3.9559, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673079077519568e-05, |
| "loss": 3.9645, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.672240482768516e-05, |
| "loss": 3.9633, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.671401888017464e-05, |
| "loss": 3.9773, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.670563293266412e-05, |
| "loss": 3.9814, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66972469851536e-05, |
| "loss": 3.969, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688877416446815e-05, |
| "loss": 3.9543, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668049146893629e-05, |
| "loss": 3.9646, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667210552142577e-05, |
| "loss": 3.9617, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663735952718984e-05, |
| "loss": 3.9696, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665535000520846e-05, |
| "loss": 3.9566, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664696405769794e-05, |
| "loss": 3.9632, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663857811018742e-05, |
| "loss": 3.9492, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66301921626769e-05, |
| "loss": 3.9707, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6621806215166383e-05, |
| "loss": 3.9412, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6613420267655863e-05, |
| "loss": 3.9595, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660505069894907e-05, |
| "loss": 3.9536, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659666475143855e-05, |
| "loss": 3.9568, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658827880392803e-05, |
| "loss": 3.9496, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657989285641751e-05, |
| "loss": 3.9615, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657150690890699e-05, |
| "loss": 3.9561, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65631373402002e-05, |
| "loss": 3.9526, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655475139268968e-05, |
| "loss": 3.9618, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654636544517916e-05, |
| "loss": 3.9482, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653797949766864e-05, |
| "loss": 3.9647, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652959355015812e-05, |
| "loss": 3.9705, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652122398145134e-05, |
| "loss": 3.9528, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651283803394082e-05, |
| "loss": 3.9501, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65044520864303e-05, |
| "loss": 3.9521, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649606613891978e-05, |
| "loss": 3.9519, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648768019140926e-05, |
| "loss": 3.9455, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479310622702466e-05, |
| "loss": 3.9457, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6470924675191946e-05, |
| "loss": 3.9434, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462538727681426e-05, |
| "loss": 3.961, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6454152780170906e-05, |
| "loss": 3.9508, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6445766832660386e-05, |
| "loss": 3.9495, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6437380885149866e-05, |
| "loss": 3.9393, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6428994937639346e-05, |
| "loss": 3.9428, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6420625368932555e-05, |
| "loss": 3.9395, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6412239421422035e-05, |
| "loss": 3.9585, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640386985271525e-05, |
| "loss": 3.9562, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639548390520473e-05, |
| "loss": 3.9372, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638709795769421e-05, |
| "loss": 3.9365, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637871201018369e-05, |
| "loss": 3.9507, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637032606267317e-05, |
| "loss": 3.9422, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636194011516265e-05, |
| "loss": 3.9567, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6353554167652124e-05, |
| "loss": 3.9374, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634518459894534e-05, |
| "loss": 3.9507, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633679865143482e-05, |
| "loss": 3.9479, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632841270392429e-05, |
| "loss": 3.9464, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632002675641377e-05, |
| "loss": 3.9451, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631164080890326e-05, |
| "loss": 3.9449, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630325486139274e-05, |
| "loss": 3.9369, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629486891388222e-05, |
| "loss": 3.939, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628649934517543e-05, |
| "loss": 3.9463, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.627811339766491e-05, |
| "loss": 3.9357, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626972745015439e-05, |
| "loss": 3.934, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626134150264387e-05, |
| "loss": 3.9401, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625295555513335e-05, |
| "loss": 3.9375, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9721620082855225, |
| "eval_runtime": 305.1992, |
| "eval_samples_per_second": 1250.302, |
| "eval_steps_per_second": 39.073, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.624456960762283e-05, |
| "loss": 3.929, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.623618366011231e-05, |
| "loss": 3.9271, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622779771260179e-05, |
| "loss": 3.9482, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6219428143895e-05, |
| "loss": 3.9286, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621104219638448e-05, |
| "loss": 3.9514, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620265624887396e-05, |
| "loss": 3.9201, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619428668016717e-05, |
| "loss": 3.9411, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.618590073265665e-05, |
| "loss": 3.926, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617751478514613e-05, |
| "loss": 3.9304, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616912883763561e-05, |
| "loss": 3.9317, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616074289012509e-05, |
| "loss": 3.9293, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615235694261457e-05, |
| "loss": 3.9365, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614400375271151e-05, |
| "loss": 3.9221, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.613561780520099e-05, |
| "loss": 3.921, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.612723185769047e-05, |
| "loss": 3.9229, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611884591017995e-05, |
| "loss": 3.912, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611045996266943e-05, |
| "loss": 3.9229, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.610207401515891e-05, |
| "loss": 3.9242, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.609368806764839e-05, |
| "loss": 3.9208, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.608530212013788e-05, |
| "loss": 3.943, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6076932551431087e-05, |
| "loss": 3.9277, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6068546603920566e-05, |
| "loss": 3.9334, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6060160656410046e-05, |
| "loss": 3.9226, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6051774708899526e-05, |
| "loss": 3.9271, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6043388761389006e-05, |
| "loss": 3.9194, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6035019192682215e-05, |
| "loss": 3.9214, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6026633245171695e-05, |
| "loss": 3.9215, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6018247297661175e-05, |
| "loss": 3.9122, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6009861350150655e-05, |
| "loss": 3.9075, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600147540264013e-05, |
| "loss": 3.9117, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5993105833933344e-05, |
| "loss": 3.9246, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598471988642283e-05, |
| "loss": 3.9223, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597633393891231e-05, |
| "loss": 3.9217, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5967947991401784e-05, |
| "loss": 3.9241, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5959562043891264e-05, |
| "loss": 3.9049, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5951176096380744e-05, |
| "loss": 3.9116, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5942790148870224e-05, |
| "loss": 3.9109, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.593442058016343e-05, |
| "loss": 3.9025, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.592603463265291e-05, |
| "loss": 3.9051, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.591764868514239e-05, |
| "loss": 3.9064, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590926273763187e-05, |
| "loss": 3.9142, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590089316892508e-05, |
| "loss": 3.9179, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.589250722141457e-05, |
| "loss": 3.9097, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.588412127390405e-05, |
| "loss": 3.9066, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.587573532639353e-05, |
| "loss": 3.9161, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.586734937888301e-05, |
| "loss": 3.9027, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585897981017622e-05, |
| "loss": 3.9113, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.58505938626657e-05, |
| "loss": 3.9052, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.584220791515518e-05, |
| "loss": 3.8901, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.583382196764466e-05, |
| "loss": 3.9125, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.582545239893787e-05, |
| "loss": 3.9018, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.581706645142735e-05, |
| "loss": 3.9013, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580868050391683e-05, |
| "loss": 3.891, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580029455640631e-05, |
| "loss": 3.8996, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.579192498769952e-05, |
| "loss": 3.8837, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5783539040189e-05, |
| "loss": 3.9095, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577515309267848e-05, |
| "loss": 3.8985, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.576676714516796e-05, |
| "loss": 3.9053, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575839757646117e-05, |
| "loss": 3.91, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575001162895065e-05, |
| "loss": 3.894, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574162568144013e-05, |
| "loss": 3.8832, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.573323973392961e-05, |
| "loss": 3.904, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.572485378641909e-05, |
| "loss": 3.8874, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.57164842177123e-05, |
| "loss": 3.8849, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.570809827020178e-05, |
| "loss": 3.905, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569971232269126e-05, |
| "loss": 3.9002, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569132637518074e-05, |
| "loss": 3.8874, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5682956806473956e-05, |
| "loss": 3.8861, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5674570858963436e-05, |
| "loss": 3.8805, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5666184911452916e-05, |
| "loss": 3.8895, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5657798963942396e-05, |
| "loss": 3.9009, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5649429395235605e-05, |
| "loss": 3.8985, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5641043447725085e-05, |
| "loss": 3.9004, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5632657500214565e-05, |
| "loss": 3.8943, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5624271552704045e-05, |
| "loss": 3.9078, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5615901983997254e-05, |
| "loss": 3.8829, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5607516036486734e-05, |
| "loss": 3.8962, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5599130088976214e-05, |
| "loss": 3.8919, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5590744141465694e-05, |
| "loss": 3.8785, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5582358193955174e-05, |
| "loss": 3.8968, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.557398862524839e-05, |
| "loss": 3.8969, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556560267773787e-05, |
| "loss": 3.8931, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.555721673022735e-05, |
| "loss": 3.8808, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554883078271683e-05, |
| "loss": 3.8741, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554046121401004e-05, |
| "loss": 3.8874, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.553207526649952e-05, |
| "loss": 3.8855, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5523689318989e-05, |
| "loss": 3.8926, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.551530337147848e-05, |
| "loss": 3.8812, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.550693380277169e-05, |
| "loss": 3.89, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549854785526117e-05, |
| "loss": 3.8823, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549016190775065e-05, |
| "loss": 3.8788, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548177596024013e-05, |
| "loss": 3.88, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5473406391533343e-05, |
| "loss": 3.8793, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5465020444022823e-05, |
| "loss": 3.8982, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.54566344965123e-05, |
| "loss": 3.8966, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.544824854900178e-05, |
| "loss": 3.8929, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543986260149126e-05, |
| "loss": 3.8699, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543149303278447e-05, |
| "loss": 3.8864, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.542310708527395e-05, |
| "loss": 3.8808, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.541472113776343e-05, |
| "loss": 3.8876, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.540633519025291e-05, |
| "loss": 3.8787, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.539796562154612e-05, |
| "loss": 3.8797, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.53895796740356e-05, |
| "loss": 3.8702, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538119372652508e-05, |
| "loss": 3.8918, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.537280777901456e-05, |
| "loss": 3.8638, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.536443821030778e-05, |
| "loss": 3.8789, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535605226279726e-05, |
| "loss": 3.8774, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.534766631528674e-05, |
| "loss": 3.8762, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533928036777621e-05, |
| "loss": 3.87, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5330910799069426e-05, |
| "loss": 3.8861, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5322524851558906e-05, |
| "loss": 3.8757, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5314138904048386e-05, |
| "loss": 3.8732, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.530575295653786e-05, |
| "loss": 3.8863, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5297383387831075e-05, |
| "loss": 3.8739, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5288997440320555e-05, |
| "loss": 3.8836, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5280611492810035e-05, |
| "loss": 3.8909, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.527224192410325e-05, |
| "loss": 3.8781, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526385597659273e-05, |
| "loss": 3.874, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.525547002908221e-05, |
| "loss": 3.8709, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5247084081571684e-05, |
| "loss": 3.8723, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5238698134061164e-05, |
| "loss": 3.871, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5230312186550644e-05, |
| "loss": 3.8749, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221926239040124e-05, |
| "loss": 3.8611, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5213540291529604e-05, |
| "loss": 3.8842, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.520517072282281e-05, |
| "loss": 3.8771, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.519678477531229e-05, |
| "loss": 3.8758, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518839882780177e-05, |
| "loss": 3.8615, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518001288029125e-05, |
| "loss": 3.8676, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517164331158447e-05, |
| "loss": 3.8656, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.516325736407395e-05, |
| "loss": 3.8782, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515487141656343e-05, |
| "loss": 3.8824, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514648546905291e-05, |
| "loss": 3.8684, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513811590034612e-05, |
| "loss": 3.8562, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.51297299528356e-05, |
| "loss": 3.8758, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512134400532508e-05, |
| "loss": 3.8652, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.511295805781456e-05, |
| "loss": 3.8818, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5104588489107766e-05, |
| "loss": 3.8622, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5096202541597246e-05, |
| "loss": 3.874, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5087816594086726e-05, |
| "loss": 3.8739, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5079430646576206e-05, |
| "loss": 3.8757, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507106107786942e-05, |
| "loss": 3.8717, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.50626751303589e-05, |
| "loss": 3.8691, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.505428918284838e-05, |
| "loss": 3.863, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.504590323533786e-05, |
| "loss": 3.8639, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.503753366663107e-05, |
| "loss": 3.8719, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502914771912055e-05, |
| "loss": 3.8679, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502076177161003e-05, |
| "loss": 3.8601, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.501237582409951e-05, |
| "loss": 3.8698, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.500400625539272e-05, |
| "loss": 3.8623, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.9303805828094482, |
| "eval_runtime": 305.2602, |
| "eval_samples_per_second": 1250.052, |
| "eval_steps_per_second": 39.065, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.49956203078822e-05, |
| "loss": 3.8488, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.498723436037168e-05, |
| "loss": 3.858, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497884841286116e-05, |
| "loss": 3.8743, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497046246535065e-05, |
| "loss": 3.8585, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496207651784013e-05, |
| "loss": 3.8755, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.495369057032961e-05, |
| "loss": 3.8584, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4945321001622816e-05, |
| "loss": 3.8634, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4936935054112296e-05, |
| "loss": 3.8576, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4928549106601776e-05, |
| "loss": 3.8603, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4920163159091256e-05, |
| "loss": 3.8601, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4911777211580736e-05, |
| "loss": 3.8599, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4903391264070216e-05, |
| "loss": 3.8655, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4895038074167154e-05, |
| "loss": 3.8548, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4886652126656634e-05, |
| "loss": 3.8504, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4878266179146114e-05, |
| "loss": 3.8535, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.48698802316356e-05, |
| "loss": 3.8439, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486149428412508e-05, |
| "loss": 3.8526, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.485310833661456e-05, |
| "loss": 3.8551, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.484472238910404e-05, |
| "loss": 3.8515, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.483633644159352e-05, |
| "loss": 3.8731, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4827950494082993e-05, |
| "loss": 3.8584, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481956454657247e-05, |
| "loss": 3.8635, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481117859906195e-05, |
| "loss": 3.8531, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480279265155143e-05, |
| "loss": 3.8593, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479442308284464e-05, |
| "loss": 3.8476, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478603713533412e-05, |
| "loss": 3.8588, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47776511878236e-05, |
| "loss": 3.8526, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476926524031308e-05, |
| "loss": 3.8458, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47608956716063e-05, |
| "loss": 3.8419, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475250972409578e-05, |
| "loss": 3.8435, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474412377658526e-05, |
| "loss": 3.8535, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473575420787847e-05, |
| "loss": 3.8567, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472736826036795e-05, |
| "loss": 3.8546, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471898231285743e-05, |
| "loss": 3.8557, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471059636534691e-05, |
| "loss": 3.8393, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.470221041783639e-05, |
| "loss": 3.846, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.469382447032587e-05, |
| "loss": 3.8449, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.468543852281535e-05, |
| "loss": 3.8344, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.467705257530483e-05, |
| "loss": 3.8442, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4668683006598036e-05, |
| "loss": 3.8378, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466031343789125e-05, |
| "loss": 3.8505, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465192749038073e-05, |
| "loss": 3.8521, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464354154287021e-05, |
| "loss": 3.8457, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463515559535969e-05, |
| "loss": 3.8374, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.462676964784917e-05, |
| "loss": 3.8523, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461838370033865e-05, |
| "loss": 3.837, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460999775282813e-05, |
| "loss": 3.8484, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460161180531761e-05, |
| "loss": 3.8377, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459324223661082e-05, |
| "loss": 3.8248, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.45848562891003e-05, |
| "loss": 3.8506, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457647034158978e-05, |
| "loss": 3.8385, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.456808439407926e-05, |
| "loss": 3.8374, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455971482537247e-05, |
| "loss": 3.83, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551328877861956e-05, |
| "loss": 3.83, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542942930351436e-05, |
| "loss": 3.8248, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534556982840916e-05, |
| "loss": 3.839, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4526187414134125e-05, |
| "loss": 3.8377, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4517801466623605e-05, |
| "loss": 3.8411, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4509415519113085e-05, |
| "loss": 3.8456, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4501029571602565e-05, |
| "loss": 3.8338, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4492660002895774e-05, |
| "loss": 3.8193, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4484274055385254e-05, |
| "loss": 3.8419, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4475888107874734e-05, |
| "loss": 3.8231, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4467502160364214e-05, |
| "loss": 3.8187, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445913259165742e-05, |
| "loss": 3.8454, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445074664414691e-05, |
| "loss": 3.8351, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444236069663639e-05, |
| "loss": 3.8261, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443397474912587e-05, |
| "loss": 3.8245, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442560518041908e-05, |
| "loss": 3.8155, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.441721923290856e-05, |
| "loss": 3.8275, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440883328539804e-05, |
| "loss": 3.8419, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440044733788752e-05, |
| "loss": 3.8351, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.439207776918073e-05, |
| "loss": 3.8365, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438369182167021e-05, |
| "loss": 3.8325, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437530587415969e-05, |
| "loss": 3.849, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436691992664917e-05, |
| "loss": 3.8236, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435855035794238e-05, |
| "loss": 3.8351, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4350164410431864e-05, |
| "loss": 3.8309, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4341778462921344e-05, |
| "loss": 3.812, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4333392515410824e-05, |
| "loss": 3.8407, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.432502294670403e-05, |
| "loss": 3.8346, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431663699919351e-05, |
| "loss": 3.8319, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430825105168299e-05, |
| "loss": 3.8231, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429986510417247e-05, |
| "loss": 3.812, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429149553546568e-05, |
| "loss": 3.828, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.428310958795516e-05, |
| "loss": 3.8234, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.427472364044464e-05, |
| "loss": 3.8339, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.426633769293412e-05, |
| "loss": 3.8233, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.425796812422733e-05, |
| "loss": 3.828, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424958217671682e-05, |
| "loss": 3.8221, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.42411962292063e-05, |
| "loss": 3.8243, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.423281028169577e-05, |
| "loss": 3.8186, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4224440712988986e-05, |
| "loss": 3.8202, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4216054765478466e-05, |
| "loss": 3.8366, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4207668817967946e-05, |
| "loss": 3.8384, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419928287045742e-05, |
| "loss": 3.8375, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4190913301750635e-05, |
| "loss": 3.8095, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4182527354240115e-05, |
| "loss": 3.8272, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4174141406729595e-05, |
| "loss": 3.8218, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4165771838022804e-05, |
| "loss": 3.8293, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4157385890512284e-05, |
| "loss": 3.8232, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414899994300177e-05, |
| "loss": 3.819, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4140613995491244e-05, |
| "loss": 3.8156, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4132228047980724e-05, |
| "loss": 3.832, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4123842100470204e-05, |
| "loss": 3.8057, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4115456152959684e-05, |
| "loss": 3.8194, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4107070205449164e-05, |
| "loss": 3.8153, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409870063674237e-05, |
| "loss": 3.8235, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409031468923185e-05, |
| "loss": 3.8131, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.408192874172133e-05, |
| "loss": 3.8264, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407354279421081e-05, |
| "loss": 3.821, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.406517322550402e-05, |
| "loss": 3.8141, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405678727799351e-05, |
| "loss": 3.8296, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404840133048299e-05, |
| "loss": 3.8166, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404001538297247e-05, |
| "loss": 3.8274, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403162943546195e-05, |
| "loss": 3.8323, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402325986675516e-05, |
| "loss": 3.8183, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.401487391924464e-05, |
| "loss": 3.816, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400648797173412e-05, |
| "loss": 3.819, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39981020242236e-05, |
| "loss": 3.8135, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398971607671308e-05, |
| "loss": 3.8153, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398133012920256e-05, |
| "loss": 3.8168, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3972960560495767e-05, |
| "loss": 3.8064, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3964574612985247e-05, |
| "loss": 3.8274, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3956188665474726e-05, |
| "loss": 3.8181, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3947802717964206e-05, |
| "loss": 3.8228, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3939416770453686e-05, |
| "loss": 3.8062, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393103082294317e-05, |
| "loss": 3.8103, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392264487543265e-05, |
| "loss": 3.8117, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.391427530672586e-05, |
| "loss": 3.8216, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.390588935921534e-05, |
| "loss": 3.8245, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.389750341170482e-05, |
| "loss": 3.8153, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.38891174641943e-05, |
| "loss": 3.7998, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388073151668378e-05, |
| "loss": 3.8218, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3872345569173255e-05, |
| "loss": 3.8114, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3863959621662735e-05, |
| "loss": 3.8236, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3855573674152215e-05, |
| "loss": 3.807, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3847204105445424e-05, |
| "loss": 3.8223, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383881815793491e-05, |
| "loss": 3.8188, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383043221042439e-05, |
| "loss": 3.8239, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.382204626291387e-05, |
| "loss": 3.8146, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.381367669420708e-05, |
| "loss": 3.8143, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.380529074669656e-05, |
| "loss": 3.8089, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.379690479918604e-05, |
| "loss": 3.8128, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378851885167552e-05, |
| "loss": 3.8173, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378014928296873e-05, |
| "loss": 3.8126, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.377176333545821e-05, |
| "loss": 3.8081, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.376337738794769e-05, |
| "loss": 3.8101, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375499144043717e-05, |
| "loss": 3.8127, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9055674076080322, |
| "eval_runtime": 305.0556, |
| "eval_samples_per_second": 1250.89, |
| "eval_steps_per_second": 39.091, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.374662187173038e-05, |
| "loss": 3.8064, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3738235924219865e-05, |
| "loss": 3.8013, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3729849976709345e-05, |
| "loss": 3.8201, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3721480408002554e-05, |
| "loss": 3.8049, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3713094460492034e-05, |
| "loss": 3.8198, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3704708512981514e-05, |
| "loss": 3.8055, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.369633894427473e-05, |
| "loss": 3.8087, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36879529967642e-05, |
| "loss": 3.8078, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367956704925368e-05, |
| "loss": 3.8061, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367118110174316e-05, |
| "loss": 3.8059, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.366279515423264e-05, |
| "loss": 3.8102, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.365440920672212e-05, |
| "loss": 3.8097, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.364605601681907e-05, |
| "loss": 3.8035, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.363767006930855e-05, |
| "loss": 3.7955, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362928412179803e-05, |
| "loss": 3.8039, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362089817428751e-05, |
| "loss": 3.7875, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.361251222677699e-05, |
| "loss": 3.7985, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.360412627926647e-05, |
| "loss": 3.8, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.359574033175595e-05, |
| "loss": 3.7999, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.358735438424543e-05, |
| "loss": 3.8209, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357896843673491e-05, |
| "loss": 3.8088, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357058248922439e-05, |
| "loss": 3.8129, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.356219654171387e-05, |
| "loss": 3.802, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.355381059420335e-05, |
| "loss": 3.8047, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3545441025496556e-05, |
| "loss": 3.7979, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3537055077986036e-05, |
| "loss": 3.8056, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3528669130475516e-05, |
| "loss": 3.7979, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3520283182964996e-05, |
| "loss": 3.7972, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351191361425821e-05, |
| "loss": 3.7914, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.350352766674769e-05, |
| "loss": 3.7937, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.349514171923717e-05, |
| "loss": 3.7995, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.348677215053038e-05, |
| "loss": 3.8064, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347838620301986e-05, |
| "loss": 3.8022, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347000025550934e-05, |
| "loss": 3.807, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346161430799882e-05, |
| "loss": 3.7892, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.34532283604883e-05, |
| "loss": 3.7971, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.344484241297778e-05, |
| "loss": 3.7923, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.343645646546726e-05, |
| "loss": 3.7864, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.342807051795674e-05, |
| "loss": 3.7949, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.341970094924995e-05, |
| "loss": 3.7811, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3411331380543165e-05, |
| "loss": 3.7975, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3402945433032645e-05, |
| "loss": 3.8006, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3394559485522125e-05, |
| "loss": 3.8002, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3386173538011605e-05, |
| "loss": 3.789, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3377787590501085e-05, |
| "loss": 3.803, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3369401642990565e-05, |
| "loss": 3.7836, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.336101569548004e-05, |
| "loss": 3.8005, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3352646126773254e-05, |
| "loss": 3.7887, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3344260179262734e-05, |
| "loss": 3.7686, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.333587423175221e-05, |
| "loss": 3.8043, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.332748828424169e-05, |
| "loss": 3.7885, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3319102336731174e-05, |
| "loss": 3.7938, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331073276802439e-05, |
| "loss": 3.778, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.330234682051386e-05, |
| "loss": 3.7809, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.329396087300334e-05, |
| "loss": 3.7799, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.328557492549282e-05, |
| "loss": 3.7857, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32771889779823e-05, |
| "loss": 3.7916, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326880303047178e-05, |
| "loss": 3.7871, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326041708296126e-05, |
| "loss": 3.8008, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.325203113545074e-05, |
| "loss": 3.7843, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.324367794554768e-05, |
| "loss": 3.7715, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.323529199803716e-05, |
| "loss": 3.7932, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.322690605052664e-05, |
| "loss": 3.7717, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321852010301613e-05, |
| "loss": 3.7726, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321013415550561e-05, |
| "loss": 3.7937, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320174820799509e-05, |
| "loss": 3.7877, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.319336226048457e-05, |
| "loss": 3.7764, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.318497631297405e-05, |
| "loss": 3.7774, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.317660674426726e-05, |
| "loss": 3.7704, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.316822079675674e-05, |
| "loss": 3.7778, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3159834849246217e-05, |
| "loss": 3.7934, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3151448901735697e-05, |
| "loss": 3.7876, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3143095711832635e-05, |
| "loss": 3.789, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3134709764322115e-05, |
| "loss": 3.7854, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3126323816811595e-05, |
| "loss": 3.7986, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311793786930108e-05, |
| "loss": 3.7763, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310955192179056e-05, |
| "loss": 3.7901, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310118235308377e-05, |
| "loss": 3.7838, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.309279640557325e-05, |
| "loss": 3.7643, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.308441045806273e-05, |
| "loss": 3.7919, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.307602451055221e-05, |
| "loss": 3.7886, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.306763856304169e-05, |
| "loss": 3.7852, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.30592689943349e-05, |
| "loss": 3.7779, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.305088304682438e-05, |
| "loss": 3.7692, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.304249709931386e-05, |
| "loss": 3.7737, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.303411115180334e-05, |
| "loss": 3.7784, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.302572520429282e-05, |
| "loss": 3.7873, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.30173392567823e-05, |
| "loss": 3.774, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.300895330927178e-05, |
| "loss": 3.7831, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3000583740564995e-05, |
| "loss": 3.7737, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2992197793054475e-05, |
| "loss": 3.7785, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2983811845543955e-05, |
| "loss": 3.7715, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2975425898033435e-05, |
| "loss": 3.7753, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2967039950522915e-05, |
| "loss": 3.7878, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2958670381816124e-05, |
| "loss": 3.7921, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2950284434305604e-05, |
| "loss": 3.7908, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2941898486795084e-05, |
| "loss": 3.7627, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2933512539284564e-05, |
| "loss": 3.7829, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2925126591774044e-05, |
| "loss": 3.7716, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291675702306725e-05, |
| "loss": 3.7861, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.290837107555673e-05, |
| "loss": 3.7741, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289998512804622e-05, |
| "loss": 3.7765, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.28915991805357e-05, |
| "loss": 3.7676, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.288321323302517e-05, |
| "loss": 3.7823, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.287482728551465e-05, |
| "loss": 3.7603, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.286645771680787e-05, |
| "loss": 3.7785, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.285807176929734e-05, |
| "loss": 3.7657, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284968582178682e-05, |
| "loss": 3.7796, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.28412998742763e-05, |
| "loss": 3.7702, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.283291392676578e-05, |
| "loss": 3.7757, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.282452797925526e-05, |
| "loss": 3.7763, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.281615841054847e-05, |
| "loss": 3.7731, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.280777246303796e-05, |
| "loss": 3.7793, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279938651552744e-05, |
| "loss": 3.7744, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279100056801692e-05, |
| "loss": 3.7766, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2782630999310126e-05, |
| "loss": 3.7908, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2774245051799606e-05, |
| "loss": 3.7752, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2765859104289086e-05, |
| "loss": 3.7702, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2757473156778566e-05, |
| "loss": 3.7761, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2749087209268046e-05, |
| "loss": 3.7715, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2740717640561255e-05, |
| "loss": 3.7635, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2732331693050735e-05, |
| "loss": 3.7756, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2723945745540215e-05, |
| "loss": 3.7616, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2715559798029695e-05, |
| "loss": 3.7815, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2707173850519175e-05, |
| "loss": 3.7763, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269880428181239e-05, |
| "loss": 3.7753, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269041833430187e-05, |
| "loss": 3.7661, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.268203238679135e-05, |
| "loss": 3.7652, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.267364643928083e-05, |
| "loss": 3.7696, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.266526049177031e-05, |
| "loss": 3.7794, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.265689092306352e-05, |
| "loss": 3.7736, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2648504975553e-05, |
| "loss": 3.7764, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264011902804248e-05, |
| "loss": 3.7552, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.263173308053196e-05, |
| "loss": 3.7782, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.262334713302144e-05, |
| "loss": 3.7682, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.261496118551092e-05, |
| "loss": 3.7784, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26065752380004e-05, |
| "loss": 3.7639, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.259820566929361e-05, |
| "loss": 3.782, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258981972178309e-05, |
| "loss": 3.7733, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2581433774272575e-05, |
| "loss": 3.7807, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2573064205565784e-05, |
| "loss": 3.7667, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2564678258055264e-05, |
| "loss": 3.7774, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2556292310544744e-05, |
| "loss": 3.7593, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2547906363034224e-05, |
| "loss": 3.7708, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2539536794327433e-05, |
| "loss": 3.7761, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2531150846816913e-05, |
| "loss": 3.7658, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252276489930639e-05, |
| "loss": 3.77, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251437895179587e-05, |
| "loss": 3.7631, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.250599300428535e-05, |
| "loss": 3.7712, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.889169931411743, |
| "eval_runtime": 303.7698, |
| "eval_samples_per_second": 1256.185, |
| "eval_steps_per_second": 39.257, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249762343557856e-05, |
| "loss": 3.7624, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248923748806804e-05, |
| "loss": 3.7564, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248085154055753e-05, |
| "loss": 3.7732, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2472465593047e-05, |
| "loss": 3.7624, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246409602434022e-05, |
| "loss": 3.7774, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24557100768297e-05, |
| "loss": 3.7621, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244732412931918e-05, |
| "loss": 3.7655, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243893818180865e-05, |
| "loss": 3.7665, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243056861310187e-05, |
| "loss": 3.7629, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242218266559135e-05, |
| "loss": 3.7637, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.241379671808083e-05, |
| "loss": 3.7656, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24054107705703e-05, |
| "loss": 3.7663, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239705758066725e-05, |
| "loss": 3.7625, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238867163315673e-05, |
| "loss": 3.7545, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238028568564621e-05, |
| "loss": 3.7621, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237189973813569e-05, |
| "loss": 3.745, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236351379062517e-05, |
| "loss": 3.7583, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235512784311465e-05, |
| "loss": 3.7582, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2346741895604125e-05, |
| "loss": 3.7558, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2338355948093605e-05, |
| "loss": 3.7796, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232998637938682e-05, |
| "loss": 3.7684, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23216004318763e-05, |
| "loss": 3.7718, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2313214484365774e-05, |
| "loss": 3.758, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2304828536855254e-05, |
| "loss": 3.7625, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2296442589344734e-05, |
| "loss": 3.7576, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.228807302063795e-05, |
| "loss": 3.7608, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227968707312743e-05, |
| "loss": 3.7572, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227130112561691e-05, |
| "loss": 3.7586, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226291517810639e-05, |
| "loss": 3.7475, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.225452923059587e-05, |
| "loss": 3.7532, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.224614328308535e-05, |
| "loss": 3.7557, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.223777371437856e-05, |
| "loss": 3.7677, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222938776686804e-05, |
| "loss": 3.7619, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222100181935752e-05, |
| "loss": 3.7668, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2212615871847e-05, |
| "loss": 3.7484, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.220422992433648e-05, |
| "loss": 3.7525, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219584397682596e-05, |
| "loss": 3.7549, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2187474408119174e-05, |
| "loss": 3.7469, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2179088460608654e-05, |
| "loss": 3.7506, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2170702513098134e-05, |
| "loss": 3.7468, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2162316565587614e-05, |
| "loss": 3.7564, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215394699688082e-05, |
| "loss": 3.7614, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21455610493703e-05, |
| "loss": 3.7559, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.213717510185978e-05, |
| "loss": 3.7509, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212878915434926e-05, |
| "loss": 3.7589, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212040320683874e-05, |
| "loss": 3.7498, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211201725932822e-05, |
| "loss": 3.7585, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.210364769062143e-05, |
| "loss": 3.7483, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.209526174311091e-05, |
| "loss": 3.7279, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208687579560039e-05, |
| "loss": 3.7704, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207848984808987e-05, |
| "loss": 3.7443, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207012027938309e-05, |
| "loss": 3.7537, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206173433187257e-05, |
| "loss": 3.7345, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.205334838436205e-05, |
| "loss": 3.7446, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204496243685153e-05, |
| "loss": 3.7341, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.203659286814474e-05, |
| "loss": 3.749, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.202820692063422e-05, |
| "loss": 3.7508, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20198209731237e-05, |
| "loss": 3.7516, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2011435025613177e-05, |
| "loss": 3.7602, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2003049078102657e-05, |
| "loss": 3.745, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1994679509395866e-05, |
| "loss": 3.7324, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1986293561885346e-05, |
| "loss": 3.7576, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1977907614374826e-05, |
| "loss": 3.7336, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1969521666864306e-05, |
| "loss": 3.7292, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196115209815752e-05, |
| "loss": 3.7555, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1952766150647e-05, |
| "loss": 3.7512, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194438020313648e-05, |
| "loss": 3.7351, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193599425562596e-05, |
| "loss": 3.7405, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.192762468691917e-05, |
| "loss": 3.7315, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191923873940865e-05, |
| "loss": 3.7382, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191085279189813e-05, |
| "loss": 3.7508, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1902466844387603e-05, |
| "loss": 3.7494, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189409727568082e-05, |
| "loss": 3.7533, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18857113281703e-05, |
| "loss": 3.7489, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.187732538065978e-05, |
| "loss": 3.7572, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186893943314926e-05, |
| "loss": 3.7348, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186055348563874e-05, |
| "loss": 3.7524, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1852183916931955e-05, |
| "loss": 3.7407, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1843797969421435e-05, |
| "loss": 3.7316, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.183541202191091e-05, |
| "loss": 3.7496, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182702607440039e-05, |
| "loss": 3.7513, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181864012688987e-05, |
| "loss": 3.7459, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181027055818308e-05, |
| "loss": 3.7402, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180188461067256e-05, |
| "loss": 3.7291, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.179349866316204e-05, |
| "loss": 3.7327, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178511271565152e-05, |
| "loss": 3.7424, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177674314694473e-05, |
| "loss": 3.7498, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176835719943421e-05, |
| "loss": 3.7345, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.175997125192369e-05, |
| "loss": 3.7453, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175158530441317e-05, |
| "loss": 3.7309, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174321573570638e-05, |
| "loss": 3.7434, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173482978819586e-05, |
| "loss": 3.7321, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.172644384068534e-05, |
| "loss": 3.7375, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171805789317482e-05, |
| "loss": 3.7507, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.17096719456643e-05, |
| "loss": 3.7554, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170128599815378e-05, |
| "loss": 3.7524, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.169291642944699e-05, |
| "loss": 3.7231, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.168453048193647e-05, |
| "loss": 3.744, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167614453442595e-05, |
| "loss": 3.7316, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166775858691544e-05, |
| "loss": 3.7479, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165937263940492e-05, |
| "loss": 3.7376, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1651003070698126e-05, |
| "loss": 3.7428, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1642617123187606e-05, |
| "loss": 3.7285, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1634231175677086e-05, |
| "loss": 3.7431, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1625845228166566e-05, |
| "loss": 3.7278, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1617475659459775e-05, |
| "loss": 3.7395, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1609089711949255e-05, |
| "loss": 3.7273, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1600703764438735e-05, |
| "loss": 3.7434, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1592317816928215e-05, |
| "loss": 3.7331, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1583948248221424e-05, |
| "loss": 3.7379, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1575562300710904e-05, |
| "loss": 3.735, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156717635320039e-05, |
| "loss": 3.7384, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155879040568987e-05, |
| "loss": 3.7403, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155042083698308e-05, |
| "loss": 3.736, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154203488947256e-05, |
| "loss": 3.7427, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153364894196204e-05, |
| "loss": 3.7505, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152526299445152e-05, |
| "loss": 3.7433, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151689342574473e-05, |
| "loss": 3.7339, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150850747823421e-05, |
| "loss": 3.7352, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150012153072369e-05, |
| "loss": 3.7379, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149173558321317e-05, |
| "loss": 3.7268, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148334963570265e-05, |
| "loss": 3.7388, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147498006699586e-05, |
| "loss": 3.7224, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1466594119485345e-05, |
| "loss": 3.7475, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1458208171974825e-05, |
| "loss": 3.737, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1449822224464305e-05, |
| "loss": 3.7378, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1441436276953785e-05, |
| "loss": 3.7299, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1433066708246994e-05, |
| "loss": 3.7263, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1424680760736474e-05, |
| "loss": 3.7311, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1416294813225954e-05, |
| "loss": 3.7454, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1407908865715434e-05, |
| "loss": 3.7367, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139953929700864e-05, |
| "loss": 3.7377, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139115334949812e-05, |
| "loss": 3.7191, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13827674019876e-05, |
| "loss": 3.7422, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137438145447708e-05, |
| "loss": 3.7293, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136601188577029e-05, |
| "loss": 3.7461, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.135762593825978e-05, |
| "loss": 3.7265, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134923999074926e-05, |
| "loss": 3.7425, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134085404323874e-05, |
| "loss": 3.7356, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.133248447453195e-05, |
| "loss": 3.7496, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.132409852702143e-05, |
| "loss": 3.7282, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.131571257951091e-05, |
| "loss": 3.7381, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.130732663200039e-05, |
| "loss": 3.7247, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129894068448986e-05, |
| "loss": 3.7365, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290571115783076e-05, |
| "loss": 3.7387, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1282185168272556e-05, |
| "loss": 3.7276, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1273799220762036e-05, |
| "loss": 3.7377, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1265413273251516e-05, |
| "loss": 3.7217, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125704370454473e-05, |
| "loss": 3.7382, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8784613609313965, |
| "eval_runtime": 305.086, |
| "eval_samples_per_second": 1250.765, |
| "eval_steps_per_second": 39.087, |
| "step": 534240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.124865775703421e-05, |
| "loss": 3.7319, |
| "step": 534528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1240271809523685e-05, |
| "loss": 3.7199, |
| "step": 535040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1231885862013165e-05, |
| "loss": 3.7368, |
| "step": 535552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1223499914502645e-05, |
| "loss": 3.7279, |
| "step": 536064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.121513034579586e-05, |
| "loss": 3.7421, |
| "step": 536576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1206744398285334e-05, |
| "loss": 3.7309, |
| "step": 537088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1198358450774814e-05, |
| "loss": 3.7302, |
| "step": 537600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1189972503264294e-05, |
| "loss": 3.732, |
| "step": 538112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118160293455751e-05, |
| "loss": 3.7247, |
| "step": 538624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.117321698704698e-05, |
| "loss": 3.7293, |
| "step": 539136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.116483103953647e-05, |
| "loss": 3.7299, |
| "step": 539648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.115644509202595e-05, |
| "loss": 3.733, |
| "step": 540160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1148091902122895e-05, |
| "loss": 3.7322, |
| "step": 540672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1139705954612375e-05, |
| "loss": 3.716, |
| "step": 541184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1131320007101855e-05, |
| "loss": 3.7219, |
| "step": 541696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1122934059591335e-05, |
| "loss": 3.715, |
| "step": 542208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.111454811208081e-05, |
| "loss": 3.7198, |
| "step": 542720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.110616216457029e-05, |
| "loss": 3.7279, |
| "step": 543232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.109777621705977e-05, |
| "loss": 3.719, |
| "step": 543744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108939026954925e-05, |
| "loss": 3.7428, |
| "step": 544256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108102070084246e-05, |
| "loss": 3.7339, |
| "step": 544768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.107263475333194e-05, |
| "loss": 3.7375, |
| "step": 545280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1064248805821423e-05, |
| "loss": 3.7255, |
| "step": 545792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1055862858310903e-05, |
| "loss": 3.7244, |
| "step": 546304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.104749328960411e-05, |
| "loss": 3.7296, |
| "step": 546816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103910734209359e-05, |
| "loss": 3.7229, |
| "step": 547328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103072139458307e-05, |
| "loss": 3.7226, |
| "step": 547840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.102233544707255e-05, |
| "loss": 3.7285, |
| "step": 548352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.101394949956203e-05, |
| "loss": 3.7085, |
| "step": 548864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.100557993085524e-05, |
| "loss": 3.7198, |
| "step": 549376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.099719398334472e-05, |
| "loss": 3.7218, |
| "step": 549888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.09888080358342e-05, |
| "loss": 3.7299, |
| "step": 550400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.098042208832368e-05, |
| "loss": 3.7309, |
| "step": 550912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.097205251961689e-05, |
| "loss": 3.7309, |
| "step": 551424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.096366657210638e-05, |
| "loss": 3.7152, |
| "step": 551936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.095528062459586e-05, |
| "loss": 3.7159, |
| "step": 552448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.094689467708534e-05, |
| "loss": 3.7243, |
| "step": 552960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0938525108378546e-05, |
| "loss": 3.7114, |
| "step": 553472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0930139160868026e-05, |
| "loss": 3.7199, |
| "step": 553984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0921753213357506e-05, |
| "loss": 3.7093, |
| "step": 554496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0913367265846986e-05, |
| "loss": 3.7217, |
| "step": 555008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0904997697140195e-05, |
| "loss": 3.7211, |
| "step": 555520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0896611749629675e-05, |
| "loss": 3.7225, |
| "step": 556032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0888225802119155e-05, |
| "loss": 3.7223, |
| "step": 556544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0879839854608635e-05, |
| "loss": 3.7193, |
| "step": 557056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0871470285901844e-05, |
| "loss": 3.7173, |
| "step": 557568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.086308433839133e-05, |
| "loss": 3.7254, |
| "step": 558080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.085469839088081e-05, |
| "loss": 3.7144, |
| "step": 558592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.084631244337029e-05, |
| "loss": 3.6956, |
| "step": 559104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.083792649585977e-05, |
| "loss": 3.734, |
| "step": 559616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082955692715298e-05, |
| "loss": 3.7101, |
| "step": 560128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082117097964246e-05, |
| "loss": 3.7212, |
| "step": 560640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.081278503213194e-05, |
| "loss": 3.7077, |
| "step": 561152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.080439908462142e-05, |
| "loss": 3.7085, |
| "step": 561664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.079602951591463e-05, |
| "loss": 3.6981, |
| "step": 562176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.078764356840411e-05, |
| "loss": 3.7122, |
| "step": 562688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077925762089359e-05, |
| "loss": 3.7202, |
| "step": 563200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077087167338307e-05, |
| "loss": 3.7155, |
| "step": 563712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.076248572587255e-05, |
| "loss": 3.7271, |
| "step": 564224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0754116157165764e-05, |
| "loss": 3.7185, |
| "step": 564736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0745730209655244e-05, |
| "loss": 3.6945, |
| "step": 565248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0737344262144724e-05, |
| "loss": 3.7193, |
| "step": 565760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0728958314634204e-05, |
| "loss": 3.7025, |
| "step": 566272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072058874592741e-05, |
| "loss": 3.695, |
| "step": 566784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.071220279841689e-05, |
| "loss": 3.7259, |
| "step": 567296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.070381685090637e-05, |
| "loss": 3.7151, |
| "step": 567808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.069543090339585e-05, |
| "loss": 3.7014, |
| "step": 568320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.068706133468906e-05, |
| "loss": 3.7095, |
| "step": 568832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067867538717854e-05, |
| "loss": 3.6989, |
| "step": 569344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067028943966802e-05, |
| "loss": 3.7056, |
| "step": 569856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.06619034921575e-05, |
| "loss": 3.7156, |
| "step": 570368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.065351754464698e-05, |
| "loss": 3.7159, |
| "step": 570880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.06451479759402e-05, |
| "loss": 3.7199, |
| "step": 571392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.063676202842968e-05, |
| "loss": 3.7131, |
| "step": 571904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.062837608091916e-05, |
| "loss": 3.725, |
| "step": 572416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.061999013340864e-05, |
| "loss": 3.7029, |
| "step": 572928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.061160418589812e-05, |
| "loss": 3.722, |
| "step": 573440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.060323461719133e-05, |
| "loss": 3.7065, |
| "step": 573952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.059484866968081e-05, |
| "loss": 3.7018, |
| "step": 574464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.058646272217029e-05, |
| "loss": 3.7163, |
| "step": 574976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.057807677465976e-05, |
| "loss": 3.7189, |
| "step": 575488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0569707205952976e-05, |
| "loss": 3.7136, |
| "step": 576000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0561321258442456e-05, |
| "loss": 3.7121, |
| "step": 576512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0552935310931936e-05, |
| "loss": 3.694, |
| "step": 577024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0544549363421416e-05, |
| "loss": 3.7, |
| "step": 577536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.053617979471463e-05, |
| "loss": 3.7088, |
| "step": 578048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.052779384720411e-05, |
| "loss": 3.7176, |
| "step": 578560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051940789969359e-05, |
| "loss": 3.705, |
| "step": 579072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0511021952183065e-05, |
| "loss": 3.7136, |
| "step": 579584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.050265238347628e-05, |
| "loss": 3.6993, |
| "step": 580096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.049426643596576e-05, |
| "loss": 3.7115, |
| "step": 580608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0485880488455234e-05, |
| "loss": 3.7022, |
| "step": 581120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0477494540944714e-05, |
| "loss": 3.7056, |
| "step": 581632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.046912497223793e-05, |
| "loss": 3.7173, |
| "step": 582144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.046073902472741e-05, |
| "loss": 3.7197, |
| "step": 582656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.045235307721689e-05, |
| "loss": 3.7229, |
| "step": 583168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.044396712970637e-05, |
| "loss": 3.697, |
| "step": 583680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0435597560999585e-05, |
| "loss": 3.7092, |
| "step": 584192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0427211613489065e-05, |
| "loss": 3.7019, |
| "step": 584704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.041882566597854e-05, |
| "loss": 3.7114, |
| "step": 585216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.041043971846802e-05, |
| "loss": 3.7048, |
| "step": 585728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.04020537709575e-05, |
| "loss": 3.7127, |
| "step": 586240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.039368420225071e-05, |
| "loss": 3.6985, |
| "step": 586752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.038529825474019e-05, |
| "loss": 3.7061, |
| "step": 587264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.037691230722967e-05, |
| "loss": 3.7015, |
| "step": 587776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036852635971915e-05, |
| "loss": 3.7052, |
| "step": 588288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036015679101236e-05, |
| "loss": 3.6932, |
| "step": 588800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035177084350184e-05, |
| "loss": 3.7129, |
| "step": 589312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.034338489599132e-05, |
| "loss": 3.7044, |
| "step": 589824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.033501532728454e-05, |
| "loss": 3.7071, |
| "step": 590336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.032662937977401e-05, |
| "loss": 3.7045, |
| "step": 590848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.031824343226349e-05, |
| "loss": 3.706, |
| "step": 591360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030985748475297e-05, |
| "loss": 3.7085, |
| "step": 591872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030148791604618e-05, |
| "loss": 3.7049, |
| "step": 592384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.029310196853566e-05, |
| "loss": 3.7098, |
| "step": 592896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.028471602102514e-05, |
| "loss": 3.7197, |
| "step": 593408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.027633007351462e-05, |
| "loss": 3.7066, |
| "step": 593920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.026796050480783e-05, |
| "loss": 3.7101, |
| "step": 594432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025957455729732e-05, |
| "loss": 3.7026, |
| "step": 594944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.02511886097868e-05, |
| "loss": 3.7032, |
| "step": 595456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.024280266227628e-05, |
| "loss": 3.7033, |
| "step": 595968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0234433093569486e-05, |
| "loss": 3.7062, |
| "step": 596480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0226047146058966e-05, |
| "loss": 3.6887, |
| "step": 596992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0217661198548446e-05, |
| "loss": 3.7135, |
| "step": 597504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0209275251037926e-05, |
| "loss": 3.7056, |
| "step": 598016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0200905682331135e-05, |
| "loss": 3.7111, |
| "step": 598528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0192519734820615e-05, |
| "loss": 3.6994, |
| "step": 599040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0184133787310095e-05, |
| "loss": 3.6956, |
| "step": 599552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0175747839799575e-05, |
| "loss": 3.6996, |
| "step": 600064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0167361892289055e-05, |
| "loss": 3.712, |
| "step": 600576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015899232358227e-05, |
| "loss": 3.7056, |
| "step": 601088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015060637607175e-05, |
| "loss": 3.712, |
| "step": 601600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.014222042856123e-05, |
| "loss": 3.687, |
| "step": 602112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.013383448105071e-05, |
| "loss": 3.7114, |
| "step": 602624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.012546491234392e-05, |
| "loss": 3.7004, |
| "step": 603136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01170789648334e-05, |
| "loss": 3.7135, |
| "step": 603648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010869301732288e-05, |
| "loss": 3.6977, |
| "step": 604160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010030706981236e-05, |
| "loss": 3.7082, |
| "step": 604672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009193750110557e-05, |
| "loss": 3.7045, |
| "step": 605184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.008355155359505e-05, |
| "loss": 3.7201, |
| "step": 605696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.007516560608453e-05, |
| "loss": 3.6994, |
| "step": 606208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.006677965857401e-05, |
| "loss": 3.7072, |
| "step": 606720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0058410089867224e-05, |
| "loss": 3.6983, |
| "step": 607232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0050024142356704e-05, |
| "loss": 3.7046, |
| "step": 607744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0041638194846184e-05, |
| "loss": 3.7021, |
| "step": 608256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0033252247335664e-05, |
| "loss": 3.6986, |
| "step": 608768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0024866299825144e-05, |
| "loss": 3.7082, |
| "step": 609280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.001649673111835e-05, |
| "loss": 3.6885, |
| "step": 609792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.000811078360783e-05, |
| "loss": 3.7136, |
| "step": 610304 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.871079921722412, |
| "eval_runtime": 305.6067, |
| "eval_samples_per_second": 1248.634, |
| "eval_steps_per_second": 39.021, |
| "step": 610560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999972483609731e-05, |
| "loss": 3.6932, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999133888858679e-05, |
| "loss": 3.6907, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998295294107627e-05, |
| "loss": 3.7036, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.997456699356575e-05, |
| "loss": 3.6995, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.996619742485896e-05, |
| "loss": 3.7127, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995781147734844e-05, |
| "loss": 3.6985, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994942552983792e-05, |
| "loss": 3.7015, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994103958232741e-05, |
| "loss": 3.7043, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.993267001362062e-05, |
| "loss": 3.6956, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99242840661101e-05, |
| "loss": 3.7011, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.991589811859958e-05, |
| "loss": 3.6989, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.990751217108906e-05, |
| "loss": 3.7018, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9899158981185996e-05, |
| "loss": 3.7052, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9890773033675476e-05, |
| "loss": 3.6827, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9882387086164956e-05, |
| "loss": 3.6959, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9874001138654436e-05, |
| "loss": 3.6819, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9865615191143916e-05, |
| "loss": 3.6937, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9857229243633396e-05, |
| "loss": 3.6962, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9848843296122876e-05, |
| "loss": 3.6891, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9840457348612356e-05, |
| "loss": 3.7122, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983208777990557e-05, |
| "loss": 3.706, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982370183239505e-05, |
| "loss": 3.7061, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.981531588488453e-05, |
| "loss": 3.697, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980692993737401e-05, |
| "loss": 3.6965, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979856036866722e-05, |
| "loss": 3.7027, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.97901744211567e-05, |
| "loss": 3.6904, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978178847364618e-05, |
| "loss": 3.6932, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.977340252613566e-05, |
| "loss": 3.7004, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.976501657862514e-05, |
| "loss": 3.6826, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.975664700991835e-05, |
| "loss": 3.6862, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.974826106240783e-05, |
| "loss": 3.6949, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973987511489731e-05, |
| "loss": 3.7011, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973148916738679e-05, |
| "loss": 3.6941, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9723119598680005e-05, |
| "loss": 3.7054, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9714733651169485e-05, |
| "loss": 3.686, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9706347703658965e-05, |
| "loss": 3.6873, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.969796175614844e-05, |
| "loss": 3.694, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9689592187441654e-05, |
| "loss": 3.6814, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9681206239931134e-05, |
| "loss": 3.691, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9672820292420614e-05, |
| "loss": 3.6834, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.966443434491009e-05, |
| "loss": 3.6876, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.96560647762033e-05, |
| "loss": 3.6931, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.964767882869278e-05, |
| "loss": 3.6943, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963929288118226e-05, |
| "loss": 3.694, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963090693367174e-05, |
| "loss": 3.6901, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.962253736496496e-05, |
| "loss": 3.6901, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.961415141745444e-05, |
| "loss": 3.69, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960576546994391e-05, |
| "loss": 3.6902, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959737952243339e-05, |
| "loss": 3.6649, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958900995372661e-05, |
| "loss": 3.7013, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958062400621609e-05, |
| "loss": 3.6814, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957223805870556e-05, |
| "loss": 3.6962, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956386848999878e-05, |
| "loss": 3.6796, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955548254248826e-05, |
| "loss": 3.6767, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9547096594977737e-05, |
| "loss": 3.6739, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9538710647467217e-05, |
| "loss": 3.6785, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9530324699956696e-05, |
| "loss": 3.6939, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9521938752446176e-05, |
| "loss": 3.6817, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9513552804935656e-05, |
| "loss": 3.7006, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9505166857425136e-05, |
| "loss": 3.6902, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9496797288718345e-05, |
| "loss": 3.668, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9488411341207825e-05, |
| "loss": 3.688, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9480025393697305e-05, |
| "loss": 3.6816, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9471639446186785e-05, |
| "loss": 3.6675, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9463269877479994e-05, |
| "loss": 3.6935, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9454883929969474e-05, |
| "loss": 3.686, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9446497982458954e-05, |
| "loss": 3.674, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.943811203494844e-05, |
| "loss": 3.6834, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.942974246624165e-05, |
| "loss": 3.6663, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.942135651873113e-05, |
| "loss": 3.6815, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.941297057122061e-05, |
| "loss": 3.6877, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.940458462371009e-05, |
| "loss": 3.6855, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.93962150550033e-05, |
| "loss": 3.6968, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.938782910749278e-05, |
| "loss": 3.6818, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937944315998226e-05, |
| "loss": 3.6967, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937105721247174e-05, |
| "loss": 3.6788, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.936268764376495e-05, |
| "loss": 3.6931, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.935430169625443e-05, |
| "loss": 3.6762, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.934591574874391e-05, |
| "loss": 3.6732, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9337529801233395e-05, |
| "loss": 3.6874, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9329160232526604e-05, |
| "loss": 3.6941, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9320774285016084e-05, |
| "loss": 3.6838, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9312388337505564e-05, |
| "loss": 3.685, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9304002389995044e-05, |
| "loss": 3.6624, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.929563282128825e-05, |
| "loss": 3.6707, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.928724687377773e-05, |
| "loss": 3.6847, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927886092626721e-05, |
| "loss": 3.6863, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927047497875669e-05, |
| "loss": 3.6744, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.92621054100499e-05, |
| "loss": 3.685, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.925371946253938e-05, |
| "loss": 3.6731, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924533351502886e-05, |
| "loss": 3.683, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.923694756751835e-05, |
| "loss": 3.6747, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.922857799881156e-05, |
| "loss": 3.677, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.922019205130104e-05, |
| "loss": 3.6901, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.921180610379052e-05, |
| "loss": 3.692, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.920342015628e-05, |
| "loss": 3.6956, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9195050587573206e-05, |
| "loss": 3.6687, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9186664640062686e-05, |
| "loss": 3.6808, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9178278692552166e-05, |
| "loss": 3.6771, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9169892745041646e-05, |
| "loss": 3.6829, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9161506797531126e-05, |
| "loss": 3.6775, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9153137228824335e-05, |
| "loss": 3.6822, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9144751281313815e-05, |
| "loss": 3.675, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.91363653338033e-05, |
| "loss": 3.6761, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912797938629278e-05, |
| "loss": 3.6737, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911960981758599e-05, |
| "loss": 3.6792, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911122387007547e-05, |
| "loss": 3.6657, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910283792256495e-05, |
| "loss": 3.6839, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909445197505443e-05, |
| "loss": 3.6784, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.908608240634764e-05, |
| "loss": 3.6744, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907769645883712e-05, |
| "loss": 3.6805, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90693105113266e-05, |
| "loss": 3.6748, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906092456381608e-05, |
| "loss": 3.6844, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.905255499510929e-05, |
| "loss": 3.6778, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.904416904759877e-05, |
| "loss": 3.685, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9035783100088256e-05, |
| "loss": 3.6893, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9027397152577736e-05, |
| "loss": 3.6813, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9019027583870945e-05, |
| "loss": 3.6798, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9010641636360425e-05, |
| "loss": 3.6772, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9002255688849905e-05, |
| "loss": 3.674, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8993869741339385e-05, |
| "loss": 3.6765, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8985500172632594e-05, |
| "loss": 3.6786, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8977114225122074e-05, |
| "loss": 3.6611, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8968728277611554e-05, |
| "loss": 3.688, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8960342330101034e-05, |
| "loss": 3.6784, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895197276139424e-05, |
| "loss": 3.6828, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894358681388372e-05, |
| "loss": 3.6756, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.89352008663732e-05, |
| "loss": 3.67, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.892681491886269e-05, |
| "loss": 3.6726, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.89184453501559e-05, |
| "loss": 3.686, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891005940264538e-05, |
| "loss": 3.6812, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890167345513486e-05, |
| "loss": 3.6858, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.889328750762434e-05, |
| "loss": 3.6585, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.888491793891755e-05, |
| "loss": 3.682, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.887653199140703e-05, |
| "loss": 3.6725, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.886814604389651e-05, |
| "loss": 3.688, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885976009638599e-05, |
| "loss": 3.6722, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885137414887546e-05, |
| "loss": 3.6765, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8843004580168676e-05, |
| "loss": 3.681, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8834618632658156e-05, |
| "loss": 3.6915, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.882623268514764e-05, |
| "loss": 3.673, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8817846737637116e-05, |
| "loss": 3.6824, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880947716893033e-05, |
| "loss": 3.6723, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880109122141981e-05, |
| "loss": 3.6825, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8792705273909285e-05, |
| "loss": 3.6732, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8784319326398765e-05, |
| "loss": 3.6731, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8775933378888245e-05, |
| "loss": 3.6789, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.876756381018146e-05, |
| "loss": 3.6669, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8759177862670934e-05, |
| "loss": 3.6834, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8677775859832764, |
| "eval_runtime": 323.4188, |
| "eval_samples_per_second": 1179.867, |
| "eval_steps_per_second": 36.872, |
| "step": 686880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8750791915160414e-05, |
| "loss": 3.6703, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8742405967649894e-05, |
| "loss": 3.663, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.873403639894311e-05, |
| "loss": 3.6759, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.872565045143259e-05, |
| "loss": 3.6738, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.871726450392207e-05, |
| "loss": 3.6877, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870887855641155e-05, |
| "loss": 3.674, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870050898770476e-05, |
| "loss": 3.6699, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869212304019424e-05, |
| "loss": 3.6799, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.868373709268372e-05, |
| "loss": 3.6698, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86753511451732e-05, |
| "loss": 3.674, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.866698157646641e-05, |
| "loss": 3.6736, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865859562895589e-05, |
| "loss": 3.6758, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865024243905283e-05, |
| "loss": 3.6779, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864185649154232e-05, |
| "loss": 3.6602, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86334705440318e-05, |
| "loss": 3.6686, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.862508459652128e-05, |
| "loss": 3.6543, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.861669864901076e-05, |
| "loss": 3.6675, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.860831270150023e-05, |
| "loss": 3.6742, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859992675398971e-05, |
| "loss": 3.6606, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859154080647919e-05, |
| "loss": 3.6852, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.858315485896867e-05, |
| "loss": 3.6803, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.857476891145815e-05, |
| "loss": 3.6772, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.856638296394763e-05, |
| "loss": 3.6733, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.855799701643711e-05, |
| "loss": 3.6711, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854962744773032e-05, |
| "loss": 3.6733, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.85412415002198e-05, |
| "loss": 3.6711, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.853285555270929e-05, |
| "loss": 3.6599, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.852446960519877e-05, |
| "loss": 3.6774, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.851610003649198e-05, |
| "loss": 3.6542, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.850771408898146e-05, |
| "loss": 3.6637, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849932814147094e-05, |
| "loss": 3.67, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849094219396042e-05, |
| "loss": 3.6722, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8482572625253626e-05, |
| "loss": 3.6697, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8474186677743106e-05, |
| "loss": 3.677, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8465800730232586e-05, |
| "loss": 3.6648, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8457414782722066e-05, |
| "loss": 3.6599, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8449045214015275e-05, |
| "loss": 3.6714, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8440659266504755e-05, |
| "loss": 3.6544, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843227331899424e-05, |
| "loss": 3.6639, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.842388737148372e-05, |
| "loss": 3.6653, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.841551780277693e-05, |
| "loss": 3.6638, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.840713185526641e-05, |
| "loss": 3.6668, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839874590775589e-05, |
| "loss": 3.6704, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839035996024537e-05, |
| "loss": 3.6697, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838197401273485e-05, |
| "loss": 3.6652, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.837360444402806e-05, |
| "loss": 3.663, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.836521849651754e-05, |
| "loss": 3.6672, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.835683254900702e-05, |
| "loss": 3.6661, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.83484466014965e-05, |
| "loss": 3.6432, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.834007703278971e-05, |
| "loss": 3.675, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8331691085279195e-05, |
| "loss": 3.659, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8323305137768675e-05, |
| "loss": 3.6648, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8314935569061885e-05, |
| "loss": 3.6555, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8306549621551364e-05, |
| "loss": 3.653, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8298163674040844e-05, |
| "loss": 3.6505, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8289777726530324e-05, |
| "loss": 3.6541, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8281391779019804e-05, |
| "loss": 3.671, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8273005831509284e-05, |
| "loss": 3.6559, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8264619883998764e-05, |
| "loss": 3.6762, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8256233936488244e-05, |
| "loss": 3.6671, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.824786436778145e-05, |
| "loss": 3.6414, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823947842027093e-05, |
| "loss": 3.661, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823109247276041e-05, |
| "loss": 3.6527, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822270652524989e-05, |
| "loss": 3.6471, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.821433695654311e-05, |
| "loss": 3.6657, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.820595100903259e-05, |
| "loss": 3.6603, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.819756506152207e-05, |
| "loss": 3.6513, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818917911401154e-05, |
| "loss": 3.659, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818080954530476e-05, |
| "loss": 3.6452, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.817242359779424e-05, |
| "loss": 3.6502, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.816403765028371e-05, |
| "loss": 3.6651, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.815565170277319e-05, |
| "loss": 3.661, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.814728213406641e-05, |
| "loss": 3.6698, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813889618655589e-05, |
| "loss": 3.6575, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813051023904537e-05, |
| "loss": 3.6735, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.812212429153485e-05, |
| "loss": 3.6513, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.811375472282806e-05, |
| "loss": 3.6732, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.810536877531754e-05, |
| "loss": 3.6497, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8096982827807016e-05, |
| "loss": 3.6521, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8088596880296496e-05, |
| "loss": 3.6587, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.808022731158971e-05, |
| "loss": 3.669, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8071841364079185e-05, |
| "loss": 3.6601, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8063455416568665e-05, |
| "loss": 3.6628, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8055069469058145e-05, |
| "loss": 3.6396, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804669990035136e-05, |
| "loss": 3.6432, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8038313952840834e-05, |
| "loss": 3.6626, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802992800533032e-05, |
| "loss": 3.6597, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.80215420578198e-05, |
| "loss": 3.6514, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8013172489113016e-05, |
| "loss": 3.6598, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800478654160249e-05, |
| "loss": 3.6555, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799640059409197e-05, |
| "loss": 3.6535, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.798801464658145e-05, |
| "loss": 3.6505, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797964507787466e-05, |
| "loss": 3.6511, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797125913036414e-05, |
| "loss": 3.6668, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.796287318285362e-05, |
| "loss": 3.6686, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.79544872353431e-05, |
| "loss": 3.6691, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.794611766663631e-05, |
| "loss": 3.6454, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.793773171912579e-05, |
| "loss": 3.6583, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7929345771615274e-05, |
| "loss": 3.6517, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7920959824104754e-05, |
| "loss": 3.6597, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.791259025539796e-05, |
| "loss": 3.655, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.790420430788744e-05, |
| "loss": 3.6545, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.789581836037692e-05, |
| "loss": 3.653, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.78874324128664e-05, |
| "loss": 3.6501, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.787906284415961e-05, |
| "loss": 3.6518, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.787067689664909e-05, |
| "loss": 3.6541, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.786229094913857e-05, |
| "loss": 3.6449, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.785390500162805e-05, |
| "loss": 3.6586, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.784553543292126e-05, |
| "loss": 3.6534, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.783714948541074e-05, |
| "loss": 3.65, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782876353790023e-05, |
| "loss": 3.6536, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782039396919344e-05, |
| "loss": 3.6509, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.781200802168292e-05, |
| "loss": 3.6597, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.78036220741724e-05, |
| "loss": 3.6526, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.779523612666188e-05, |
| "loss": 3.6615, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7786866557955086e-05, |
| "loss": 3.6647, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7778480610444566e-05, |
| "loss": 3.6585, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7770094662934046e-05, |
| "loss": 3.6547, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7761708715423526e-05, |
| "loss": 3.6571, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7753322767913006e-05, |
| "loss": 3.6487, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7744953199206215e-05, |
| "loss": 3.6543, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7736567251695695e-05, |
| "loss": 3.6518, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.772818130418518e-05, |
| "loss": 3.6398, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771979535667466e-05, |
| "loss": 3.6639, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771142578796787e-05, |
| "loss": 3.657, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.770303984045735e-05, |
| "loss": 3.6574, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.769465389294683e-05, |
| "loss": 3.6509, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.768626794543631e-05, |
| "loss": 3.6425, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.767789837672952e-05, |
| "loss": 3.6489, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7669512429219e-05, |
| "loss": 3.6607, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766112648170848e-05, |
| "loss": 3.6562, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765274053419796e-05, |
| "loss": 3.6636, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.764437096549117e-05, |
| "loss": 3.6355, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.763598501798065e-05, |
| "loss": 3.6622, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7627599070470135e-05, |
| "loss": 3.6487, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7619213122959615e-05, |
| "loss": 3.6618, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7610843554252824e-05, |
| "loss": 3.6506, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7602457606742304e-05, |
| "loss": 3.6472, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7594071659231784e-05, |
| "loss": 3.6567, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7585685711721264e-05, |
| "loss": 3.6715, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.757731614301447e-05, |
| "loss": 3.6486, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756893019550395e-05, |
| "loss": 3.66, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756054424799343e-05, |
| "loss": 3.648, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755215830048291e-05, |
| "loss": 3.6551, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.754378873177612e-05, |
| "loss": 3.6478, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75354027842656e-05, |
| "loss": 3.651, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.752701683675509e-05, |
| "loss": 3.6547, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751863088924457e-05, |
| "loss": 3.6444, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751026132053778e-05, |
| "loss": 3.662, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.865143299102783, |
| "eval_runtime": 331.6266, |
| "eval_samples_per_second": 1150.664, |
| "eval_steps_per_second": 35.959, |
| "step": 763200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.750187537302726e-05, |
| "loss": 3.6419, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.749348942551674e-05, |
| "loss": 3.6391, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.748510347800622e-05, |
| "loss": 3.6515, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.74767175304957e-05, |
| "loss": 3.6506, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.746834796178891e-05, |
| "loss": 3.6645, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.745996201427839e-05, |
| "loss": 3.6494, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7451592445571596e-05, |
| "loss": 3.6466, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7443206498061076e-05, |
| "loss": 3.6591, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7434820550550556e-05, |
| "loss": 3.6454, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.742643460304004e-05, |
| "loss": 3.6434, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.741804865552952e-05, |
| "loss": 3.6552, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7409662708019e-05, |
| "loss": 3.652, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.740129313931221e-05, |
| "loss": 3.6572, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.739290719180169e-05, |
| "loss": 3.6366, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.738452124429117e-05, |
| "loss": 3.6429, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.737613529678065e-05, |
| "loss": 3.6307, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.736774934927013e-05, |
| "loss": 3.6462, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735936340175961e-05, |
| "loss": 3.6515, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735097745424909e-05, |
| "loss": 3.6355, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7342591506738564e-05, |
| "loss": 3.6614, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.733422193803178e-05, |
| "loss": 3.6601, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.732583599052126e-05, |
| "loss": 3.6539, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.731745004301074e-05, |
| "loss": 3.6514, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.730906409550022e-05, |
| "loss": 3.6461, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7300694526793436e-05, |
| "loss": 3.6523, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7292308579282916e-05, |
| "loss": 3.6472, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.728392263177239e-05, |
| "loss": 3.639, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.727553668426187e-05, |
| "loss": 3.6547, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.726715073675135e-05, |
| "loss": 3.6343, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7258781168044565e-05, |
| "loss": 3.6371, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.725039522053404e-05, |
| "loss": 3.6482, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724200927302352e-05, |
| "loss": 3.6476, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7233623325513e-05, |
| "loss": 3.6513, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7225253756806214e-05, |
| "loss": 3.6527, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7216867809295694e-05, |
| "loss": 3.6396, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7208481861785174e-05, |
| "loss": 3.6418, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7200095914274654e-05, |
| "loss": 3.6455, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.719172634556786e-05, |
| "loss": 3.632, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.718334039805734e-05, |
| "loss": 3.6404, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.717495445054682e-05, |
| "loss": 3.6414, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.71665685030363e-05, |
| "loss": 3.6363, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.715819893432951e-05, |
| "loss": 3.647, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714981298681899e-05, |
| "loss": 3.652, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714142703930847e-05, |
| "loss": 3.6429, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.713304109179795e-05, |
| "loss": 3.6436, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.712465514428743e-05, |
| "loss": 3.6387, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.711626919677691e-05, |
| "loss": 3.6446, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.71078832492664e-05, |
| "loss": 3.6409, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709951368055961e-05, |
| "loss": 3.623, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709112773304909e-05, |
| "loss": 3.6512, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.708274178553857e-05, |
| "loss": 3.6343, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.707435583802805e-05, |
| "loss": 3.6459, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7065986269321256e-05, |
| "loss": 3.6358, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7057600321810736e-05, |
| "loss": 3.6264, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7049214374300216e-05, |
| "loss": 3.6266, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7040828426789696e-05, |
| "loss": 3.6301, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7032442479279176e-05, |
| "loss": 3.6505, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7024056531768656e-05, |
| "loss": 3.6318, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7015670584258136e-05, |
| "loss": 3.6541, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7007284636747616e-05, |
| "loss": 3.6457, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699891506804083e-05, |
| "loss": 3.6214, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699052912053031e-05, |
| "loss": 3.6376, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.698214317301979e-05, |
| "loss": 3.6311, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.697375722550927e-05, |
| "loss": 3.6249, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.696538765680248e-05, |
| "loss": 3.6418, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.695700170929196e-05, |
| "loss": 3.6408, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694861576178144e-05, |
| "loss": 3.6265, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694022981427092e-05, |
| "loss": 3.6401, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.693186024556413e-05, |
| "loss": 3.6213, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.692347429805361e-05, |
| "loss": 3.6307, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.691508835054309e-05, |
| "loss": 3.6381, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.690670240303257e-05, |
| "loss": 3.6419, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6898332834325786e-05, |
| "loss": 3.6441, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6889946886815266e-05, |
| "loss": 3.6371, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6881560939304746e-05, |
| "loss": 3.6499, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6873174991794226e-05, |
| "loss": 3.6316, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6864805423087435e-05, |
| "loss": 3.6498, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6856419475576915e-05, |
| "loss": 3.6271, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6848033528066395e-05, |
| "loss": 3.6318, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683964758055587e-05, |
| "loss": 3.6366, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6831278011849084e-05, |
| "loss": 3.6427, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6822892064338564e-05, |
| "loss": 3.6396, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6814506116828044e-05, |
| "loss": 3.6399, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6806120169317523e-05, |
| "loss": 3.6226, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.679775060061074e-05, |
| "loss": 3.6186, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678936465310022e-05, |
| "loss": 3.6417, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.67809787055897e-05, |
| "loss": 3.6373, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677259275807917e-05, |
| "loss": 3.6302, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.676422318937239e-05, |
| "loss": 3.6376, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.675583724186187e-05, |
| "loss": 3.6324, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.674745129435134e-05, |
| "loss": 3.6313, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673906534684082e-05, |
| "loss": 3.6322, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673069577813404e-05, |
| "loss": 3.6292, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672230983062352e-05, |
| "loss": 3.6442, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6713923883113e-05, |
| "loss": 3.648, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.670553793560248e-05, |
| "loss": 3.6457, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.669716836689569e-05, |
| "loss": 3.6217, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.668878241938517e-05, |
| "loss": 3.6331, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6680396471874646e-05, |
| "loss": 3.6317, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6672010524364126e-05, |
| "loss": 3.6342, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.666364095565734e-05, |
| "loss": 3.635, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6655255008146815e-05, |
| "loss": 3.6317, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6646869060636295e-05, |
| "loss": 3.633, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6638483113125775e-05, |
| "loss": 3.6253, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663011354441899e-05, |
| "loss": 3.6322, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6621727596908464e-05, |
| "loss": 3.6312, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.661334164939795e-05, |
| "loss": 3.6268, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.660495570188743e-05, |
| "loss": 3.636, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.659658613318065e-05, |
| "loss": 3.6305, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.658820018567012e-05, |
| "loss": 3.6276, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65798142381596e-05, |
| "loss": 3.6346, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657142829064908e-05, |
| "loss": 3.6255, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.656305872194229e-05, |
| "loss": 3.6382, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655467277443177e-05, |
| "loss": 3.634, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.654628682692125e-05, |
| "loss": 3.6341, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.653790087941073e-05, |
| "loss": 3.646, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652953131070394e-05, |
| "loss": 3.6374, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652114536319342e-05, |
| "loss": 3.6311, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65127594156829e-05, |
| "loss": 3.6368, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6504373468172384e-05, |
| "loss": 3.6264, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6496003899465594e-05, |
| "loss": 3.6355, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6487617951955074e-05, |
| "loss": 3.6274, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6479232004444554e-05, |
| "loss": 3.6198, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6470846056934033e-05, |
| "loss": 3.6408, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.646247648822724e-05, |
| "loss": 3.6379, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.645409054071672e-05, |
| "loss": 3.6343, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.64457045932062e-05, |
| "loss": 3.6302, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.643731864569568e-05, |
| "loss": 3.6242, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.642894907698889e-05, |
| "loss": 3.625, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.642056312947837e-05, |
| "loss": 3.641, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.641217718196785e-05, |
| "loss": 3.6341, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.640379123445734e-05, |
| "loss": 3.6409, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.639542166575055e-05, |
| "loss": 3.6133, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.638703571824003e-05, |
| "loss": 3.6441, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637864977072951e-05, |
| "loss": 3.6254, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637026382321899e-05, |
| "loss": 3.6449, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6361894254512196e-05, |
| "loss": 3.6272, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6353508307001676e-05, |
| "loss": 3.6259, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6345122359491156e-05, |
| "loss": 3.6346, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6336736411980636e-05, |
| "loss": 3.6525, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6328366843273845e-05, |
| "loss": 3.6249, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6319980895763325e-05, |
| "loss": 3.6404, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6311594948252805e-05, |
| "loss": 3.6217, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.630320900074229e-05, |
| "loss": 3.6363, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.62948394320355e-05, |
| "loss": 3.6252, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.628645348452498e-05, |
| "loss": 3.6337, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.627806753701446e-05, |
| "loss": 3.6324, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.626968158950394e-05, |
| "loss": 3.6262, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.626131202079715e-05, |
| "loss": 3.6372, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8622825145721436, |
| "eval_runtime": 312.564, |
| "eval_samples_per_second": 1220.841, |
| "eval_steps_per_second": 38.152, |
| "step": 839520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.625292607328663e-05, |
| "loss": 3.6294, |
| "step": 839680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.624454012577611e-05, |
| "loss": 3.6171, |
| "step": 840192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.623615417826559e-05, |
| "loss": 3.6322, |
| "step": 840704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.62277846095588e-05, |
| "loss": 3.6282, |
| "step": 841216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.621939866204828e-05, |
| "loss": 3.6434, |
| "step": 841728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.621101271453776e-05, |
| "loss": 3.6295, |
| "step": 842240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6202626767027245e-05, |
| "loss": 3.6297, |
| "step": 842752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6194257198320455e-05, |
| "loss": 3.6342, |
| "step": 843264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6185871250809935e-05, |
| "loss": 3.6255, |
| "step": 843776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6177485303299415e-05, |
| "loss": 3.6218, |
| "step": 844288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6169099355788894e-05, |
| "loss": 3.6358, |
| "step": 844800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6160729787082104e-05, |
| "loss": 3.6293, |
| "step": 845312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615236021837531e-05, |
| "loss": 3.6394, |
| "step": 845824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.614397427086479e-05, |
| "loss": 3.6165, |
| "step": 846336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.613558832335427e-05, |
| "loss": 3.6193, |
| "step": 846848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.612720237584375e-05, |
| "loss": 3.6162, |
| "step": 847360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611881642833323e-05, |
| "loss": 3.6266, |
| "step": 847872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611043048082271e-05, |
| "loss": 3.6243, |
| "step": 848384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.61020445333122e-05, |
| "loss": 3.6192, |
| "step": 848896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.609365858580168e-05, |
| "loss": 3.6344, |
| "step": 849408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.608528901709489e-05, |
| "loss": 3.6438, |
| "step": 849920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.607690306958437e-05, |
| "loss": 3.6319, |
| "step": 850432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.606851712207385e-05, |
| "loss": 3.6292, |
| "step": 850944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.606013117456333e-05, |
| "loss": 3.6275, |
| "step": 851456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.605176160585654e-05, |
| "loss": 3.6334, |
| "step": 851968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.604337565834602e-05, |
| "loss": 3.6236, |
| "step": 852480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.60349897108355e-05, |
| "loss": 3.6196, |
| "step": 852992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.602660376332498e-05, |
| "loss": 3.6346, |
| "step": 853504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6018234194618186e-05, |
| "loss": 3.612, |
| "step": 854016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6009848247107666e-05, |
| "loss": 3.6151, |
| "step": 854528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.600146229959715e-05, |
| "loss": 3.6286, |
| "step": 855040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.599307635208663e-05, |
| "loss": 3.6271, |
| "step": 855552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.598469040457611e-05, |
| "loss": 3.6317, |
| "step": 856064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.597632083586932e-05, |
| "loss": 3.6312, |
| "step": 856576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59679348883588e-05, |
| "loss": 3.6203, |
| "step": 857088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595954894084828e-05, |
| "loss": 3.623, |
| "step": 857600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595116299333776e-05, |
| "loss": 3.6223, |
| "step": 858112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.594279342463097e-05, |
| "loss": 3.6142, |
| "step": 858624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.593440747712045e-05, |
| "loss": 3.6226, |
| "step": 859136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.592602152960993e-05, |
| "loss": 3.6165, |
| "step": 859648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.591765196090314e-05, |
| "loss": 3.6165, |
| "step": 860160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.590926601339262e-05, |
| "loss": 3.6232, |
| "step": 860672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5900880065882106e-05, |
| "loss": 3.6353, |
| "step": 861184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5892494118371586e-05, |
| "loss": 3.6184, |
| "step": 861696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5884124549664796e-05, |
| "loss": 3.6258, |
| "step": 862208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5875738602154276e-05, |
| "loss": 3.6233, |
| "step": 862720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5867352654643755e-05, |
| "loss": 3.6203, |
| "step": 863232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5858966707133235e-05, |
| "loss": 3.62, |
| "step": 863744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5850597138426445e-05, |
| "loss": 3.6021, |
| "step": 864256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5842211190915924e-05, |
| "loss": 3.6272, |
| "step": 864768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5833825243405404e-05, |
| "loss": 3.6185, |
| "step": 865280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5825439295894884e-05, |
| "loss": 3.625, |
| "step": 865792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5817069727188093e-05, |
| "loss": 3.6173, |
| "step": 866304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5808683779677573e-05, |
| "loss": 3.6062, |
| "step": 866816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.580029783216706e-05, |
| "loss": 3.6061, |
| "step": 867328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579191188465654e-05, |
| "loss": 3.6072, |
| "step": 867840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.578352593714602e-05, |
| "loss": 3.6316, |
| "step": 868352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.577513998963549e-05, |
| "loss": 3.6142, |
| "step": 868864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.576675404212497e-05, |
| "loss": 3.634, |
| "step": 869376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.575836809461445e-05, |
| "loss": 3.6224, |
| "step": 869888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574999852590767e-05, |
| "loss": 3.6047, |
| "step": 870400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574161257839714e-05, |
| "loss": 3.616, |
| "step": 870912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.573322663088662e-05, |
| "loss": 3.6098, |
| "step": 871424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57248406833761e-05, |
| "loss": 3.6066, |
| "step": 871936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.571647111466931e-05, |
| "loss": 3.6217, |
| "step": 872448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57080851671588e-05, |
| "loss": 3.6212, |
| "step": 872960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569969921964828e-05, |
| "loss": 3.6083, |
| "step": 873472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569131327213776e-05, |
| "loss": 3.619, |
| "step": 873984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.568294370343097e-05, |
| "loss": 3.5976, |
| "step": 874496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.567455775592045e-05, |
| "loss": 3.6118, |
| "step": 875008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.566617180840993e-05, |
| "loss": 3.6179, |
| "step": 875520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.565778586089941e-05, |
| "loss": 3.6224, |
| "step": 876032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5649416292192616e-05, |
| "loss": 3.6226, |
| "step": 876544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5641030344682096e-05, |
| "loss": 3.6163, |
| "step": 877056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5632644397171576e-05, |
| "loss": 3.6299, |
| "step": 877568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5624258449661056e-05, |
| "loss": 3.6141, |
| "step": 878080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5615888880954265e-05, |
| "loss": 3.6283, |
| "step": 878592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5607502933443745e-05, |
| "loss": 3.6099, |
| "step": 879104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559911698593323e-05, |
| "loss": 3.6103, |
| "step": 879616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559073103842271e-05, |
| "loss": 3.6153, |
| "step": 880128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558236146971592e-05, |
| "loss": 3.6215, |
| "step": 880640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55739755222054e-05, |
| "loss": 3.6165, |
| "step": 881152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.556558957469488e-05, |
| "loss": 3.6247, |
| "step": 881664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.555720362718436e-05, |
| "loss": 3.6039, |
| "step": 882176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554883405847757e-05, |
| "loss": 3.5984, |
| "step": 882688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554044811096705e-05, |
| "loss": 3.6179, |
| "step": 883200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553206216345653e-05, |
| "loss": 3.6196, |
| "step": 883712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.552367621594601e-05, |
| "loss": 3.61, |
| "step": 884224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.551530664723922e-05, |
| "loss": 3.6184, |
| "step": 884736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55069206997287e-05, |
| "loss": 3.6123, |
| "step": 885248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5498534752218185e-05, |
| "loss": 3.6132, |
| "step": 885760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5490148804707665e-05, |
| "loss": 3.612, |
| "step": 886272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5481779236000874e-05, |
| "loss": 3.6102, |
| "step": 886784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5473393288490354e-05, |
| "loss": 3.622, |
| "step": 887296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5465007340979834e-05, |
| "loss": 3.629, |
| "step": 887808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5456621393469314e-05, |
| "loss": 3.626, |
| "step": 888320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.544825182476252e-05, |
| "loss": 3.6022, |
| "step": 888832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5439865877252e-05, |
| "loss": 3.6158, |
| "step": 889344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.543147992974148e-05, |
| "loss": 3.6092, |
| "step": 889856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.542309398223096e-05, |
| "loss": 3.6168, |
| "step": 890368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.541472441352417e-05, |
| "loss": 3.6167, |
| "step": 890880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.540633846601365e-05, |
| "loss": 3.613, |
| "step": 891392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.539795251850314e-05, |
| "loss": 3.6121, |
| "step": 891904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538956657099262e-05, |
| "loss": 3.6083, |
| "step": 892416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538119700228583e-05, |
| "loss": 3.6106, |
| "step": 892928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.537281105477531e-05, |
| "loss": 3.6118, |
| "step": 893440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.536442510726479e-05, |
| "loss": 3.6065, |
| "step": 893952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.535603915975427e-05, |
| "loss": 3.6148, |
| "step": 894464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.534766959104748e-05, |
| "loss": 3.6138, |
| "step": 894976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533928364353696e-05, |
| "loss": 3.6074, |
| "step": 895488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533089769602644e-05, |
| "loss": 3.6122, |
| "step": 896000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.532251174851592e-05, |
| "loss": 3.6097, |
| "step": 896512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5314142179809126e-05, |
| "loss": 3.618, |
| "step": 897024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5305756232298606e-05, |
| "loss": 3.6155, |
| "step": 897536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.529737028478809e-05, |
| "loss": 3.6123, |
| "step": 898048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528898433727757e-05, |
| "loss": 3.6242, |
| "step": 898560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528061476857078e-05, |
| "loss": 3.6233, |
| "step": 899072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527222882106026e-05, |
| "loss": 3.6109, |
| "step": 899584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.526384287354974e-05, |
| "loss": 3.6192, |
| "step": 900096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.525545692603922e-05, |
| "loss": 3.6054, |
| "step": 900608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.524708735733243e-05, |
| "loss": 3.6152, |
| "step": 901120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.523870140982191e-05, |
| "loss": 3.6077, |
| "step": 901632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.523031546231139e-05, |
| "loss": 3.5998, |
| "step": 902144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.522192951480087e-05, |
| "loss": 3.6208, |
| "step": 902656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.521355994609408e-05, |
| "loss": 3.6163, |
| "step": 903168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.520517399858356e-05, |
| "loss": 3.6153, |
| "step": 903680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5196788051073046e-05, |
| "loss": 3.6129, |
| "step": 904192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5188402103562526e-05, |
| "loss": 3.6028, |
| "step": 904704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5180032534855735e-05, |
| "loss": 3.6022, |
| "step": 905216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5171646587345215e-05, |
| "loss": 3.621, |
| "step": 905728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5163260639834695e-05, |
| "loss": 3.6151, |
| "step": 906240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5154874692324175e-05, |
| "loss": 3.6216, |
| "step": 906752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5146505123617384e-05, |
| "loss": 3.599, |
| "step": 907264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5138119176106864e-05, |
| "loss": 3.6146, |
| "step": 907776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5129733228596344e-05, |
| "loss": 3.6121, |
| "step": 908288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5121347281085824e-05, |
| "loss": 3.6194, |
| "step": 908800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.511297771237903e-05, |
| "loss": 3.609, |
| "step": 909312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.510459176486851e-05, |
| "loss": 3.6068, |
| "step": 909824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5096205817358e-05, |
| "loss": 3.6163, |
| "step": 910336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.508781986984748e-05, |
| "loss": 3.6285, |
| "step": 910848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.507945030114069e-05, |
| "loss": 3.609, |
| "step": 911360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.507106435363017e-05, |
| "loss": 3.6186, |
| "step": 911872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.506267840611965e-05, |
| "loss": 3.6042, |
| "step": 912384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.505429245860913e-05, |
| "loss": 3.6146, |
| "step": 912896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.504592288990234e-05, |
| "loss": 3.6087, |
| "step": 913408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.503753694239182e-05, |
| "loss": 3.6106, |
| "step": 913920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.50291509948813e-05, |
| "loss": 3.6126, |
| "step": 914432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.502076504737078e-05, |
| "loss": 3.6125, |
| "step": 914944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.501239547866399e-05, |
| "loss": 3.6129, |
| "step": 915456 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8620078563690186, |
| "eval_runtime": 312.5598, |
| "eval_samples_per_second": 1220.857, |
| "eval_steps_per_second": 38.153, |
| "step": 915840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.500400953115347e-05, |
| "loss": 3.6247, |
| "step": 915968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4995623583642954e-05, |
| "loss": 3.5905, |
| "step": 916480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4987237636132434e-05, |
| "loss": 3.6132, |
| "step": 916992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4978851688621913e-05, |
| "loss": 3.6125, |
| "step": 917504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.497048211991512e-05, |
| "loss": 3.6198, |
| "step": 918016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.49620961724046e-05, |
| "loss": 3.6151, |
| "step": 918528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.495371022489408e-05, |
| "loss": 3.6073, |
| "step": 919040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.494532427738356e-05, |
| "loss": 3.615, |
| "step": 919552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.493695470867677e-05, |
| "loss": 3.6063, |
| "step": 920064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.492856876116625e-05, |
| "loss": 3.6074, |
| "step": 920576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.492018281365573e-05, |
| "loss": 3.6124, |
| "step": 921088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.491179686614521e-05, |
| "loss": 3.6124, |
| "step": 921600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4903443676242156e-05, |
| "loss": 3.6196, |
| "step": 922112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4895057728731636e-05, |
| "loss": 3.5955, |
| "step": 922624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4886671781221116e-05, |
| "loss": 3.5965, |
| "step": 923136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4878285833710596e-05, |
| "loss": 3.5966, |
| "step": 923648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4869899886200076e-05, |
| "loss": 3.61, |
| "step": 924160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4861513938689556e-05, |
| "loss": 3.6033, |
| "step": 924672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4853127991179036e-05, |
| "loss": 3.603, |
| "step": 925184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4844742043668516e-05, |
| "loss": 3.6131, |
| "step": 925696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4836372474961725e-05, |
| "loss": 3.6266, |
| "step": 926208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4827986527451205e-05, |
| "loss": 3.6138, |
| "step": 926720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4819600579940685e-05, |
| "loss": 3.6095, |
| "step": 927232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.481121463243016e-05, |
| "loss": 3.6098, |
| "step": 927744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4802845063723374e-05, |
| "loss": 3.6193, |
| "step": 928256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4794459116212854e-05, |
| "loss": 3.6017, |
| "step": 928768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.478607316870234e-05, |
| "loss": 3.6012, |
| "step": 929280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4777687221191814e-05, |
| "loss": 3.6165, |
| "step": 929792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4769301273681294e-05, |
| "loss": 3.5902, |
| "step": 930304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.476093170497451e-05, |
| "loss": 3.6, |
| "step": 930816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.475254575746399e-05, |
| "loss": 3.6082, |
| "step": 931328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.474415980995346e-05, |
| "loss": 3.6054, |
| "step": 931840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.473577386244294e-05, |
| "loss": 3.6152, |
| "step": 932352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.472740429373616e-05, |
| "loss": 3.6088, |
| "step": 932864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.471901834622563e-05, |
| "loss": 3.6044, |
| "step": 933376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.471063239871511e-05, |
| "loss": 3.6028, |
| "step": 933888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.470224645120459e-05, |
| "loss": 3.6031, |
| "step": 934400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.469387688249781e-05, |
| "loss": 3.5962, |
| "step": 934912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.468549093498729e-05, |
| "loss": 3.6065, |
| "step": 935424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.467710498747677e-05, |
| "loss": 3.597, |
| "step": 935936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.466871903996625e-05, |
| "loss": 3.6015, |
| "step": 936448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4660349471259464e-05, |
| "loss": 3.6032, |
| "step": 936960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.465196352374894e-05, |
| "loss": 3.618, |
| "step": 937472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.464357757623842e-05, |
| "loss": 3.5969, |
| "step": 937984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.463520800753163e-05, |
| "loss": 3.6065, |
| "step": 938496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4626822060021106e-05, |
| "loss": 3.6062, |
| "step": 939008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4618436112510586e-05, |
| "loss": 3.6, |
| "step": 939520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4610050165000066e-05, |
| "loss": 3.6051, |
| "step": 940032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4601664217489546e-05, |
| "loss": 3.5874, |
| "step": 940544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.459329464878276e-05, |
| "loss": 3.6055, |
| "step": 941056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.458490870127224e-05, |
| "loss": 3.6032, |
| "step": 941568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.457652275376172e-05, |
| "loss": 3.6052, |
| "step": 942080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.456815318505494e-05, |
| "loss": 3.5985, |
| "step": 942592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.455976723754441e-05, |
| "loss": 3.5878, |
| "step": 943104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.455138129003389e-05, |
| "loss": 3.587, |
| "step": 943616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.454299534252337e-05, |
| "loss": 3.5868, |
| "step": 944128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.453460939501285e-05, |
| "loss": 3.6149, |
| "step": 944640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.452622344750233e-05, |
| "loss": 3.5917, |
| "step": 945152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.451783749999181e-05, |
| "loss": 3.6116, |
| "step": 945664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.450945155248129e-05, |
| "loss": 3.6045, |
| "step": 946176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.45010819837745e-05, |
| "loss": 3.5885, |
| "step": 946688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4492696036263986e-05, |
| "loss": 3.5966, |
| "step": 947200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4484310088753466e-05, |
| "loss": 3.5919, |
| "step": 947712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4475924141242946e-05, |
| "loss": 3.5894, |
| "step": 948224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4467554572536155e-05, |
| "loss": 3.5977, |
| "step": 948736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4459168625025635e-05, |
| "loss": 3.601, |
| "step": 949248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4450782677515115e-05, |
| "loss": 3.5927, |
| "step": 949760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4442396730004595e-05, |
| "loss": 3.5969, |
| "step": 950272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4434027161297804e-05, |
| "loss": 3.5802, |
| "step": 950784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4425641213787284e-05, |
| "loss": 3.595, |
| "step": 951296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4417255266276764e-05, |
| "loss": 3.5994, |
| "step": 951808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4408869318766244e-05, |
| "loss": 3.6044, |
| "step": 952320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.440049975005945e-05, |
| "loss": 3.6065, |
| "step": 952832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.439211380254894e-05, |
| "loss": 3.5979, |
| "step": 953344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.438372785503842e-05, |
| "loss": 3.607, |
| "step": 953856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.43753419075279e-05, |
| "loss": 3.5949, |
| "step": 954368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.436697233882111e-05, |
| "loss": 3.6085, |
| "step": 954880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.435858639131059e-05, |
| "loss": 3.5939, |
| "step": 955392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.435020044380007e-05, |
| "loss": 3.5948, |
| "step": 955904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.434181449628955e-05, |
| "loss": 3.5928, |
| "step": 956416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.433344492758276e-05, |
| "loss": 3.6017, |
| "step": 956928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.432505898007224e-05, |
| "loss": 3.6019, |
| "step": 957440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.431667303256172e-05, |
| "loss": 3.6052, |
| "step": 957952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.43082870850512e-05, |
| "loss": 3.5874, |
| "step": 958464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4299917516344407e-05, |
| "loss": 3.578, |
| "step": 958976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.429153156883389e-05, |
| "loss": 3.6, |
| "step": 959488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.428314562132337e-05, |
| "loss": 3.6005, |
| "step": 960000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.427475967381285e-05, |
| "loss": 3.5919, |
| "step": 960512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.426639010510606e-05, |
| "loss": 3.5981, |
| "step": 961024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.425800415759554e-05, |
| "loss": 3.5972, |
| "step": 961536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.424961821008502e-05, |
| "loss": 3.5919, |
| "step": 962048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.42412322625745e-05, |
| "loss": 3.5964, |
| "step": 962560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.423286269386771e-05, |
| "loss": 3.5901, |
| "step": 963072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.422447674635719e-05, |
| "loss": 3.6014, |
| "step": 963584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.421609079884667e-05, |
| "loss": 3.6079, |
| "step": 964096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.420770485133615e-05, |
| "loss": 3.6103, |
| "step": 964608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.419933528262936e-05, |
| "loss": 3.5838, |
| "step": 965120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.419094933511885e-05, |
| "loss": 3.5984, |
| "step": 965632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.418256338760833e-05, |
| "loss": 3.5959, |
| "step": 966144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.417417744009781e-05, |
| "loss": 3.5973, |
| "step": 966656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4165807871391016e-05, |
| "loss": 3.5984, |
| "step": 967168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4157421923880496e-05, |
| "loss": 3.5945, |
| "step": 967680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4149035976369976e-05, |
| "loss": 3.5938, |
| "step": 968192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4140650028859456e-05, |
| "loss": 3.5887, |
| "step": 968704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4132280460152665e-05, |
| "loss": 3.5942, |
| "step": 969216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4123894512642145e-05, |
| "loss": 3.5922, |
| "step": 969728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4115508565131625e-05, |
| "loss": 3.5874, |
| "step": 970240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4107122617621105e-05, |
| "loss": 3.5984, |
| "step": 970752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4098753048914314e-05, |
| "loss": 3.5941, |
| "step": 971264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.40903671014038e-05, |
| "loss": 3.587, |
| "step": 971776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.408198115389328e-05, |
| "loss": 3.5971, |
| "step": 972288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.407359520638276e-05, |
| "loss": 3.5868, |
| "step": 972800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.406522563767597e-05, |
| "loss": 3.6022, |
| "step": 973312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.405683969016545e-05, |
| "loss": 3.5973, |
| "step": 973824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.404845374265493e-05, |
| "loss": 3.5952, |
| "step": 974336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.404006779514441e-05, |
| "loss": 3.6011, |
| "step": 974848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.403169822643762e-05, |
| "loss": 3.6098, |
| "step": 975360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.40233122789271e-05, |
| "loss": 3.5924, |
| "step": 975872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.401492633141658e-05, |
| "loss": 3.6003, |
| "step": 976384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.400654038390606e-05, |
| "loss": 3.5885, |
| "step": 976896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.399817081519927e-05, |
| "loss": 3.5978, |
| "step": 977408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.398978486768875e-05, |
| "loss": 3.5927, |
| "step": 977920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3981398920178234e-05, |
| "loss": 3.5824, |
| "step": 978432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3973012972667714e-05, |
| "loss": 3.5987, |
| "step": 978944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.396464340396092e-05, |
| "loss": 3.6009, |
| "step": 979456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.39562574564504e-05, |
| "loss": 3.5982, |
| "step": 979968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.394787150893988e-05, |
| "loss": 3.5945, |
| "step": 980480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.393948556142936e-05, |
| "loss": 3.582, |
| "step": 980992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.393111599272257e-05, |
| "loss": 3.5873, |
| "step": 981504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.392273004521205e-05, |
| "loss": 3.6031, |
| "step": 982016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.391434409770153e-05, |
| "loss": 3.5968, |
| "step": 982528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.390595815019101e-05, |
| "loss": 3.6064, |
| "step": 983040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.389758858148422e-05, |
| "loss": 3.5793, |
| "step": 983552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.38892026339737e-05, |
| "loss": 3.5967, |
| "step": 984064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.388081668646319e-05, |
| "loss": 3.5952, |
| "step": 984576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.387243073895266e-05, |
| "loss": 3.5988, |
| "step": 985088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.386406117024588e-05, |
| "loss": 3.5897, |
| "step": 985600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.385567522273536e-05, |
| "loss": 3.5919, |
| "step": 986112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.384728927522484e-05, |
| "loss": 3.5967, |
| "step": 986624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.383890332771431e-05, |
| "loss": 3.6138, |
| "step": 987136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3830533759007526e-05, |
| "loss": 3.5917, |
| "step": 987648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3822147811497006e-05, |
| "loss": 3.5992, |
| "step": 988160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3813761863986486e-05, |
| "loss": 3.5897, |
| "step": 988672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.380537591647596e-05, |
| "loss": 3.5963, |
| "step": 989184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3797006347769175e-05, |
| "loss": 3.5879, |
| "step": 989696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3788620400258655e-05, |
| "loss": 3.5969, |
| "step": 990208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3780234452748135e-05, |
| "loss": 3.5899, |
| "step": 990720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3771848505237615e-05, |
| "loss": 3.5933, |
| "step": 991232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.376347893653083e-05, |
| "loss": 3.5966, |
| "step": 991744 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8627939224243164, |
| "eval_runtime": 307.1609, |
| "eval_samples_per_second": 1242.316, |
| "eval_steps_per_second": 38.823, |
| "step": 992160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.375509298902031e-05, |
| "loss": 3.5953, |
| "step": 992256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3746707041509784e-05, |
| "loss": 3.577, |
| "step": 992768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3738321093999264e-05, |
| "loss": 3.5921, |
| "step": 993280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372995152529248e-05, |
| "loss": 3.6016, |
| "step": 993792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372156557778196e-05, |
| "loss": 3.5969, |
| "step": 994304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.371317963027143e-05, |
| "loss": 3.6013, |
| "step": 994816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.370479368276091e-05, |
| "loss": 3.5863, |
| "step": 995328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.369642411405413e-05, |
| "loss": 3.5997, |
| "step": 995840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.368803816654361e-05, |
| "loss": 3.5893, |
| "step": 996352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.367965221903309e-05, |
| "loss": 3.5889, |
| "step": 996864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.367126627152257e-05, |
| "loss": 3.5937, |
| "step": 997376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3662896702815784e-05, |
| "loss": 3.5937, |
| "step": 997888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3654527134108993e-05, |
| "loss": 3.6049, |
| "step": 998400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3646141186598473e-05, |
| "loss": 3.5785, |
| "step": 998912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.363775523908795e-05, |
| "loss": 3.5764, |
| "step": 999424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3629369291577427e-05, |
| "loss": 3.5834, |
| "step": 999936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3620983344066906e-05, |
| "loss": 3.5913, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3612597396556386e-05, |
| "loss": 3.5832, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3604211449045866e-05, |
| "loss": 3.5859, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3595825501535346e-05, |
| "loss": 3.5973, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.358745593282856e-05, |
| "loss": 3.6037, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.357906998531804e-05, |
| "loss": 3.5981, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.357068403780752e-05, |
| "loss": 3.5925, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3562298090297e-05, |
| "loss": 3.5927, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.355392852159021e-05, |
| "loss": 3.6011, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.354554257407969e-05, |
| "loss": 3.5864, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.353715662656917e-05, |
| "loss": 3.5816, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.352877067905865e-05, |
| "loss": 3.6012, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.352038473154813e-05, |
| "loss": 3.5717, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351201516284134e-05, |
| "loss": 3.5838, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.350362921533082e-05, |
| "loss": 3.5849, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.34952432678203e-05, |
| "loss": 3.5926, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.348685732030979e-05, |
| "loss": 3.5955, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3478487751602996e-05, |
| "loss": 3.5899, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3470101804092476e-05, |
| "loss": 3.589, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3461715856581956e-05, |
| "loss": 3.5858, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3453329909071436e-05, |
| "loss": 3.5845, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3444960340364645e-05, |
| "loss": 3.5755, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3436574392854125e-05, |
| "loss": 3.5917, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3428188445343605e-05, |
| "loss": 3.5792, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3419802497833085e-05, |
| "loss": 3.5855, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3411432929126294e-05, |
| "loss": 3.5848, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3403046981615774e-05, |
| "loss": 3.5985, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3394661034105254e-05, |
| "loss": 3.5842, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.338627508659474e-05, |
| "loss": 3.588, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.337790551788795e-05, |
| "loss": 3.5878, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.336951957037743e-05, |
| "loss": 3.5853, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.336113362286691e-05, |
| "loss": 3.5851, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335274767535639e-05, |
| "loss": 3.5698, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.33443781066496e-05, |
| "loss": 3.5854, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.333599215913908e-05, |
| "loss": 3.589, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.332760621162856e-05, |
| "loss": 3.5842, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.331923664292177e-05, |
| "loss": 3.5802, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.331085069541125e-05, |
| "loss": 3.5689, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330246474790073e-05, |
| "loss": 3.5747, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.329407880039021e-05, |
| "loss": 3.5682, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3285692852879694e-05, |
| "loss": 3.6, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3277306905369174e-05, |
| "loss": 3.5732, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3268920957858654e-05, |
| "loss": 3.599, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3260535010348134e-05, |
| "loss": 3.5846, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325216544164134e-05, |
| "loss": 3.5715, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.324377949413082e-05, |
| "loss": 3.5769, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.32353935466203e-05, |
| "loss": 3.5751, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.322700759910978e-05, |
| "loss": 3.5749, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321863803040299e-05, |
| "loss": 3.576, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321025208289247e-05, |
| "loss": 3.5881, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.320186613538195e-05, |
| "loss": 3.5791, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.319348018787143e-05, |
| "loss": 3.5758, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.318511061916465e-05, |
| "loss": 3.5682, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.317672467165413e-05, |
| "loss": 3.578, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.316833872414361e-05, |
| "loss": 3.5804, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.315995277663309e-05, |
| "loss": 3.5891, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.31515832079263e-05, |
| "loss": 3.5888, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.314319726041578e-05, |
| "loss": 3.582, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.313481131290526e-05, |
| "loss": 3.5886, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3126425365394737e-05, |
| "loss": 3.5808, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3118055796687946e-05, |
| "loss": 3.5884, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3109669849177426e-05, |
| "loss": 3.5765, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3101283901666906e-05, |
| "loss": 3.5812, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3092897954156386e-05, |
| "loss": 3.5762, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3084528385449595e-05, |
| "loss": 3.5849, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.307614243793908e-05, |
| "loss": 3.5888, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.306775649042856e-05, |
| "loss": 3.5862, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.305937054291804e-05, |
| "loss": 3.5714, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.305100097421125e-05, |
| "loss": 3.5568, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.304261502670073e-05, |
| "loss": 3.5831, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.303422907919021e-05, |
| "loss": 3.5817, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3025843131679683e-05, |
| "loss": 3.58, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.30174735629729e-05, |
| "loss": 3.5777, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.300908761546238e-05, |
| "loss": 3.5836, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.300070166795186e-05, |
| "loss": 3.5719, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.299231572044133e-05, |
| "loss": 3.5785, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.298394615173455e-05, |
| "loss": 3.5743, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2975560204224035e-05, |
| "loss": 3.5829, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.296717425671351e-05, |
| "loss": 3.5895, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.295878830920299e-05, |
| "loss": 3.5916, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2950418740496204e-05, |
| "loss": 3.5726, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2942032792985684e-05, |
| "loss": 3.5773, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.293364684547516e-05, |
| "loss": 3.5804, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.292526089796464e-05, |
| "loss": 3.5818, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.291689132925785e-05, |
| "loss": 3.5794, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.290850538174733e-05, |
| "loss": 3.5797, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2900119434236806e-05, |
| "loss": 3.5739, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2891733486726286e-05, |
| "loss": 3.5735, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.28833639180195e-05, |
| "loss": 3.5804, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.287497797050898e-05, |
| "loss": 3.5713, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.286659202299846e-05, |
| "loss": 3.5734, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.285822245429168e-05, |
| "loss": 3.5802, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.284983650678116e-05, |
| "loss": 3.5747, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.284145055927063e-05, |
| "loss": 3.5737, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.283306461176011e-05, |
| "loss": 3.5862, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.282469504305333e-05, |
| "loss": 3.5678, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.281630909554281e-05, |
| "loss": 3.5845, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.280792314803228e-05, |
| "loss": 3.5804, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.279953720052176e-05, |
| "loss": 3.5753, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.279115125301124e-05, |
| "loss": 3.5853, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2782781684304456e-05, |
| "loss": 3.5937, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2774395736793936e-05, |
| "loss": 3.5764, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2766009789283416e-05, |
| "loss": 3.5833, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2757623841772896e-05, |
| "loss": 3.5714, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2749254273066105e-05, |
| "loss": 3.58, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2740868325555585e-05, |
| "loss": 3.5743, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2732482378045065e-05, |
| "loss": 3.5702, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2724096430534544e-05, |
| "loss": 3.5762, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2715726861827754e-05, |
| "loss": 3.5861, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2707340914317234e-05, |
| "loss": 3.5799, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2698954966806713e-05, |
| "loss": 3.5782, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2690569019296193e-05, |
| "loss": 3.5667, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.268219945058941e-05, |
| "loss": 3.5725, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.267381350307889e-05, |
| "loss": 3.5839, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.266542755556837e-05, |
| "loss": 3.5791, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.265704160805785e-05, |
| "loss": 3.5893, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.264867203935106e-05, |
| "loss": 3.5649, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.264028609184054e-05, |
| "loss": 3.5805, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.263190014433002e-05, |
| "loss": 3.5806, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.26235141968195e-05, |
| "loss": 3.5813, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.261514462811271e-05, |
| "loss": 3.5735, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.260675868060219e-05, |
| "loss": 3.5762, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.259837273309167e-05, |
| "loss": 3.5847, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.258998678558115e-05, |
| "loss": 3.5927, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.258161721687436e-05, |
| "loss": 3.5768, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.257323126936384e-05, |
| "loss": 3.5794, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.256484532185332e-05, |
| "loss": 3.5723, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.25564593743428e-05, |
| "loss": 3.58, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.254808980563601e-05, |
| "loss": 3.5731, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.253970385812549e-05, |
| "loss": 3.5849, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.253131791061497e-05, |
| "loss": 3.5729, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252293196310445e-05, |
| "loss": 3.5785, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.251456239439766e-05, |
| "loss": 3.5783, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8637235164642334, |
| "eval_runtime": 308.9231, |
| "eval_samples_per_second": 1235.23, |
| "eval_steps_per_second": 38.602, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.250617644688714e-05, |
| "loss": 3.5648, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.249779049937662e-05, |
| "loss": 3.5673, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.24894045518661e-05, |
| "loss": 3.5737, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.248101860435559e-05, |
| "loss": 3.5849, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2472649035648797e-05, |
| "loss": 3.5804, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2464263088138277e-05, |
| "loss": 3.5868, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2455877140627757e-05, |
| "loss": 3.5691, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2447491193117236e-05, |
| "loss": 3.5839, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2439121624410446e-05, |
| "loss": 3.5725, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2430735676899926e-05, |
| "loss": 3.5735, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2422349729389405e-05, |
| "loss": 3.5742, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2413963781878885e-05, |
| "loss": 3.5793, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2405610591975824e-05, |
| "loss": 3.5877, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.239722464446531e-05, |
| "loss": 3.5672, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.238883869695479e-05, |
| "loss": 3.5545, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.238045274944427e-05, |
| "loss": 3.5688, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237206680193375e-05, |
| "loss": 3.5718, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.236368085442323e-05, |
| "loss": 3.5701, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.235529490691271e-05, |
| "loss": 3.5703, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.234690895940219e-05, |
| "loss": 3.5771, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.23385393906954e-05, |
| "loss": 3.591, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.233015344318488e-05, |
| "loss": 3.5788, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.232176749567436e-05, |
| "loss": 3.5781, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.231338154816384e-05, |
| "loss": 3.5766, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.230501197945705e-05, |
| "loss": 3.5842, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.229662603194653e-05, |
| "loss": 3.567, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.228824008443601e-05, |
| "loss": 3.5713, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.227985413692549e-05, |
| "loss": 3.5812, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2271468189414975e-05, |
| "loss": 3.5572, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2263098620708184e-05, |
| "loss": 3.5642, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2254712673197664e-05, |
| "loss": 3.569, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2246326725687144e-05, |
| "loss": 3.5763, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2237940778176624e-05, |
| "loss": 3.5831, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.222957120946983e-05, |
| "loss": 3.5675, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.222118526195931e-05, |
| "loss": 3.5783, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.221279931444879e-05, |
| "loss": 3.5709, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.220441336693827e-05, |
| "loss": 3.5648, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.219604379823148e-05, |
| "loss": 3.5605, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.218765785072096e-05, |
| "loss": 3.5737, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.217927190321044e-05, |
| "loss": 3.5623, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.217088595569993e-05, |
| "loss": 3.5678, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.216251638699314e-05, |
| "loss": 3.5714, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.215413043948262e-05, |
| "loss": 3.5848, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.21457444919721e-05, |
| "loss": 3.5649, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.213735854446158e-05, |
| "loss": 3.574, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2128988975754787e-05, |
| "loss": 3.5722, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2120603028244266e-05, |
| "loss": 3.5655, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2112217080733746e-05, |
| "loss": 3.5715, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2103831133223226e-05, |
| "loss": 3.5564, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2095461564516435e-05, |
| "loss": 3.5669, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2087075617005915e-05, |
| "loss": 3.5766, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2078689669495395e-05, |
| "loss": 3.567, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.207032010078861e-05, |
| "loss": 3.5652, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.206193415327809e-05, |
| "loss": 3.5502, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.205354820576757e-05, |
| "loss": 3.5638, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.204516225825705e-05, |
| "loss": 3.5496, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.203677631074653e-05, |
| "loss": 3.5855, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2028390363236004e-05, |
| "loss": 3.5584, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2020004415725484e-05, |
| "loss": 3.5831, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2011618468214964e-05, |
| "loss": 3.5666, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.200324889950818e-05, |
| "loss": 3.5519, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.199486295199765e-05, |
| "loss": 3.5625, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.198647700448713e-05, |
| "loss": 3.5657, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.197809105697662e-05, |
| "loss": 3.5569, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1969721488269836e-05, |
| "loss": 3.5539, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.196133554075931e-05, |
| "loss": 3.5747, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.195294959324879e-05, |
| "loss": 3.568, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.194456364573827e-05, |
| "loss": 3.5554, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.193619407703148e-05, |
| "loss": 3.5524, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.192780812952096e-05, |
| "loss": 3.5613, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.191942218201044e-05, |
| "loss": 3.5655, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.191103623449992e-05, |
| "loss": 3.5753, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.190266666579313e-05, |
| "loss": 3.5693, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.189428071828261e-05, |
| "loss": 3.5697, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.188589477077209e-05, |
| "loss": 3.5722, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1877508823261574e-05, |
| "loss": 3.565, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.186913925455478e-05, |
| "loss": 3.5713, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.186075330704426e-05, |
| "loss": 3.5637, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.185236735953374e-05, |
| "loss": 3.5684, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.184398141202322e-05, |
| "loss": 3.5615, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.183561184331643e-05, |
| "loss": 3.564, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.182722589580591e-05, |
| "loss": 3.5729, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.181883994829539e-05, |
| "loss": 3.5737, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.181045400078487e-05, |
| "loss": 3.5549, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.180208443207808e-05, |
| "loss": 3.5416, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.179369848456756e-05, |
| "loss": 3.5702, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.178531253705704e-05, |
| "loss": 3.5617, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.177692658954653e-05, |
| "loss": 3.5682, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1768557020839736e-05, |
| "loss": 3.5617, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1760171073329216e-05, |
| "loss": 3.567, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1751785125818696e-05, |
| "loss": 3.5625, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1743399178308176e-05, |
| "loss": 3.5578, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1735029609601385e-05, |
| "loss": 3.5583, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1726643662090865e-05, |
| "loss": 3.5659, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1718257714580345e-05, |
| "loss": 3.5745, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1709871767069825e-05, |
| "loss": 3.5761, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1701502198363034e-05, |
| "loss": 3.5572, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1693116250852514e-05, |
| "loss": 3.5629, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1684730303341994e-05, |
| "loss": 3.5649, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.167634435583148e-05, |
| "loss": 3.565, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.166797478712469e-05, |
| "loss": 3.5634, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.165958883961417e-05, |
| "loss": 3.5649, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.165120289210365e-05, |
| "loss": 3.5583, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.164281694459313e-05, |
| "loss": 3.5551, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.163444737588634e-05, |
| "loss": 3.569, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.162606142837582e-05, |
| "loss": 3.552, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.16176754808653e-05, |
| "loss": 3.5558, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.160928953335478e-05, |
| "loss": 3.5689, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.160091996464799e-05, |
| "loss": 3.5574, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.159253401713747e-05, |
| "loss": 3.5571, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.158414806962695e-05, |
| "loss": 3.5708, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1575762122116435e-05, |
| "loss": 3.5533, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1567392553409644e-05, |
| "loss": 3.5669, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1559006605899124e-05, |
| "loss": 3.5643, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1550620658388604e-05, |
| "loss": 3.5621, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1542234710878084e-05, |
| "loss": 3.5681, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.153386514217129e-05, |
| "loss": 3.5762, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.152547919466077e-05, |
| "loss": 3.5634, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.151709324715025e-05, |
| "loss": 3.5646, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.150870729963973e-05, |
| "loss": 3.5586, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.150033773093294e-05, |
| "loss": 3.5654, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149195178342242e-05, |
| "loss": 3.5565, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.14835658359119e-05, |
| "loss": 3.5569, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.147517988840139e-05, |
| "loss": 3.56, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.14668103196946e-05, |
| "loss": 3.5699, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.145842437218408e-05, |
| "loss": 3.5635, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.145003842467356e-05, |
| "loss": 3.5646, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.144165247716304e-05, |
| "loss": 3.5526, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1433282908456246e-05, |
| "loss": 3.5566, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1424896960945726e-05, |
| "loss": 3.5626, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1416511013435206e-05, |
| "loss": 3.5672, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1408125065924686e-05, |
| "loss": 3.5706, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1399755497217895e-05, |
| "loss": 3.5504, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1391369549707375e-05, |
| "loss": 3.5658, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1382983602196855e-05, |
| "loss": 3.5637, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1374597654686335e-05, |
| "loss": 3.5639, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.136622808597955e-05, |
| "loss": 3.5563, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.135784213846903e-05, |
| "loss": 3.5622, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.134945619095851e-05, |
| "loss": 3.5678, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.134107024344799e-05, |
| "loss": 3.5751, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.13327006747412e-05, |
| "loss": 3.562, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.132431472723068e-05, |
| "loss": 3.5627, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.131592877972016e-05, |
| "loss": 3.5589, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.130755921101337e-05, |
| "loss": 3.5614, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.129917326350285e-05, |
| "loss": 3.557, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.129078731599233e-05, |
| "loss": 3.5709, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128240136848181e-05, |
| "loss": 3.5542, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.127401542097129e-05, |
| "loss": 3.5709, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1265645852264505e-05, |
| "loss": 3.5567, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8642282485961914, |
| "eval_runtime": 312.5473, |
| "eval_samples_per_second": 1220.906, |
| "eval_steps_per_second": 38.154, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1257259904753985e-05, |
| "loss": 3.5632, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1248873957243465e-05, |
| "loss": 3.5512, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1240488009732945e-05, |
| "loss": 3.5555, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1232118441026154e-05, |
| "loss": 3.5673, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1223732493515634e-05, |
| "loss": 3.5643, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1215346546005114e-05, |
| "loss": 3.5727, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1206960598494594e-05, |
| "loss": 3.5548, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.11985910297878e-05, |
| "loss": 3.5681, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.119020508227728e-05, |
| "loss": 3.5542, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.118181913476676e-05, |
| "loss": 3.5595, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.117343318725624e-05, |
| "loss": 3.5586, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.116506361854946e-05, |
| "loss": 3.5655, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.115667767103894e-05, |
| "loss": 3.5653, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.114830810233215e-05, |
| "loss": 3.559, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.113992215482163e-05, |
| "loss": 3.5449, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.113153620731111e-05, |
| "loss": 3.5522, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.112315025980059e-05, |
| "loss": 3.5495, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.111476431229007e-05, |
| "loss": 3.5539, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.110637836477955e-05, |
| "loss": 3.5585, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.109799241726903e-05, |
| "loss": 3.5583, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1089622848562236e-05, |
| "loss": 3.5781, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1081236901051716e-05, |
| "loss": 3.5653, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1072850953541196e-05, |
| "loss": 3.5628, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.106446500603068e-05, |
| "loss": 3.5622, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.105609543732389e-05, |
| "loss": 3.5724, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.104770948981337e-05, |
| "loss": 3.548, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.103932354230285e-05, |
| "loss": 3.558, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1030937594792325e-05, |
| "loss": 3.5629, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1022551647281805e-05, |
| "loss": 3.5502, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.101418207857502e-05, |
| "loss": 3.5443, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.10057961310645e-05, |
| "loss": 3.5561, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0997410183553974e-05, |
| "loss": 3.5639, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0989024236043454e-05, |
| "loss": 3.5639, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.098065466733667e-05, |
| "loss": 3.5575, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.097226871982615e-05, |
| "loss": 3.5623, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.096388277231563e-05, |
| "loss": 3.5526, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.095549682480511e-05, |
| "loss": 3.5473, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0947127256098326e-05, |
| "loss": 3.5463, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.09387413085878e-05, |
| "loss": 3.5582, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.093035536107728e-05, |
| "loss": 3.5463, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.092196941356676e-05, |
| "loss": 3.5566, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0913616223663704e-05, |
| "loss": 3.5525, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0905230276153184e-05, |
| "loss": 3.5713, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0896844328642664e-05, |
| "loss": 3.549, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0888458381132144e-05, |
| "loss": 3.5573, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0880072433621624e-05, |
| "loss": 3.5598, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0871686486111103e-05, |
| "loss": 3.5509, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0863300538600583e-05, |
| "loss": 3.559, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0854914591090063e-05, |
| "loss": 3.5404, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.084654502238327e-05, |
| "loss": 3.5524, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.083815907487275e-05, |
| "loss": 3.5576, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.082977312736223e-05, |
| "loss": 3.5524, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.082140355865545e-05, |
| "loss": 3.5514, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.081301761114492e-05, |
| "loss": 3.5313, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.08046316636344e-05, |
| "loss": 3.5503, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.079624571612388e-05, |
| "loss": 3.5333, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.078785976861336e-05, |
| "loss": 3.5677, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.077947382110284e-05, |
| "loss": 3.5461, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.077108787359233e-05, |
| "loss": 3.5632, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.076270192608181e-05, |
| "loss": 3.5577, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.075433235737502e-05, |
| "loss": 3.5389, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.07459464098645e-05, |
| "loss": 3.545, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.073756046235398e-05, |
| "loss": 3.5483, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.072917451484346e-05, |
| "loss": 3.5435, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0720804946136666e-05, |
| "loss": 3.5368, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0712418998626146e-05, |
| "loss": 3.5607, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0704033051115626e-05, |
| "loss": 3.557, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0695647103605106e-05, |
| "loss": 3.5413, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0687277534898315e-05, |
| "loss": 3.534, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0678891587387795e-05, |
| "loss": 3.5447, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.067050563987728e-05, |
| "loss": 3.5477, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.066211969236676e-05, |
| "loss": 3.5617, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.065375012365997e-05, |
| "loss": 3.554, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.064536417614945e-05, |
| "loss": 3.5541, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.063697822863893e-05, |
| "loss": 3.5528, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.062859228112841e-05, |
| "loss": 3.5554, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.062022271242162e-05, |
| "loss": 3.5535, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.06118367649111e-05, |
| "loss": 3.5487, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.060345081740058e-05, |
| "loss": 3.555, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.059506486989006e-05, |
| "loss": 3.5441, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.058669530118327e-05, |
| "loss": 3.5514, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.057830935367275e-05, |
| "loss": 3.5587, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.056992340616223e-05, |
| "loss": 3.5537, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0561537458651715e-05, |
| "loss": 3.541, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0553167889944924e-05, |
| "loss": 3.5295, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0544781942434404e-05, |
| "loss": 3.5518, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0536395994923884e-05, |
| "loss": 3.5496, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0528010047413364e-05, |
| "loss": 3.5504, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.051964047870657e-05, |
| "loss": 3.5496, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0511254531196053e-05, |
| "loss": 3.5515, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0502868583685533e-05, |
| "loss": 3.548, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0494482636175013e-05, |
| "loss": 3.5394, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0486113067468226e-05, |
| "loss": 3.5454, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0477727119957706e-05, |
| "loss": 3.5514, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0469341172447186e-05, |
| "loss": 3.5601, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0460955224936666e-05, |
| "loss": 3.5565, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0452585656229875e-05, |
| "loss": 3.5539, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0444199708719358e-05, |
| "loss": 3.5422, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0435813761208838e-05, |
| "loss": 3.5494, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0427427813698318e-05, |
| "loss": 3.5511, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0419058244991527e-05, |
| "loss": 3.5481, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0410672297481007e-05, |
| "loss": 3.5491, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0402286349970487e-05, |
| "loss": 3.5458, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0393900402459967e-05, |
| "loss": 3.5377, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.038553083375318e-05, |
| "loss": 3.5573, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.037714488624266e-05, |
| "loss": 3.5333, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.036875893873214e-05, |
| "loss": 3.5457, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.036037299122162e-05, |
| "loss": 3.5519, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.035200342251483e-05, |
| "loss": 3.5472, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.034361747500431e-05, |
| "loss": 3.5425, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.033523152749379e-05, |
| "loss": 3.5499, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.032684557998327e-05, |
| "loss": 3.5429, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.031847601127648e-05, |
| "loss": 3.5508, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.031009006376596e-05, |
| "loss": 3.549, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.030170411625544e-05, |
| "loss": 3.5497, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.029331816874492e-05, |
| "loss": 3.5537, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0284948600038133e-05, |
| "loss": 3.5604, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0276562652527613e-05, |
| "loss": 3.5474, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0268176705017093e-05, |
| "loss": 3.5467, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0259790757506573e-05, |
| "loss": 3.547, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0251421188799782e-05, |
| "loss": 3.5488, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0243035241289265e-05, |
| "loss": 3.5381, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0234649293778745e-05, |
| "loss": 3.5424, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0226263346268225e-05, |
| "loss": 3.5479, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0217893777561434e-05, |
| "loss": 3.5551, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0209507830050914e-05, |
| "loss": 3.5472, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0201121882540394e-05, |
| "loss": 3.5514, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0192735935029874e-05, |
| "loss": 3.539, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0184366366323087e-05, |
| "loss": 3.5446, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0175980418812567e-05, |
| "loss": 3.5452, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0167594471302047e-05, |
| "loss": 3.5568, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0159208523791527e-05, |
| "loss": 3.555, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0150838955084736e-05, |
| "loss": 3.5375, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0142453007574216e-05, |
| "loss": 3.5465, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.01340670600637e-05, |
| "loss": 3.555, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.012568111255318e-05, |
| "loss": 3.5488, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0117311543846388e-05, |
| "loss": 3.5451, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0108925596335868e-05, |
| "loss": 3.5439, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0100539648825348e-05, |
| "loss": 3.5562, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0092153701314825e-05, |
| "loss": 3.5599, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.008378413260804e-05, |
| "loss": 3.5476, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.007539818509752e-05, |
| "loss": 3.5486, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0067012237587e-05, |
| "loss": 3.5451, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0058626290076473e-05, |
| "loss": 3.5438, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.005025672136969e-05, |
| "loss": 3.5409, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.004187077385917e-05, |
| "loss": 3.5589, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0033484826348653e-05, |
| "loss": 3.5413, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0025098878838126e-05, |
| "loss": 3.5526, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.001672931013134e-05, |
| "loss": 3.5426, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 3.000834336262082e-05, |
| "loss": 3.5491, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8650646209716797, |
| "eval_runtime": 315.9203, |
| "eval_samples_per_second": 1207.871, |
| "eval_steps_per_second": 37.747, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9999957415110298e-05, |
| "loss": 3.5346, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9991571467599778e-05, |
| "loss": 3.5418, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9983185520089258e-05, |
| "loss": 3.555, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9974815951382474e-05, |
| "loss": 3.5485, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9966430003871947e-05, |
| "loss": 3.561, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9958044056361427e-05, |
| "loss": 3.5377, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9949658108850907e-05, |
| "loss": 3.5524, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9941288540144123e-05, |
| "loss": 3.5422, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.99329025926336e-05, |
| "loss": 3.5466, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.992451664512308e-05, |
| "loss": 3.5438, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.991613069761256e-05, |
| "loss": 3.55, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.990776112890577e-05, |
| "loss": 3.5499, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9899391560198984e-05, |
| "loss": 3.5432, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9891005612688468e-05, |
| "loss": 3.5316, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9882619665177948e-05, |
| "loss": 3.5415, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.987423371766742e-05, |
| "loss": 3.5305, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.98658477701569e-05, |
| "loss": 3.5426, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.985746182264638e-05, |
| "loss": 3.5445, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.984907587513586e-05, |
| "loss": 3.5424, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9840706306429073e-05, |
| "loss": 3.5632, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9832320358918553e-05, |
| "loss": 3.5504, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9823934411408033e-05, |
| "loss": 3.5508, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9815548463897513e-05, |
| "loss": 3.5461, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9807178895190722e-05, |
| "loss": 3.5559, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9798792947680206e-05, |
| "loss": 3.5413, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9790407000169686e-05, |
| "loss": 3.541, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9782021052659165e-05, |
| "loss": 3.548, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9773635105148645e-05, |
| "loss": 3.5388, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9765265536441855e-05, |
| "loss": 3.5257, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9756879588931334e-05, |
| "loss": 3.5418, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9748493641420814e-05, |
| "loss": 3.5467, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9740107693910298e-05, |
| "loss": 3.55, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9731738125203507e-05, |
| "loss": 3.5458, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9723352177692987e-05, |
| "loss": 3.5506, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9714966230182467e-05, |
| "loss": 3.5342, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9706580282671947e-05, |
| "loss": 3.5401, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.969821071396516e-05, |
| "loss": 3.527, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.968982476645464e-05, |
| "loss": 3.5447, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.968143881894412e-05, |
| "loss": 3.5325, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.96730528714336e-05, |
| "loss": 3.5413, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9664683302726808e-05, |
| "loss": 3.5395, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9656297355216288e-05, |
| "loss": 3.551, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9647911407705768e-05, |
| "loss": 3.543, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.963952546019525e-05, |
| "loss": 3.5384, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.963115589148846e-05, |
| "loss": 3.5456, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.962276994397794e-05, |
| "loss": 3.5358, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.961438399646742e-05, |
| "loss": 3.5443, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.96059980489569e-05, |
| "loss": 3.5263, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9597628480250113e-05, |
| "loss": 3.5356, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9589242532739593e-05, |
| "loss": 3.5468, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9580856585229073e-05, |
| "loss": 3.5365, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9572487016522282e-05, |
| "loss": 3.5394, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9564101069011762e-05, |
| "loss": 3.523, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9555715121501242e-05, |
| "loss": 3.5355, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9547329173990722e-05, |
| "loss": 3.5158, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9538943226480205e-05, |
| "loss": 3.5534, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9530557278969685e-05, |
| "loss": 3.5303, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9522171331459165e-05, |
| "loss": 3.5504, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9513785383948645e-05, |
| "loss": 3.5436, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9505415815241854e-05, |
| "loss": 3.5268, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9497029867731334e-05, |
| "loss": 3.5295, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9488643920220814e-05, |
| "loss": 3.5342, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9480257972710297e-05, |
| "loss": 3.5302, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9471888404003506e-05, |
| "loss": 3.5212, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9463502456492986e-05, |
| "loss": 3.5472, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9455116508982466e-05, |
| "loss": 3.5425, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9446730561471946e-05, |
| "loss": 3.5276, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.943836099276516e-05, |
| "loss": 3.5221, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.942997504525464e-05, |
| "loss": 3.53, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.942158909774412e-05, |
| "loss": 3.5386, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.94132031502336e-05, |
| "loss": 3.5443, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9404833581526808e-05, |
| "loss": 3.5387, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9396447634016288e-05, |
| "loss": 3.5405, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9388061686505768e-05, |
| "loss": 3.5406, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9379675738995248e-05, |
| "loss": 3.5404, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.937130617028846e-05, |
| "loss": 3.5348, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.936292022277794e-05, |
| "loss": 3.5364, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.935453427526742e-05, |
| "loss": 3.5405, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.93461483277569e-05, |
| "loss": 3.5307, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9337778759050112e-05, |
| "loss": 3.5351, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9329392811539592e-05, |
| "loss": 3.5412, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9321006864029072e-05, |
| "loss": 3.5457, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9312620916518552e-05, |
| "loss": 3.5257, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.930425134781176e-05, |
| "loss": 3.5194, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.929586540030124e-05, |
| "loss": 3.5345, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.928747945279072e-05, |
| "loss": 3.5361, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.92790935052802e-05, |
| "loss": 3.5377, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9270723936573414e-05, |
| "loss": 3.5346, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9262337989062894e-05, |
| "loss": 3.5381, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9253952041552374e-05, |
| "loss": 3.5308, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.924556609404185e-05, |
| "loss": 3.5263, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9237196525335063e-05, |
| "loss": 3.5324, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9228810577824546e-05, |
| "loss": 3.534, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9220424630314026e-05, |
| "loss": 3.5431, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.92120386828035e-05, |
| "loss": 3.5462, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9203669114096715e-05, |
| "loss": 3.541, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9195283166586195e-05, |
| "loss": 3.5248, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9186897219075675e-05, |
| "loss": 3.5386, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.917851127156515e-05, |
| "loss": 3.5346, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9170141702858367e-05, |
| "loss": 3.5372, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9161755755347847e-05, |
| "loss": 3.5333, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.915336980783732e-05, |
| "loss": 3.5315, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.91449838603268e-05, |
| "loss": 3.5267, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9136614291620016e-05, |
| "loss": 3.5442, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.91282283441095e-05, |
| "loss": 3.5182, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9119842396598973e-05, |
| "loss": 3.532, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9111456449088453e-05, |
| "loss": 3.5379, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.910308688038167e-05, |
| "loss": 3.5284, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9094700932871145e-05, |
| "loss": 3.5288, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9086314985360625e-05, |
| "loss": 3.5387, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9077929037850105e-05, |
| "loss": 3.5274, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.906955946914332e-05, |
| "loss": 3.5403, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9061173521632794e-05, |
| "loss": 3.5364, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9052787574122274e-05, |
| "loss": 3.5332, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9044401626611754e-05, |
| "loss": 3.5367, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.903603205790497e-05, |
| "loss": 3.5502, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9027646110394447e-05, |
| "loss": 3.5337, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9019260162883927e-05, |
| "loss": 3.5338, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9010874215373407e-05, |
| "loss": 3.5351, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9002504646666616e-05, |
| "loss": 3.5329, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.89941186991561e-05, |
| "loss": 3.5285, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.898573275164558e-05, |
| "loss": 3.5268, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.897734680413506e-05, |
| "loss": 3.5348, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8968977235428268e-05, |
| "loss": 3.5438, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8960591287917748e-05, |
| "loss": 3.536, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8952205340407228e-05, |
| "loss": 3.5356, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8943819392896708e-05, |
| "loss": 3.5253, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.893544982418992e-05, |
| "loss": 3.5295, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.89270638766794e-05, |
| "loss": 3.5297, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.891867792916888e-05, |
| "loss": 3.5441, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.891029198165836e-05, |
| "loss": 3.544, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.890192241295157e-05, |
| "loss": 3.5264, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8893536465441053e-05, |
| "loss": 3.53, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8885150517930533e-05, |
| "loss": 3.5378, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8876764570420013e-05, |
| "loss": 3.531, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.886839500171322e-05, |
| "loss": 3.5377, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.88600090542027e-05, |
| "loss": 3.5282, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.885162310669218e-05, |
| "loss": 3.5442, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8843253537985394e-05, |
| "loss": 3.5429, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8834867590474874e-05, |
| "loss": 3.5361, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8826481642964354e-05, |
| "loss": 3.5365, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8818095695453834e-05, |
| "loss": 3.5344, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8809726126747043e-05, |
| "loss": 3.5286, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8801340179236523e-05, |
| "loss": 3.5303, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8792954231726006e-05, |
| "loss": 3.5475, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8784568284215486e-05, |
| "loss": 3.5266, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8776198715508695e-05, |
| "loss": 3.5353, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8767812767998175e-05, |
| "loss": 3.5342, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8759426820487655e-05, |
| "loss": 3.5327, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8676207065582275, |
| "eval_runtime": 303.4765, |
| "eval_samples_per_second": 1257.399, |
| "eval_steps_per_second": 39.295, |
| "step": 1297440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8751040872977135e-05, |
| "loss": 3.5217, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8742671304270348e-05, |
| "loss": 3.5295, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8734285356759828e-05, |
| "loss": 3.5432, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8725899409249308e-05, |
| "loss": 3.529, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8717513461738788e-05, |
| "loss": 3.549, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8709143893031997e-05, |
| "loss": 3.5213, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8700757945521477e-05, |
| "loss": 3.5404, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.869237199801096e-05, |
| "loss": 3.5274, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.868398605050044e-05, |
| "loss": 3.5316, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.867561648179365e-05, |
| "loss": 3.5323, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.866723053428313e-05, |
| "loss": 3.5336, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.865884458677261e-05, |
| "loss": 3.5368, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.865047501806582e-05, |
| "loss": 3.5304, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.86420890705553e-05, |
| "loss": 3.5218, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.863370312304478e-05, |
| "loss": 3.5244, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.862531717553426e-05, |
| "loss": 3.5239, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.861693122802374e-05, |
| "loss": 3.5226, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.860854528051322e-05, |
| "loss": 3.5298, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.86001593330027e-05, |
| "loss": 3.5289, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8591789764295914e-05, |
| "loss": 3.5534, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8583403816785394e-05, |
| "loss": 3.535, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8575017869274874e-05, |
| "loss": 3.5374, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8566631921764354e-05, |
| "loss": 3.5334, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8558262353057563e-05, |
| "loss": 3.538, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8549876405547043e-05, |
| "loss": 3.5269, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8541490458036523e-05, |
| "loss": 3.5321, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8533104510526006e-05, |
| "loss": 3.5323, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8524734941819215e-05, |
| "loss": 3.5239, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8516348994308695e-05, |
| "loss": 3.5105, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8507963046798175e-05, |
| "loss": 3.531, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8499577099287655e-05, |
| "loss": 3.5313, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8491207530580867e-05, |
| "loss": 3.5347, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8482821583070347e-05, |
| "loss": 3.5323, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8474435635559827e-05, |
| "loss": 3.5339, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8466049688049307e-05, |
| "loss": 3.523, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8457680119342516e-05, |
| "loss": 3.5263, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8449294171831996e-05, |
| "loss": 3.5151, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8440908224321476e-05, |
| "loss": 3.5284, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8432522276810956e-05, |
| "loss": 3.5217, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.842415270810417e-05, |
| "loss": 3.5244, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.841576676059365e-05, |
| "loss": 3.5285, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.840738081308313e-05, |
| "loss": 3.5375, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.839899486557261e-05, |
| "loss": 3.5303, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.839062529686582e-05, |
| "loss": 3.5271, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.83822393493553e-05, |
| "loss": 3.5349, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.837385340184478e-05, |
| "loss": 3.5231, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.836546745433426e-05, |
| "loss": 3.5284, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.835709788562747e-05, |
| "loss": 3.5198, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.834871193811695e-05, |
| "loss": 3.5188, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.834032599060643e-05, |
| "loss": 3.5319, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.833194004309591e-05, |
| "loss": 3.5223, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.832358685319285e-05, |
| "loss": 3.5235, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.831520090568233e-05, |
| "loss": 3.5107, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.830681495817181e-05, |
| "loss": 3.5212, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.829842901066129e-05, |
| "loss": 3.5087, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.829004306315077e-05, |
| "loss": 3.5358, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8281657115640255e-05, |
| "loss": 3.5206, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8273271168129735e-05, |
| "loss": 3.5306, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8264885220619215e-05, |
| "loss": 3.5314, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8256515651912424e-05, |
| "loss": 3.5154, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8248129704401904e-05, |
| "loss": 3.513, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8239743756891384e-05, |
| "loss": 3.5226, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8231357809380863e-05, |
| "loss": 3.5144, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8222988240674076e-05, |
| "loss": 3.5107, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8214602293163556e-05, |
| "loss": 3.5296, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8206216345653036e-05, |
| "loss": 3.5301, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8197830398142516e-05, |
| "loss": 3.513, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8189460829435725e-05, |
| "loss": 3.5112, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8181074881925208e-05, |
| "loss": 3.5144, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8172688934414688e-05, |
| "loss": 3.5245, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8164302986904168e-05, |
| "loss": 3.5244, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8155933418197377e-05, |
| "loss": 3.5262, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8147547470686857e-05, |
| "loss": 3.5293, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8139161523176337e-05, |
| "loss": 3.529, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8130775575665817e-05, |
| "loss": 3.532, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.812240600695903e-05, |
| "loss": 3.5175, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.811402005944851e-05, |
| "loss": 3.5266, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.810563411193799e-05, |
| "loss": 3.5275, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.809724816442747e-05, |
| "loss": 3.5154, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.808887859572068e-05, |
| "loss": 3.5268, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8080492648210162e-05, |
| "loss": 3.5285, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8072106700699642e-05, |
| "loss": 3.5322, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8063720753189115e-05, |
| "loss": 3.5097, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.805535118448233e-05, |
| "loss": 3.5072, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.804696523697181e-05, |
| "loss": 3.52, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.803857928946129e-05, |
| "loss": 3.5219, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8030193341950767e-05, |
| "loss": 3.5262, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8021823773243983e-05, |
| "loss": 3.5198, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8013437825733463e-05, |
| "loss": 3.5218, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8005051878222943e-05, |
| "loss": 3.5204, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7996665930712416e-05, |
| "loss": 3.512, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7988296362005632e-05, |
| "loss": 3.5182, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7979910414495116e-05, |
| "loss": 3.5212, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.797152446698459e-05, |
| "loss": 3.5303, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.796313851947407e-05, |
| "loss": 3.5316, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7954768950767285e-05, |
| "loss": 3.532, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7946383003256765e-05, |
| "loss": 3.5136, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.793799705574624e-05, |
| "loss": 3.5228, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.792961110823572e-05, |
| "loss": 3.5183, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7921241539528937e-05, |
| "loss": 3.5296, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7912855592018417e-05, |
| "loss": 3.5178, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.790446964450789e-05, |
| "loss": 3.5161, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.789608369699737e-05, |
| "loss": 3.5131, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7887714128290586e-05, |
| "loss": 3.532, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7879328180780062e-05, |
| "loss": 3.5034, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7870942233269542e-05, |
| "loss": 3.5212, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7862556285759022e-05, |
| "loss": 3.5233, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7854186717052238e-05, |
| "loss": 3.5144, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7845800769541715e-05, |
| "loss": 3.5143, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7837414822031195e-05, |
| "loss": 3.5272, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7829028874520675e-05, |
| "loss": 3.5132, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.782065930581389e-05, |
| "loss": 3.5228, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7812273358303364e-05, |
| "loss": 3.5254, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7803887410792844e-05, |
| "loss": 3.5192, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7795501463282324e-05, |
| "loss": 3.525, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7787131894575536e-05, |
| "loss": 3.5368, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7778745947065016e-05, |
| "loss": 3.5212, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7770359999554496e-05, |
| "loss": 3.5209, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7761974052043976e-05, |
| "loss": 3.5223, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7753604483337185e-05, |
| "loss": 3.5197, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.774521853582667e-05, |
| "loss": 3.5141, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.773683258831615e-05, |
| "loss": 3.5151, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.772844664080563e-05, |
| "loss": 3.5188, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7720077072098838e-05, |
| "loss": 3.5315, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7711691124588317e-05, |
| "loss": 3.5222, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7703305177077797e-05, |
| "loss": 3.5254, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7694919229567277e-05, |
| "loss": 3.5122, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.768654966086049e-05, |
| "loss": 3.5125, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.767816371334997e-05, |
| "loss": 3.5172, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.766977776583945e-05, |
| "loss": 3.5337, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.766139181832893e-05, |
| "loss": 3.5304, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.765302224962214e-05, |
| "loss": 3.5129, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7644636302111622e-05, |
| "loss": 3.5163, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7636250354601102e-05, |
| "loss": 3.5259, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7627864407090582e-05, |
| "loss": 3.5188, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.761949483838379e-05, |
| "loss": 3.5273, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.761110889087327e-05, |
| "loss": 3.515, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.760272294336275e-05, |
| "loss": 3.5279, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.759433699585223e-05, |
| "loss": 3.5285, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7585967427145444e-05, |
| "loss": 3.5215, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7577581479634923e-05, |
| "loss": 3.5235, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7569195532124403e-05, |
| "loss": 3.5241, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7560825963417613e-05, |
| "loss": 3.5156, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7552440015907092e-05, |
| "loss": 3.5179, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7544054068396576e-05, |
| "loss": 3.5317, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7535668120886056e-05, |
| "loss": 3.5152, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7527298552179265e-05, |
| "loss": 3.5233, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7518912604668745e-05, |
| "loss": 3.5194, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7510526657158225e-05, |
| "loss": 3.5175, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.868738889694214, |
| "eval_runtime": 303.6017, |
| "eval_samples_per_second": 1256.88, |
| "eval_steps_per_second": 39.278, |
| "step": 1373760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7502140709647705e-05, |
| "loss": 3.5077, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7493754762137185e-05, |
| "loss": 3.5181, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7485368814626665e-05, |
| "loss": 3.5296, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7476982867116148e-05, |
| "loss": 3.5133, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7468613298409357e-05, |
| "loss": 3.5378, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7460227350898837e-05, |
| "loss": 3.5103, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7451841403388317e-05, |
| "loss": 3.5286, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7443455455877797e-05, |
| "loss": 3.5093, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.743508588717101e-05, |
| "loss": 3.5236, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.742669993966049e-05, |
| "loss": 3.5181, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.741831399214997e-05, |
| "loss": 3.5185, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.740992804463945e-05, |
| "loss": 3.5272, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.740157485473639e-05, |
| "loss": 3.5138, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.739318890722587e-05, |
| "loss": 3.5104, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.738480295971535e-05, |
| "loss": 3.5127, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.737641701220483e-05, |
| "loss": 3.5089, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.736803106469431e-05, |
| "loss": 3.5126, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.735964511718379e-05, |
| "loss": 3.516, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.735125916967327e-05, |
| "loss": 3.5148, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.734287322216275e-05, |
| "loss": 3.538, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7334503653455963e-05, |
| "loss": 3.5216, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7326117705945443e-05, |
| "loss": 3.5247, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7317731758434923e-05, |
| "loss": 3.5205, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7309362189728132e-05, |
| "loss": 3.5239, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7300976242217612e-05, |
| "loss": 3.5171, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7292590294707092e-05, |
| "loss": 3.5162, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7284204347196572e-05, |
| "loss": 3.5177, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7275834778489784e-05, |
| "loss": 3.511, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7267448830979264e-05, |
| "loss": 3.4986, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7259062883468744e-05, |
| "loss": 3.5145, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7250676935958224e-05, |
| "loss": 3.5256, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7242307367251433e-05, |
| "loss": 3.5195, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7233921419740917e-05, |
| "loss": 3.5237, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7225535472230397e-05, |
| "loss": 3.5222, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7217149524719877e-05, |
| "loss": 3.5093, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7208779956013086e-05, |
| "loss": 3.5123, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7200394008502566e-05, |
| "loss": 3.5057, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7192008060992046e-05, |
| "loss": 3.5118, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7183622113481526e-05, |
| "loss": 3.5079, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7175252544774738e-05, |
| "loss": 3.5124, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7166866597264218e-05, |
| "loss": 3.5154, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7158480649753698e-05, |
| "loss": 3.5249, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7150094702243178e-05, |
| "loss": 3.5146, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7141725133536387e-05, |
| "loss": 3.5158, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.713333918602587e-05, |
| "loss": 3.5215, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.712495323851535e-05, |
| "loss": 3.5069, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.711656729100483e-05, |
| "loss": 3.515, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.710819772229804e-05, |
| "loss": 3.5095, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.709981177478752e-05, |
| "loss": 3.5026, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7091425827277e-05, |
| "loss": 3.5198, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.708303987976648e-05, |
| "loss": 3.5127, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.707468668986342e-05, |
| "loss": 3.5094, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.70663007423529e-05, |
| "loss": 3.5044, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.705791479484238e-05, |
| "loss": 3.5077, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.704952884733186e-05, |
| "loss": 3.4925, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.704114289982134e-05, |
| "loss": 3.5189, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7032756952310824e-05, |
| "loss": 3.5105, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7024371004800304e-05, |
| "loss": 3.5177, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7015985057289784e-05, |
| "loss": 3.5202, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7007615488582993e-05, |
| "loss": 3.5053, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6999229541072473e-05, |
| "loss": 3.4984, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6990843593561953e-05, |
| "loss": 3.5139, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6982457646051433e-05, |
| "loss": 3.5009, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6974088077344645e-05, |
| "loss": 3.4975, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6965702129834125e-05, |
| "loss": 3.5196, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6957316182323605e-05, |
| "loss": 3.5181, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6948930234813085e-05, |
| "loss": 3.5007, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6940560666106294e-05, |
| "loss": 3.4999, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6932174718595778e-05, |
| "loss": 3.5013, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6923788771085258e-05, |
| "loss": 3.5091, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6915402823574738e-05, |
| "loss": 3.5147, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6907033254867947e-05, |
| "loss": 3.5127, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6898647307357427e-05, |
| "loss": 3.5196, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6890261359846907e-05, |
| "loss": 3.5124, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6881875412336383e-05, |
| "loss": 3.521, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.68735058436296e-05, |
| "loss": 3.5051, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.686511989611908e-05, |
| "loss": 3.5139, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.685673394860856e-05, |
| "loss": 3.514, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6848348001098032e-05, |
| "loss": 3.5001, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6839978432391248e-05, |
| "loss": 3.5138, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.683159248488073e-05, |
| "loss": 3.5196, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.682320653737021e-05, |
| "loss": 3.5163, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6814820589859685e-05, |
| "loss": 3.4991, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.68064510211529e-05, |
| "loss": 3.4937, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.679806507364238e-05, |
| "loss": 3.5093, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6789679126131857e-05, |
| "loss": 3.5044, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6781293178621337e-05, |
| "loss": 3.5164, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6772923609914553e-05, |
| "loss": 3.5096, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6764537662404033e-05, |
| "loss": 3.5108, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6756151714893506e-05, |
| "loss": 3.5079, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6747765767382986e-05, |
| "loss": 3.501, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6739396198676202e-05, |
| "loss": 3.5063, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6731010251165685e-05, |
| "loss": 3.5049, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.672262430365516e-05, |
| "loss": 3.5206, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6714238356144638e-05, |
| "loss": 3.5239, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6705868787437854e-05, |
| "loss": 3.5158, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.669748283992733e-05, |
| "loss": 3.4981, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.668909689241681e-05, |
| "loss": 3.5143, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.668071094490629e-05, |
| "loss": 3.5049, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6672341376199506e-05, |
| "loss": 3.5168, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.666395542868898e-05, |
| "loss": 3.502, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.665556948117846e-05, |
| "loss": 3.5086, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.664718353366794e-05, |
| "loss": 3.4987, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6638813964961155e-05, |
| "loss": 3.5184, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6630428017450632e-05, |
| "loss": 3.4927, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6622042069940112e-05, |
| "loss": 3.5098, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6613656122429592e-05, |
| "loss": 3.5024, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.66052865537228e-05, |
| "loss": 3.5072, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6596900606212284e-05, |
| "loss": 3.5016, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6588514658701764e-05, |
| "loss": 3.5149, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6580128711191244e-05, |
| "loss": 3.507, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6571759142484453e-05, |
| "loss": 3.5059, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6563373194973933e-05, |
| "loss": 3.5144, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6554987247463413e-05, |
| "loss": 3.5016, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6546601299952893e-05, |
| "loss": 3.5158, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6538231731246106e-05, |
| "loss": 3.5274, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6529845783735586e-05, |
| "loss": 3.5051, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6521459836225066e-05, |
| "loss": 3.5071, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6513073888714546e-05, |
| "loss": 3.5107, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6504704320007755e-05, |
| "loss": 3.5064, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6496318372497238e-05, |
| "loss": 3.5005, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6487932424986718e-05, |
| "loss": 3.5038, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6479546477476198e-05, |
| "loss": 3.5045, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6471176908769407e-05, |
| "loss": 3.5198, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6462790961258887e-05, |
| "loss": 3.5089, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6454405013748367e-05, |
| "loss": 3.5097, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6446019066237847e-05, |
| "loss": 3.4995, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.643764949753106e-05, |
| "loss": 3.5029, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.642926355002054e-05, |
| "loss": 3.5035, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.642087760251002e-05, |
| "loss": 3.5223, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.64124916549995e-05, |
| "loss": 3.5195, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.640412208629271e-05, |
| "loss": 3.5049, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.639573613878219e-05, |
| "loss": 3.4988, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.638735019127167e-05, |
| "loss": 3.5122, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.637896424376115e-05, |
| "loss": 3.5046, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.637059467505436e-05, |
| "loss": 3.5157, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.636220872754384e-05, |
| "loss": 3.4992, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.635382278003332e-05, |
| "loss": 3.5191, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.63454368325228e-05, |
| "loss": 3.5116, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6337067263816013e-05, |
| "loss": 3.5137, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6328681316305493e-05, |
| "loss": 3.5108, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6320295368794973e-05, |
| "loss": 3.5097, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6311909421284453e-05, |
| "loss": 3.5021, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6303539852577662e-05, |
| "loss": 3.5073, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6295153905067142e-05, |
| "loss": 3.5176, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6286767957556625e-05, |
| "loss": 3.5063, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6278382010046105e-05, |
| "loss": 3.5072, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6270012441339314e-05, |
| "loss": 3.5066, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6261626493828794e-05, |
| "loss": 3.5057, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8702433109283447, |
| "eval_runtime": 308.3176, |
| "eval_samples_per_second": 1237.656, |
| "eval_steps_per_second": 38.678, |
| "step": 1450080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6253240546318274e-05, |
| "loss": 3.4955, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6244854598807754e-05, |
| "loss": 3.5007, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6236468651297234e-05, |
| "loss": 3.5183, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6228082703786718e-05, |
| "loss": 3.5017, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6219713135079927e-05, |
| "loss": 3.5281, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6211327187569407e-05, |
| "loss": 3.4945, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6202941240058887e-05, |
| "loss": 3.5176, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6194555292548367e-05, |
| "loss": 3.4998, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.618618572384158e-05, |
| "loss": 3.5081, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.617779977633106e-05, |
| "loss": 3.508, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.616941382882054e-05, |
| "loss": 3.505, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.616102788131002e-05, |
| "loss": 3.5152, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6152674691406957e-05, |
| "loss": 3.5034, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.614428874389644e-05, |
| "loss": 3.4962, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.613590279638592e-05, |
| "loss": 3.5034, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.61275168488754e-05, |
| "loss": 3.4935, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.611913090136488e-05, |
| "loss": 3.5015, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.611074495385436e-05, |
| "loss": 3.5028, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.610235900634384e-05, |
| "loss": 3.5007, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.609397305883332e-05, |
| "loss": 3.525, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6085603490126533e-05, |
| "loss": 3.5121, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6077217542616013e-05, |
| "loss": 3.5151, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6068831595105493e-05, |
| "loss": 3.5064, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6060445647594973e-05, |
| "loss": 3.5094, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.605207607888818e-05, |
| "loss": 3.5037, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.604369013137766e-05, |
| "loss": 3.5043, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.603530418386714e-05, |
| "loss": 3.5078, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6026934615160354e-05, |
| "loss": 3.4961, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6018548667649834e-05, |
| "loss": 3.4916, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6010162720139314e-05, |
| "loss": 3.4993, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6001776772628794e-05, |
| "loss": 3.5131, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5993407203922003e-05, |
| "loss": 3.5076, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5985021256411486e-05, |
| "loss": 3.5123, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5976635308900966e-05, |
| "loss": 3.5103, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5968249361390446e-05, |
| "loss": 3.4927, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5959879792683655e-05, |
| "loss": 3.5028, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5951493845173135e-05, |
| "loss": 3.4962, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5943107897662615e-05, |
| "loss": 3.4946, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5934721950152095e-05, |
| "loss": 3.4964, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5926352381445308e-05, |
| "loss": 3.4956, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5917966433934788e-05, |
| "loss": 3.5061, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5909580486424268e-05, |
| "loss": 3.5105, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5901194538913748e-05, |
| "loss": 3.5048, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5892824970206957e-05, |
| "loss": 3.5001, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.588443902269644e-05, |
| "loss": 3.5109, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.587605307518592e-05, |
| "loss": 3.4968, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.58676671276754e-05, |
| "loss": 3.5043, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.585929755896861e-05, |
| "loss": 3.4969, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.585091161145809e-05, |
| "loss": 3.487, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.584252566394757e-05, |
| "loss": 3.5099, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.583413971643705e-05, |
| "loss": 3.5005, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.582577014773026e-05, |
| "loss": 3.5007, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.581740057902347e-05, |
| "loss": 3.4903, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.580901463151295e-05, |
| "loss": 3.4946, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.580062868400243e-05, |
| "loss": 3.4814, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.579224273649191e-05, |
| "loss": 3.5038, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5783856788981394e-05, |
| "loss": 3.5007, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5775470841470874e-05, |
| "loss": 3.5037, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5767084893960354e-05, |
| "loss": 3.5078, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5758715325253563e-05, |
| "loss": 3.4948, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5750329377743043e-05, |
| "loss": 3.4867, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5741943430232523e-05, |
| "loss": 3.5025, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5733557482722003e-05, |
| "loss": 3.4889, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5725187914015215e-05, |
| "loss": 3.4866, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5716801966504695e-05, |
| "loss": 3.5056, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5708416018994175e-05, |
| "loss": 3.5013, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5700030071483648e-05, |
| "loss": 3.4901, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5691660502776864e-05, |
| "loss": 3.4901, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5683274555266344e-05, |
| "loss": 3.4829, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5674888607755827e-05, |
| "loss": 3.4922, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.56665026602453e-05, |
| "loss": 3.5045, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5658133091538516e-05, |
| "loss": 3.5027, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5649747144027996e-05, |
| "loss": 3.5053, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5641361196517476e-05, |
| "loss": 3.4977, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5632975249006953e-05, |
| "loss": 3.5128, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.562460568030017e-05, |
| "loss": 3.4909, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.561621973278965e-05, |
| "loss": 3.5062, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5607833785279122e-05, |
| "loss": 3.4981, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5599447837768602e-05, |
| "loss": 3.4868, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5591078269061818e-05, |
| "loss": 3.5079, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5582692321551298e-05, |
| "loss": 3.5052, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5574306374040774e-05, |
| "loss": 3.5015, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5565920426530254e-05, |
| "loss": 3.4894, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.555755085782347e-05, |
| "loss": 3.4838, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.554916491031295e-05, |
| "loss": 3.4951, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5540778962802427e-05, |
| "loss": 3.4946, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5532393015291906e-05, |
| "loss": 3.5051, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5524023446585122e-05, |
| "loss": 3.4928, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5515637499074596e-05, |
| "loss": 3.5005, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5507251551564075e-05, |
| "loss": 3.4931, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5498865604053555e-05, |
| "loss": 3.4939, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.549049603534677e-05, |
| "loss": 3.4921, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5482110087836248e-05, |
| "loss": 3.4915, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5473724140325728e-05, |
| "loss": 3.5097, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5465338192815208e-05, |
| "loss": 3.5062, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5456968624108424e-05, |
| "loss": 3.509, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5448582676597897e-05, |
| "loss": 3.4829, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.544019672908738e-05, |
| "loss": 3.5016, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.543181078157686e-05, |
| "loss": 3.4936, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.542344121287007e-05, |
| "loss": 3.503, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.541505526535955e-05, |
| "loss": 3.493, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.540666931784903e-05, |
| "loss": 3.494, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.539828337033851e-05, |
| "loss": 3.4866, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.538991380163172e-05, |
| "loss": 3.5081, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.53815278541212e-05, |
| "loss": 3.481, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.537314190661068e-05, |
| "loss": 3.4951, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.536475595910016e-05, |
| "loss": 3.4934, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.535638639039337e-05, |
| "loss": 3.4946, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.534800044288285e-05, |
| "loss": 3.4869, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5339614495372334e-05, |
| "loss": 3.5044, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5331228547861814e-05, |
| "loss": 3.4943, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5322858979155023e-05, |
| "loss": 3.491, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5314473031644503e-05, |
| "loss": 3.5047, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5306087084133983e-05, |
| "loss": 3.4922, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5297701136623463e-05, |
| "loss": 3.504, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5289331567916675e-05, |
| "loss": 3.5134, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5280945620406155e-05, |
| "loss": 3.4973, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5272559672895635e-05, |
| "loss": 3.4958, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5264173725385115e-05, |
| "loss": 3.4945, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5255804156678324e-05, |
| "loss": 3.4943, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5247418209167804e-05, |
| "loss": 3.491, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5239032261657288e-05, |
| "loss": 3.4966, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5230646314146767e-05, |
| "loss": 3.4862, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5222276745439977e-05, |
| "loss": 3.5069, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5213890797929457e-05, |
| "loss": 3.4997, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5205504850418936e-05, |
| "loss": 3.5, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5197118902908416e-05, |
| "loss": 3.4846, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.518874933420163e-05, |
| "loss": 3.4891, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.518036338669111e-05, |
| "loss": 3.4917, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.517197743918059e-05, |
| "loss": 3.5044, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.516359149167007e-05, |
| "loss": 3.5077, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5155221922963278e-05, |
| "loss": 3.497, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5146835975452758e-05, |
| "loss": 3.4819, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.513845002794224e-05, |
| "loss": 3.5014, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.513006408043172e-05, |
| "loss": 3.4893, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.512169451172493e-05, |
| "loss": 3.5052, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.511330856421441e-05, |
| "loss": 3.4872, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.510492261670389e-05, |
| "loss": 3.5036, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.509653666919337e-05, |
| "loss": 3.5017, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5088167100486583e-05, |
| "loss": 3.5043, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5079781152976063e-05, |
| "loss": 3.4993, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5071395205465543e-05, |
| "loss": 3.4958, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5063009257955022e-05, |
| "loss": 3.4913, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.505463968924823e-05, |
| "loss": 3.4942, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.504625374173771e-05, |
| "loss": 3.5034, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5037867794227195e-05, |
| "loss": 3.5003, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5029481846716675e-05, |
| "loss": 3.4956, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5021112278009884e-05, |
| "loss": 3.4965, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5012726330499364e-05, |
| "loss": 3.4924, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8721048831939697, |
| "eval_runtime": 309.2295, |
| "eval_samples_per_second": 1234.006, |
| "eval_steps_per_second": 38.564, |
| "step": 1526400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.5004340382988844e-05, |
| "loss": 3.4882, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4995954435478324e-05, |
| "loss": 3.4915, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4987584866771536e-05, |
| "loss": 3.5039, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4979198919261016e-05, |
| "loss": 3.491, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4970812971750496e-05, |
| "loss": 3.5113, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4962427024239976e-05, |
| "loss": 3.4919, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4954057455533185e-05, |
| "loss": 3.4988, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4945671508022665e-05, |
| "loss": 3.4906, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.493728556051215e-05, |
| "loss": 3.4941, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.492889961300163e-05, |
| "loss": 3.4946, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4920530044294838e-05, |
| "loss": 3.4941, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4912144096784318e-05, |
| "loss": 3.5005, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4903774528077527e-05, |
| "loss": 3.4929, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.489538858056701e-05, |
| "loss": 3.483, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.488700263305649e-05, |
| "loss": 3.491, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.487861668554597e-05, |
| "loss": 3.4827, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.487023073803545e-05, |
| "loss": 3.4904, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.486184479052493e-05, |
| "loss": 3.4909, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.485345884301441e-05, |
| "loss": 3.4897, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4845072895503886e-05, |
| "loss": 3.5138, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4836703326797102e-05, |
| "loss": 3.4969, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4828317379286582e-05, |
| "loss": 3.5034, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.481993143177606e-05, |
| "loss": 3.4946, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.481154548426554e-05, |
| "loss": 3.4996, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.480317591555875e-05, |
| "loss": 3.4896, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.479478996804823e-05, |
| "loss": 3.4984, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.478640402053771e-05, |
| "loss": 3.4925, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.477801807302719e-05, |
| "loss": 3.4872, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4769648504320404e-05, |
| "loss": 3.4815, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4761262556809883e-05, |
| "loss": 3.4861, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.475287660929936e-05, |
| "loss": 3.4953, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.474449066178884e-05, |
| "loss": 3.4985, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4736121093082052e-05, |
| "loss": 3.4983, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4727735145571532e-05, |
| "loss": 3.4986, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4719349198061012e-05, |
| "loss": 3.4819, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4710963250550492e-05, |
| "loss": 3.4931, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4702593681843705e-05, |
| "loss": 3.487, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.469420773433318e-05, |
| "loss": 3.4803, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4685821786822665e-05, |
| "loss": 3.4872, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4677435839312145e-05, |
| "loss": 3.4817, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4669066270605357e-05, |
| "loss": 3.4942, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4660680323094834e-05, |
| "loss": 3.4964, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4652294375584314e-05, |
| "loss": 3.4934, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4643908428073794e-05, |
| "loss": 3.4864, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4635538859367006e-05, |
| "loss": 3.5015, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4627152911856486e-05, |
| "loss": 3.4866, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4618766964345966e-05, |
| "loss": 3.4938, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4610381016835446e-05, |
| "loss": 3.4839, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4602011448128655e-05, |
| "loss": 3.4729, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4593625500618135e-05, |
| "loss": 3.5, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.458523955310762e-05, |
| "loss": 3.4873, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.45768536055971e-05, |
| "loss": 3.4874, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.456850041569404e-05, |
| "loss": 3.4807, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.456011446818352e-05, |
| "loss": 3.479, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4551728520673e-05, |
| "loss": 3.473, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.454334257316248e-05, |
| "loss": 3.4868, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.453495662565196e-05, |
| "loss": 3.4904, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.452657067814144e-05, |
| "loss": 3.4914, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.451818473063092e-05, |
| "loss": 3.4958, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.45097987831204e-05, |
| "loss": 3.4845, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.450142921441361e-05, |
| "loss": 3.4728, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.449304326690309e-05, |
| "loss": 3.4938, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4484657319392572e-05, |
| "loss": 3.4764, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4476271371882052e-05, |
| "loss": 3.4718, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.446790180317526e-05, |
| "loss": 3.4981, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.445951585566474e-05, |
| "loss": 3.4881, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.445112990815422e-05, |
| "loss": 3.4789, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.44427439606437e-05, |
| "loss": 3.4787, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4434374391936913e-05, |
| "loss": 3.4727, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4425988444426393e-05, |
| "loss": 3.4796, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4417602496915873e-05, |
| "loss": 3.4953, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4409216549405353e-05, |
| "loss": 3.4895, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4400846980698562e-05, |
| "loss": 3.4923, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4392461033188042e-05, |
| "loss": 3.4857, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4384075085677526e-05, |
| "loss": 3.5034, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4375705516970735e-05, |
| "loss": 3.4813, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4367319569460215e-05, |
| "loss": 3.4935, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4358933621949695e-05, |
| "loss": 3.4863, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4350547674439175e-05, |
| "loss": 3.4705, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4342161726928655e-05, |
| "loss": 3.4974, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4333792158221867e-05, |
| "loss": 3.4945, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4325406210711347e-05, |
| "loss": 3.4879, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4317020263200827e-05, |
| "loss": 3.4778, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4308634315690307e-05, |
| "loss": 3.4712, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4300264746983516e-05, |
| "loss": 3.4846, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4291878799472996e-05, |
| "loss": 3.4821, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4283492851962476e-05, |
| "loss": 3.4949, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.427512328325569e-05, |
| "loss": 3.482, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.426673733574517e-05, |
| "loss": 3.4863, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.425835138823465e-05, |
| "loss": 3.481, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.424996544072413e-05, |
| "loss": 3.4847, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.424159587201734e-05, |
| "loss": 3.4779, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.423320992450682e-05, |
| "loss": 3.4817, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.42248239769963e-05, |
| "loss": 3.4939, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.421643802948578e-05, |
| "loss": 3.4951, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4208052081975257e-05, |
| "loss": 3.4977, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.419968251326847e-05, |
| "loss": 3.4721, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.419129656575795e-05, |
| "loss": 3.4915, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.418291061824743e-05, |
| "loss": 3.4803, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.417452467073691e-05, |
| "loss": 3.4902, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4166155102030122e-05, |
| "loss": 3.4823, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4157769154519602e-05, |
| "loss": 3.4805, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.414938320700908e-05, |
| "loss": 3.4798, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.414099725949856e-05, |
| "loss": 3.4924, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4132627690791774e-05, |
| "loss": 3.4692, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4124241743281254e-05, |
| "loss": 3.4838, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.411585579577073e-05, |
| "loss": 3.4775, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.410746984826021e-05, |
| "loss": 3.4864, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4099100279553423e-05, |
| "loss": 3.4762, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4090714332042903e-05, |
| "loss": 3.4897, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4082328384532383e-05, |
| "loss": 3.4848, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4073942437021863e-05, |
| "loss": 3.481, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4065572868315076e-05, |
| "loss": 3.492, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4057186920804552e-05, |
| "loss": 3.4811, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4048800973294032e-05, |
| "loss": 3.4903, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4040415025783512e-05, |
| "loss": 3.5, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4032045457076728e-05, |
| "loss": 3.4829, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4023659509566205e-05, |
| "loss": 3.4835, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4015273562055685e-05, |
| "loss": 3.4847, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4006887614545165e-05, |
| "loss": 3.4827, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3998518045838374e-05, |
| "loss": 3.4795, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3990132098327857e-05, |
| "loss": 3.4839, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3981746150817337e-05, |
| "loss": 3.4759, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3973360203306817e-05, |
| "loss": 3.4954, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3964990634600026e-05, |
| "loss": 3.4865, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3956604687089506e-05, |
| "loss": 3.4916, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3948218739578986e-05, |
| "loss": 3.4743, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3939832792068466e-05, |
| "loss": 3.4795, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.393146322336168e-05, |
| "loss": 3.4818, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.392307727585116e-05, |
| "loss": 3.4891, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.391469132834064e-05, |
| "loss": 3.4947, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3906305380830118e-05, |
| "loss": 3.4885, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3897935812123327e-05, |
| "loss": 3.4721, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.388954986461281e-05, |
| "loss": 3.491, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.388116391710229e-05, |
| "loss": 3.4783, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.387277796959177e-05, |
| "loss": 3.4922, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.386440840088498e-05, |
| "loss": 3.4756, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.385602245337446e-05, |
| "loss": 3.4932, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.384763650586394e-05, |
| "loss": 3.4913, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.383925055835342e-05, |
| "loss": 3.4946, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3830880989646632e-05, |
| "loss": 3.4858, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3822495042136112e-05, |
| "loss": 3.4837, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3814109094625592e-05, |
| "loss": 3.4786, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3805723147115072e-05, |
| "loss": 3.486, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.379735357840828e-05, |
| "loss": 3.4882, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.378896763089776e-05, |
| "loss": 3.489, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3780581683387244e-05, |
| "loss": 3.4837, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3772195735876724e-05, |
| "loss": 3.4816, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3763826167169933e-05, |
| "loss": 3.4828, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8730223178863525, |
| "eval_runtime": 308.6717, |
| "eval_samples_per_second": 1236.236, |
| "eval_steps_per_second": 38.633, |
| "step": 1602720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3755440219659413e-05, |
| "loss": 3.4751, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3747054272148893e-05, |
| "loss": 3.4788, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3738668324638373e-05, |
| "loss": 3.4901, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3730282377127853e-05, |
| "loss": 3.4787, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3721912808421066e-05, |
| "loss": 3.4969, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3713526860910546e-05, |
| "loss": 3.4812, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3705140913400026e-05, |
| "loss": 3.4858, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3696754965889502e-05, |
| "loss": 3.4837, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3688385397182715e-05, |
| "loss": 3.4814, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3679999449672198e-05, |
| "loss": 3.4803, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3671613502161678e-05, |
| "loss": 3.4861, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3663227554651155e-05, |
| "loss": 3.4858, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3654874364748096e-05, |
| "loss": 3.4822, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3646488417237576e-05, |
| "loss": 3.4715, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.363810246972706e-05, |
| "loss": 3.482, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.362971652221654e-05, |
| "loss": 3.4678, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.362133057470602e-05, |
| "loss": 3.4765, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.36129446271955e-05, |
| "loss": 3.482, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3604558679684976e-05, |
| "loss": 3.4782, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3596172732174456e-05, |
| "loss": 3.5, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.358780316346767e-05, |
| "loss": 3.4876, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.357941721595715e-05, |
| "loss": 3.4918, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3571031268446628e-05, |
| "loss": 3.4824, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3562645320936108e-05, |
| "loss": 3.4857, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.355427575222932e-05, |
| "loss": 3.4798, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3545889804718797e-05, |
| "loss": 3.4864, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.353750385720828e-05, |
| "loss": 3.4798, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.352911790969776e-05, |
| "loss": 3.478, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3520748340990973e-05, |
| "loss": 3.4708, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.351236239348045e-05, |
| "loss": 3.475, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.350397644596993e-05, |
| "loss": 3.4802, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.349559049845941e-05, |
| "loss": 3.4851, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3487220929752622e-05, |
| "loss": 3.4846, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3478834982242102e-05, |
| "loss": 3.4881, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3470449034731582e-05, |
| "loss": 3.4716, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3462079466024794e-05, |
| "loss": 3.4799, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.345369351851427e-05, |
| "loss": 3.4732, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.344530757100375e-05, |
| "loss": 3.4692, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3436921623493234e-05, |
| "loss": 3.4797, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3428535675982714e-05, |
| "loss": 3.4653, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3420166107275923e-05, |
| "loss": 3.4826, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3411780159765403e-05, |
| "loss": 3.4847, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3403394212254883e-05, |
| "loss": 3.4858, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3395024643548096e-05, |
| "loss": 3.4745, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3386638696037576e-05, |
| "loss": 3.4903, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3378252748527056e-05, |
| "loss": 3.4711, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3369866801016536e-05, |
| "loss": 3.4856, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3361480853506016e-05, |
| "loss": 3.4718, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3353111284799225e-05, |
| "loss": 3.4561, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3344725337288705e-05, |
| "loss": 3.491, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3336339389778185e-05, |
| "loss": 3.4744, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3327953442267668e-05, |
| "loss": 3.4804, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.331960025236461e-05, |
| "loss": 3.4668, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.331121430485409e-05, |
| "loss": 3.4678, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.330282835734357e-05, |
| "loss": 3.4673, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.329444240983305e-05, |
| "loss": 3.4721, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.328605646232253e-05, |
| "loss": 3.4818, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.327767051481201e-05, |
| "loss": 3.4764, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.326928456730149e-05, |
| "loss": 3.4885, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.326089861979097e-05, |
| "loss": 3.4706, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.325252905108418e-05, |
| "loss": 3.4627, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3244143103573658e-05, |
| "loss": 3.4783, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3235757156063138e-05, |
| "loss": 3.4614, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.322737120855262e-05, |
| "loss": 3.4606, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.321900163984583e-05, |
| "loss": 3.4852, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.321061569233531e-05, |
| "loss": 3.4765, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.320222974482479e-05, |
| "loss": 3.467, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.319384379731427e-05, |
| "loss": 3.4698, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3185474228607483e-05, |
| "loss": 3.4615, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3177088281096963e-05, |
| "loss": 3.4673, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3168702333586443e-05, |
| "loss": 3.4825, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3160316386075923e-05, |
| "loss": 3.4793, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3151946817369132e-05, |
| "loss": 3.481, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3143560869858612e-05, |
| "loss": 3.4753, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3135174922348092e-05, |
| "loss": 3.4914, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3126788974837575e-05, |
| "loss": 3.4712, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3118419406130784e-05, |
| "loss": 3.4824, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3110033458620264e-05, |
| "loss": 3.4727, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3101647511109744e-05, |
| "loss": 3.4585, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.309326156359922e-05, |
| "loss": 3.4853, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3084891994892437e-05, |
| "loss": 3.483, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3076506047381917e-05, |
| "loss": 3.4772, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3068120099871397e-05, |
| "loss": 3.4694, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3059734152360873e-05, |
| "loss": 3.4636, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3051364583654086e-05, |
| "loss": 3.4673, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3042978636143566e-05, |
| "loss": 3.4744, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3034592688633046e-05, |
| "loss": 3.482, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3026206741122526e-05, |
| "loss": 3.4667, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3017837172415738e-05, |
| "loss": 3.478, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3009451224905218e-05, |
| "loss": 3.4686, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3001065277394695e-05, |
| "loss": 3.4729, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2992679329884174e-05, |
| "loss": 3.4667, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.298430976117739e-05, |
| "loss": 3.4698, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.297592381366687e-05, |
| "loss": 3.4834, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2967537866156347e-05, |
| "loss": 3.4853, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2959151918645827e-05, |
| "loss": 3.4853, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.295078234993904e-05, |
| "loss": 3.4598, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.294239640242852e-05, |
| "loss": 3.4817, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2934010454918e-05, |
| "loss": 3.4665, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.292562450740748e-05, |
| "loss": 3.4821, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.291725493870069e-05, |
| "loss": 3.4699, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2908868991190168e-05, |
| "loss": 3.4732, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2900483043679648e-05, |
| "loss": 3.4641, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2892097096169128e-05, |
| "loss": 3.4792, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2883727527462344e-05, |
| "loss": 3.4596, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.287534157995182e-05, |
| "loss": 3.4767, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.28669556324413e-05, |
| "loss": 3.4631, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.285856968493078e-05, |
| "loss": 3.477, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2850200116223993e-05, |
| "loss": 3.4702, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2841814168713473e-05, |
| "loss": 3.4743, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2833428221202953e-05, |
| "loss": 3.4708, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2825042273692433e-05, |
| "loss": 3.4766, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2816672704985642e-05, |
| "loss": 3.476, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2808286757475122e-05, |
| "loss": 3.4727, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2799900809964602e-05, |
| "loss": 3.4744, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2791514862454082e-05, |
| "loss": 3.4923, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2783145293747294e-05, |
| "loss": 3.4709, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2774759346236774e-05, |
| "loss": 3.469, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2766373398726254e-05, |
| "loss": 3.4755, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2757987451215734e-05, |
| "loss": 3.4729, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2749617882508943e-05, |
| "loss": 3.4617, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2741231934998423e-05, |
| "loss": 3.4757, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2732845987487907e-05, |
| "loss": 3.4643, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2724460039977387e-05, |
| "loss": 3.4836, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2716090471270596e-05, |
| "loss": 3.4787, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2707704523760076e-05, |
| "loss": 3.4758, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2699318576249556e-05, |
| "loss": 3.4664, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2690932628739035e-05, |
| "loss": 3.4648, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2682563060032248e-05, |
| "loss": 3.4711, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2674177112521728e-05, |
| "loss": 3.4807, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2665791165011208e-05, |
| "loss": 3.4764, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2657405217500688e-05, |
| "loss": 3.4817, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2649035648793897e-05, |
| "loss": 3.4585, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2640649701283377e-05, |
| "loss": 3.4797, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.263226375377286e-05, |
| "loss": 3.4673, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.262387780626234e-05, |
| "loss": 3.4811, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.261550823755555e-05, |
| "loss": 3.4647, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.260712229004503e-05, |
| "loss": 3.4847, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.259873634253451e-05, |
| "loss": 3.4766, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.259035039502399e-05, |
| "loss": 3.4844, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.25819808263172e-05, |
| "loss": 3.4674, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.257359487880668e-05, |
| "loss": 3.4806, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.256520893129616e-05, |
| "loss": 3.4604, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.255682298378564e-05, |
| "loss": 3.476, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.254845341507885e-05, |
| "loss": 3.4789, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.254006746756833e-05, |
| "loss": 3.4711, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2531681520057814e-05, |
| "loss": 3.4785, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2523295572547294e-05, |
| "loss": 3.4661, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2514926003840503e-05, |
| "loss": 3.473, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8742332458496094, |
| "eval_runtime": 303.1316, |
| "eval_samples_per_second": 1258.83, |
| "eval_steps_per_second": 39.339, |
| "step": 1679040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2506540056329983e-05, |
| "loss": 3.4661, |
| "step": 1679360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2498154108819463e-05, |
| "loss": 3.4657, |
| "step": 1679872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2489768161308943e-05, |
| "loss": 3.4765, |
| "step": 1680384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2481382213798423e-05, |
| "loss": 3.4674, |
| "step": 1680896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2473012645091635e-05, |
| "loss": 3.4852, |
| "step": 1681408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2464626697581115e-05, |
| "loss": 3.4682, |
| "step": 1681920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2456240750070592e-05, |
| "loss": 3.4736, |
| "step": 1682432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2447854802560072e-05, |
| "loss": 3.4738, |
| "step": 1682944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2439485233853284e-05, |
| "loss": 3.4691, |
| "step": 1683456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2431099286342768e-05, |
| "loss": 3.4695, |
| "step": 1683968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2422713338832244e-05, |
| "loss": 3.4729, |
| "step": 1684480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2414327391321724e-05, |
| "loss": 3.4752, |
| "step": 1684992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2405974201418666e-05, |
| "loss": 3.4719, |
| "step": 1685504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2397588253908146e-05, |
| "loss": 3.4613, |
| "step": 1686016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.238920230639763e-05, |
| "loss": 3.4709, |
| "step": 1686528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.238081635888711e-05, |
| "loss": 3.4548, |
| "step": 1687040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.237243041137659e-05, |
| "loss": 3.4659, |
| "step": 1687552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2364044463866065e-05, |
| "loss": 3.4684, |
| "step": 1688064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2355658516355545e-05, |
| "loss": 3.4646, |
| "step": 1688576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2347272568845025e-05, |
| "loss": 3.4893, |
| "step": 1689088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2338903000138238e-05, |
| "loss": 3.4784, |
| "step": 1689600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2330517052627718e-05, |
| "loss": 3.4815, |
| "step": 1690112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2322131105117198e-05, |
| "loss": 3.4678, |
| "step": 1690624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2313745157606678e-05, |
| "loss": 3.4733, |
| "step": 1691136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.230537558889989e-05, |
| "loss": 3.4725, |
| "step": 1691648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2296989641389367e-05, |
| "loss": 3.474, |
| "step": 1692160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2288603693878847e-05, |
| "loss": 3.4674, |
| "step": 1692672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.228021774636833e-05, |
| "loss": 3.4712, |
| "step": 1693184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.227184817766154e-05, |
| "loss": 3.4582, |
| "step": 1693696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.226346223015102e-05, |
| "loss": 3.4653, |
| "step": 1694208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.22550762826405e-05, |
| "loss": 3.4681, |
| "step": 1694720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.224669033512998e-05, |
| "loss": 3.476, |
| "step": 1695232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.223832076642319e-05, |
| "loss": 3.476, |
| "step": 1695744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.222993481891267e-05, |
| "loss": 3.4775, |
| "step": 1696256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.222154887140215e-05, |
| "loss": 3.4617, |
| "step": 1696768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.221316292389163e-05, |
| "loss": 3.4649, |
| "step": 1697280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.220479335518484e-05, |
| "loss": 3.4672, |
| "step": 1697792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.219640740767432e-05, |
| "loss": 3.4595, |
| "step": 1698304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.21880214601638e-05, |
| "loss": 3.4638, |
| "step": 1698816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2179651891457013e-05, |
| "loss": 3.4585, |
| "step": 1699328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2171265943946493e-05, |
| "loss": 3.4697, |
| "step": 1699840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2162879996435973e-05, |
| "loss": 3.4735, |
| "step": 1700352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2154494048925453e-05, |
| "loss": 3.472, |
| "step": 1700864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2146124480218662e-05, |
| "loss": 3.4648, |
| "step": 1701376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2137738532708145e-05, |
| "loss": 3.4746, |
| "step": 1701888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2129352585197625e-05, |
| "loss": 3.4653, |
| "step": 1702400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2120966637687105e-05, |
| "loss": 3.4711, |
| "step": 1702912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2112580690176585e-05, |
| "loss": 3.461, |
| "step": 1703424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2104211121469794e-05, |
| "loss": 3.4459, |
| "step": 1703936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2095825173959274e-05, |
| "loss": 3.4854, |
| "step": 1704448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2087439226448754e-05, |
| "loss": 3.4615, |
| "step": 1704960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2079053278938237e-05, |
| "loss": 3.4704, |
| "step": 1705472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.207070008903518e-05, |
| "loss": 3.4534, |
| "step": 1705984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.206231414152466e-05, |
| "loss": 3.4593, |
| "step": 1706496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2053928194014136e-05, |
| "loss": 3.4495, |
| "step": 1707008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2045542246503616e-05, |
| "loss": 3.4611, |
| "step": 1707520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.20371562989931e-05, |
| "loss": 3.468, |
| "step": 1708032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.202877035148258e-05, |
| "loss": 3.4676, |
| "step": 1708544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.202038440397206e-05, |
| "loss": 3.477, |
| "step": 1709056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.201199845646154e-05, |
| "loss": 3.4602, |
| "step": 1709568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2003628887754748e-05, |
| "loss": 3.4522, |
| "step": 1710080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1995242940244228e-05, |
| "loss": 3.4734, |
| "step": 1710592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1986856992733708e-05, |
| "loss": 3.4514, |
| "step": 1711104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.197847104522319e-05, |
| "loss": 3.4471, |
| "step": 1711616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.19701014765164e-05, |
| "loss": 3.4736, |
| "step": 1712128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.196171552900588e-05, |
| "loss": 3.4644, |
| "step": 1712640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.195332958149536e-05, |
| "loss": 3.4547, |
| "step": 1713152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.194494363398484e-05, |
| "loss": 3.4607, |
| "step": 1713664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1936574065278053e-05, |
| "loss": 3.4522, |
| "step": 1714176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1928188117767533e-05, |
| "loss": 3.4556, |
| "step": 1714688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1919802170257012e-05, |
| "loss": 3.4699, |
| "step": 1715200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.191143260155022e-05, |
| "loss": 3.4658, |
| "step": 1715712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.19030466540397e-05, |
| "loss": 3.4731, |
| "step": 1716224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.189466070652918e-05, |
| "loss": 3.4667, |
| "step": 1716736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.188627475901866e-05, |
| "loss": 3.4758, |
| "step": 1717248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1877905190311874e-05, |
| "loss": 3.4574, |
| "step": 1717760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1869519242801354e-05, |
| "loss": 3.4754, |
| "step": 1718272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1861133295290834e-05, |
| "loss": 3.4579, |
| "step": 1718784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1852747347780314e-05, |
| "loss": 3.4541, |
| "step": 1719296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1844377779073523e-05, |
| "loss": 3.4701, |
| "step": 1719808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1835991831563006e-05, |
| "loss": 3.4747, |
| "step": 1720320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1827605884052486e-05, |
| "loss": 3.4665, |
| "step": 1720832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1819219936541963e-05, |
| "loss": 3.4595, |
| "step": 1721344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1810833989031443e-05, |
| "loss": 3.4518, |
| "step": 1721856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1802464420324655e-05, |
| "loss": 3.4554, |
| "step": 1722368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1794078472814135e-05, |
| "loss": 3.4628, |
| "step": 1722880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1785692525303615e-05, |
| "loss": 3.4702, |
| "step": 1723392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1777306577793095e-05, |
| "loss": 3.4571, |
| "step": 1723904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1768937009086308e-05, |
| "loss": 3.4673, |
| "step": 1724416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1760551061575787e-05, |
| "loss": 3.4509, |
| "step": 1724928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1752165114065264e-05, |
| "loss": 3.4657, |
| "step": 1725440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1743779166554744e-05, |
| "loss": 3.4563, |
| "step": 1725952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.173540959784796e-05, |
| "loss": 3.459, |
| "step": 1726464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1727023650337436e-05, |
| "loss": 3.4721, |
| "step": 1726976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1718637702826916e-05, |
| "loss": 3.4733, |
| "step": 1727488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.171026813412013e-05, |
| "loss": 3.4761, |
| "step": 1728000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.170188218660961e-05, |
| "loss": 3.4487, |
| "step": 1728512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1693496239099085e-05, |
| "loss": 3.4697, |
| "step": 1729024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.168511029158857e-05, |
| "loss": 3.4544, |
| "step": 1729536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.167672434407805e-05, |
| "loss": 3.4705, |
| "step": 1730048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.166835477537126e-05, |
| "loss": 3.4573, |
| "step": 1730560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1659968827860738e-05, |
| "loss": 3.466, |
| "step": 1731072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1651582880350218e-05, |
| "loss": 3.4537, |
| "step": 1731584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.164321331164343e-05, |
| "loss": 3.4655, |
| "step": 1732096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.163482736413291e-05, |
| "loss": 3.4522, |
| "step": 1732608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.162644141662239e-05, |
| "loss": 3.4662, |
| "step": 1733120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.161805546911187e-05, |
| "loss": 3.4499, |
| "step": 1733632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1609685900405083e-05, |
| "loss": 3.4672, |
| "step": 1734144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.160129995289456e-05, |
| "loss": 3.4567, |
| "step": 1734656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.159291400538404e-05, |
| "loss": 3.4613, |
| "step": 1735168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1584528057873522e-05, |
| "loss": 3.4579, |
| "step": 1735680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1576158489166735e-05, |
| "loss": 3.4647, |
| "step": 1736192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.156777254165621e-05, |
| "loss": 3.4648, |
| "step": 1736704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.155938659414569e-05, |
| "loss": 3.4604, |
| "step": 1737216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.155100064663517e-05, |
| "loss": 3.4651, |
| "step": 1737728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1542631077928384e-05, |
| "loss": 3.4767, |
| "step": 1738240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1534245130417864e-05, |
| "loss": 3.4655, |
| "step": 1738752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1525859182907344e-05, |
| "loss": 3.4601, |
| "step": 1739264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1517473235396824e-05, |
| "loss": 3.4585, |
| "step": 1739776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1509103666690033e-05, |
| "loss": 3.4643, |
| "step": 1740288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1500717719179513e-05, |
| "loss": 3.4521, |
| "step": 1740800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1492331771668993e-05, |
| "loss": 3.4643, |
| "step": 1741312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1483945824158476e-05, |
| "loss": 3.451, |
| "step": 1741824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1475576255451685e-05, |
| "loss": 3.4725, |
| "step": 1742336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1467190307941165e-05, |
| "loss": 3.4639, |
| "step": 1742848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1458804360430645e-05, |
| "loss": 3.4669, |
| "step": 1743360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1450418412920125e-05, |
| "loss": 3.4544, |
| "step": 1743872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1442048844213338e-05, |
| "loss": 3.4535, |
| "step": 1744384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1433662896702817e-05, |
| "loss": 3.4595, |
| "step": 1744896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1425276949192297e-05, |
| "loss": 3.4722, |
| "step": 1745408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1416891001681777e-05, |
| "loss": 3.4649, |
| "step": 1745920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1408521432974987e-05, |
| "loss": 3.4666, |
| "step": 1746432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1400135485464466e-05, |
| "loss": 3.451, |
| "step": 1746944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1391749537953946e-05, |
| "loss": 3.4701, |
| "step": 1747456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.138336359044343e-05, |
| "loss": 3.455, |
| "step": 1747968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.137499402173664e-05, |
| "loss": 3.4709, |
| "step": 1748480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.136660807422612e-05, |
| "loss": 3.4518, |
| "step": 1748992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.13582221267156e-05, |
| "loss": 3.4709, |
| "step": 1749504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.134983617920508e-05, |
| "loss": 3.4626, |
| "step": 1750016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.134146661049829e-05, |
| "loss": 3.4774, |
| "step": 1750528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.133308066298777e-05, |
| "loss": 3.4561, |
| "step": 1751040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.132469471547725e-05, |
| "loss": 3.4649, |
| "step": 1751552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.131630876796673e-05, |
| "loss": 3.4529, |
| "step": 1752064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.130793919925994e-05, |
| "loss": 3.465, |
| "step": 1752576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.129955325174942e-05, |
| "loss": 3.4645, |
| "step": 1753088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.12911673042389e-05, |
| "loss": 3.4605, |
| "step": 1753600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1282781356728383e-05, |
| "loss": 3.4679, |
| "step": 1754112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1274411788021593e-05, |
| "loss": 3.4519, |
| "step": 1754624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1266025840511072e-05, |
| "loss": 3.4664, |
| "step": 1755136 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.875530481338501, |
| "eval_runtime": 335.9196, |
| "eval_samples_per_second": 1135.959, |
| "eval_steps_per_second": 35.5, |
| "step": 1755360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1257639893000552e-05, |
| "loss": 3.4571, |
| "step": 1755648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1249253945490032e-05, |
| "loss": 3.4535, |
| "step": 1756160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.124086799797951e-05, |
| "loss": 3.4661, |
| "step": 1756672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1232482050468992e-05, |
| "loss": 3.4589, |
| "step": 1757184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1224112481762205e-05, |
| "loss": 3.4759, |
| "step": 1757696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1215726534251685e-05, |
| "loss": 3.4592, |
| "step": 1758208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.120734058674116e-05, |
| "loss": 3.461, |
| "step": 1758720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.119895463923064e-05, |
| "loss": 3.4638, |
| "step": 1759232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1190585070523854e-05, |
| "loss": 3.4584, |
| "step": 1759744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1182199123013334e-05, |
| "loss": 3.4602, |
| "step": 1760256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1173813175502814e-05, |
| "loss": 3.4616, |
| "step": 1760768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1165427227992294e-05, |
| "loss": 3.4662, |
| "step": 1761280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1157074038089235e-05, |
| "loss": 3.4644, |
| "step": 1761792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1148688090578715e-05, |
| "loss": 3.4496, |
| "step": 1762304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.11403021430682e-05, |
| "loss": 3.4558, |
| "step": 1762816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.113191619555768e-05, |
| "loss": 3.4467, |
| "step": 1763328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.112353024804716e-05, |
| "loss": 3.4513, |
| "step": 1763840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1115144300536635e-05, |
| "loss": 3.4611, |
| "step": 1764352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1106758353026115e-05, |
| "loss": 3.4527, |
| "step": 1764864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1098372405515595e-05, |
| "loss": 3.4765, |
| "step": 1765376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1090002836808807e-05, |
| "loss": 3.4695, |
| "step": 1765888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1081616889298287e-05, |
| "loss": 3.4715, |
| "step": 1766400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1073230941787767e-05, |
| "loss": 3.4588, |
| "step": 1766912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1064844994277247e-05, |
| "loss": 3.4586, |
| "step": 1767424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1056475425570456e-05, |
| "loss": 3.4652, |
| "step": 1767936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1048089478059936e-05, |
| "loss": 3.4594, |
| "step": 1768448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1039703530549416e-05, |
| "loss": 3.4553, |
| "step": 1768960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.10313175830389e-05, |
| "loss": 3.466, |
| "step": 1769472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.102294801433211e-05, |
| "loss": 3.4431, |
| "step": 1769984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.101456206682159e-05, |
| "loss": 3.4528, |
| "step": 1770496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.100617611931107e-05, |
| "loss": 3.4583, |
| "step": 1771008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.099779017180055e-05, |
| "loss": 3.4622, |
| "step": 1771520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.098942060309376e-05, |
| "loss": 3.4688, |
| "step": 1772032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.098103465558324e-05, |
| "loss": 3.466, |
| "step": 1772544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.097264870807272e-05, |
| "loss": 3.4515, |
| "step": 1773056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.09642627605622e-05, |
| "loss": 3.4536, |
| "step": 1773568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.095589319185541e-05, |
| "loss": 3.4576, |
| "step": 1774080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.094750724434489e-05, |
| "loss": 3.451, |
| "step": 1774592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.093912129683437e-05, |
| "loss": 3.4551, |
| "step": 1775104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0930735349323853e-05, |
| "loss": 3.4462, |
| "step": 1775616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0922365780617062e-05, |
| "loss": 3.4588, |
| "step": 1776128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0913979833106542e-05, |
| "loss": 3.456, |
| "step": 1776640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0905593885596022e-05, |
| "loss": 3.4634, |
| "step": 1777152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.089722431688923e-05, |
| "loss": 3.4582, |
| "step": 1777664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0888838369378715e-05, |
| "loss": 3.4599, |
| "step": 1778176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0880452421868195e-05, |
| "loss": 3.4572, |
| "step": 1778688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0872066474357675e-05, |
| "loss": 3.4606, |
| "step": 1779200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0863680526847155e-05, |
| "loss": 3.4498, |
| "step": 1779712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0855310958140364e-05, |
| "loss": 3.4343, |
| "step": 1780224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0846925010629844e-05, |
| "loss": 3.4735, |
| "step": 1780736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0838539063119324e-05, |
| "loss": 3.4489, |
| "step": 1781248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0830153115608807e-05, |
| "loss": 3.4611, |
| "step": 1781760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0821783546902016e-05, |
| "loss": 3.4494, |
| "step": 1782272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0813397599391496e-05, |
| "loss": 3.4456, |
| "step": 1782784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0805011651880976e-05, |
| "loss": 3.4374, |
| "step": 1783296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0796625704370456e-05, |
| "loss": 3.4501, |
| "step": 1783808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.078825613566367e-05, |
| "loss": 3.4606, |
| "step": 1784320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.077987018815315e-05, |
| "loss": 3.4548, |
| "step": 1784832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.077148424064263e-05, |
| "loss": 3.4647, |
| "step": 1785344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0763098293132108e-05, |
| "loss": 3.4559, |
| "step": 1785856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0754728724425317e-05, |
| "loss": 3.4386, |
| "step": 1786368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0746342776914797e-05, |
| "loss": 3.4592, |
| "step": 1786880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0737956829404277e-05, |
| "loss": 3.4408, |
| "step": 1787392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.072957088189376e-05, |
| "loss": 3.436, |
| "step": 1787904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.072120131318697e-05, |
| "loss": 3.4667, |
| "step": 1788416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.071281536567645e-05, |
| "loss": 3.4553, |
| "step": 1788928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.070442941816593e-05, |
| "loss": 3.4443, |
| "step": 1789440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0696043470655406e-05, |
| "loss": 3.4489, |
| "step": 1789952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0687673901948622e-05, |
| "loss": 3.443, |
| "step": 1790464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0679287954438102e-05, |
| "loss": 3.4443, |
| "step": 1790976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0670902006927582e-05, |
| "loss": 3.4566, |
| "step": 1791488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.066251605941706e-05, |
| "loss": 3.4545, |
| "step": 1792000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.065414649071027e-05, |
| "loss": 3.4621, |
| "step": 1792512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.064576054319975e-05, |
| "loss": 3.4524, |
| "step": 1793024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.063737459568923e-05, |
| "loss": 3.4661, |
| "step": 1793536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.062898864817871e-05, |
| "loss": 3.4452, |
| "step": 1794048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0620619079471923e-05, |
| "loss": 3.4652, |
| "step": 1794560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0612233131961403e-05, |
| "loss": 3.4472, |
| "step": 1795072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.060384718445088e-05, |
| "loss": 3.4453, |
| "step": 1795584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.059546123694036e-05, |
| "loss": 3.4584, |
| "step": 1796096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0587091668233572e-05, |
| "loss": 3.4645, |
| "step": 1796608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0578705720723056e-05, |
| "loss": 3.4556, |
| "step": 1797120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0570319773212532e-05, |
| "loss": 3.4526, |
| "step": 1797632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0561933825702012e-05, |
| "loss": 3.4363, |
| "step": 1798144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0553564256995225e-05, |
| "loss": 3.4424, |
| "step": 1798656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.05451783094847e-05, |
| "loss": 3.453, |
| "step": 1799168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0536792361974185e-05, |
| "loss": 3.4615, |
| "step": 1799680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0528406414463665e-05, |
| "loss": 3.4469, |
| "step": 1800192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0520036845756877e-05, |
| "loss": 3.456, |
| "step": 1800704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0511650898246354e-05, |
| "loss": 3.4431, |
| "step": 1801216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0503264950735834e-05, |
| "loss": 3.4559, |
| "step": 1801728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0494879003225314e-05, |
| "loss": 3.4457, |
| "step": 1802240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0486509434518526e-05, |
| "loss": 3.4497, |
| "step": 1802752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0478123487008006e-05, |
| "loss": 3.4592, |
| "step": 1803264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0469737539497486e-05, |
| "loss": 3.4595, |
| "step": 1803776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0461351591986966e-05, |
| "loss": 3.4665, |
| "step": 1804288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0452982023280175e-05, |
| "loss": 3.4422, |
| "step": 1804800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0444596075769655e-05, |
| "loss": 3.4571, |
| "step": 1805312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0436210128259138e-05, |
| "loss": 3.4444, |
| "step": 1805824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0427824180748618e-05, |
| "loss": 3.4564, |
| "step": 1806336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0419454612041827e-05, |
| "loss": 3.4484, |
| "step": 1806848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0411068664531307e-05, |
| "loss": 3.4578, |
| "step": 1807360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0402682717020787e-05, |
| "loss": 3.4439, |
| "step": 1807872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0394296769510267e-05, |
| "loss": 3.4509, |
| "step": 1808384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.038592720080348e-05, |
| "loss": 3.4475, |
| "step": 1808896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.037754125329296e-05, |
| "loss": 3.4517, |
| "step": 1809408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.036915530578244e-05, |
| "loss": 3.4367, |
| "step": 1809920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.036076935827192e-05, |
| "loss": 3.4567, |
| "step": 1810432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.035239978956513e-05, |
| "loss": 3.449, |
| "step": 1810944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.034401384205461e-05, |
| "loss": 3.4514, |
| "step": 1811456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0335627894544092e-05, |
| "loss": 3.4489, |
| "step": 1811968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0327241947033572e-05, |
| "loss": 3.4523, |
| "step": 1812480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.031887237832678e-05, |
| "loss": 3.4548, |
| "step": 1812992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.031048643081626e-05, |
| "loss": 3.451, |
| "step": 1813504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.030210048330574e-05, |
| "loss": 3.4551, |
| "step": 1814016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.029371453579522e-05, |
| "loss": 3.4657, |
| "step": 1814528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0285344967088433e-05, |
| "loss": 3.4502, |
| "step": 1815040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0276959019577913e-05, |
| "loss": 3.455, |
| "step": 1815552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0268573072067393e-05, |
| "loss": 3.4461, |
| "step": 1816064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0260187124556873e-05, |
| "loss": 3.4502, |
| "step": 1816576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0251817555850082e-05, |
| "loss": 3.4481, |
| "step": 1817088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0243431608339562e-05, |
| "loss": 3.4523, |
| "step": 1817600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0235045660829046e-05, |
| "loss": 3.4369, |
| "step": 1818112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0226659713318526e-05, |
| "loss": 3.4601, |
| "step": 1818624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0218290144611735e-05, |
| "loss": 3.455, |
| "step": 1819136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0209904197101215e-05, |
| "loss": 3.4579, |
| "step": 1819648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0201518249590695e-05, |
| "loss": 3.4467, |
| "step": 1820160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0193132302080175e-05, |
| "loss": 3.4406, |
| "step": 1820672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0184762733373387e-05, |
| "loss": 3.4476, |
| "step": 1821184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0176376785862867e-05, |
| "loss": 3.4593, |
| "step": 1821696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0167990838352347e-05, |
| "loss": 3.4564, |
| "step": 1822208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0159604890841827e-05, |
| "loss": 3.4622, |
| "step": 1822720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0151235322135036e-05, |
| "loss": 3.4371, |
| "step": 1823232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0142849374624516e-05, |
| "loss": 3.4574, |
| "step": 1823744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0134463427113996e-05, |
| "loss": 3.4456, |
| "step": 1824256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.012607747960348e-05, |
| "loss": 3.4607, |
| "step": 1824768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.011770791089669e-05, |
| "loss": 3.4448, |
| "step": 1825280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0109321963386168e-05, |
| "loss": 3.4548, |
| "step": 1825792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0100936015875648e-05, |
| "loss": 3.4524, |
| "step": 1826304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0092550068365128e-05, |
| "loss": 3.4679, |
| "step": 1826816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.008418049965834e-05, |
| "loss": 3.4493, |
| "step": 1827328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.007579455214782e-05, |
| "loss": 3.4551, |
| "step": 1827840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.00674086046373e-05, |
| "loss": 3.4459, |
| "step": 1828352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0059022657126777e-05, |
| "loss": 3.454, |
| "step": 1828864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.005065308841999e-05, |
| "loss": 3.4502, |
| "step": 1829376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.004226714090947e-05, |
| "loss": 3.4491, |
| "step": 1829888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.003388119339895e-05, |
| "loss": 3.4591, |
| "step": 1830400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.002549524588843e-05, |
| "loss": 3.437, |
| "step": 1830912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0017125677181642e-05, |
| "loss": 3.4601, |
| "step": 1831424 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8761610984802246, |
| "eval_runtime": 306.6428, |
| "eval_samples_per_second": 1244.415, |
| "eval_steps_per_second": 38.889, |
| "step": 1831680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.0008739729671122e-05, |
| "loss": 3.446, |
| "step": 1831936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.0000353782160602e-05, |
| "loss": 3.4432, |
| "step": 1832448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.999196783465008e-05, |
| "loss": 3.4532, |
| "step": 1832960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9983598265943294e-05, |
| "loss": 3.4481, |
| "step": 1833472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9975212318432774e-05, |
| "loss": 3.4624, |
| "step": 1833984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.996682637092225e-05, |
| "loss": 3.4486, |
| "step": 1834496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.995844042341173e-05, |
| "loss": 3.4518, |
| "step": 1835008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9950070854704943e-05, |
| "loss": 3.4549, |
| "step": 1835520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9941684907194423e-05, |
| "loss": 3.4465, |
| "step": 1836032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9933298959683903e-05, |
| "loss": 3.45, |
| "step": 1836544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9924913012173383e-05, |
| "loss": 3.4501, |
| "step": 1837056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9916543443466596e-05, |
| "loss": 3.4526, |
| "step": 1837568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9908173874759805e-05, |
| "loss": 3.4564, |
| "step": 1838080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9899787927249285e-05, |
| "loss": 3.4341, |
| "step": 1838592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9891401979738765e-05, |
| "loss": 3.4476, |
| "step": 1839104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9883016032228248e-05, |
| "loss": 3.4334, |
| "step": 1839616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9874630084717725e-05, |
| "loss": 3.4438, |
| "step": 1840128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9866244137207205e-05, |
| "loss": 3.4481, |
| "step": 1840640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9857858189696685e-05, |
| "loss": 3.4417, |
| "step": 1841152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9849472242186164e-05, |
| "loss": 3.4651, |
| "step": 1841664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9841102673479377e-05, |
| "loss": 3.4587, |
| "step": 1842176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9832716725968857e-05, |
| "loss": 3.4594, |
| "step": 1842688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9824330778458337e-05, |
| "loss": 3.4507, |
| "step": 1843200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9815944830947817e-05, |
| "loss": 3.4516, |
| "step": 1843712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9807575262241026e-05, |
| "loss": 3.4576, |
| "step": 1844224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9799189314730506e-05, |
| "loss": 3.4462, |
| "step": 1844736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9790803367219986e-05, |
| "loss": 3.4428, |
| "step": 1845248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.978241741970947e-05, |
| "loss": 3.4522, |
| "step": 1845760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9774047851002678e-05, |
| "loss": 3.4366, |
| "step": 1846272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9765661903492158e-05, |
| "loss": 3.4404, |
| "step": 1846784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9757275955981638e-05, |
| "loss": 3.4512, |
| "step": 1847296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9748890008471118e-05, |
| "loss": 3.4508, |
| "step": 1847808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.974052043976433e-05, |
| "loss": 3.4519, |
| "step": 1848320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.973213449225381e-05, |
| "loss": 3.4593, |
| "step": 1848832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.972374854474329e-05, |
| "loss": 3.4407, |
| "step": 1849344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.971536259723277e-05, |
| "loss": 3.442, |
| "step": 1849856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.970699302852598e-05, |
| "loss": 3.4461, |
| "step": 1850368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.969860708101546e-05, |
| "loss": 3.4363, |
| "step": 1850880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.969022113350494e-05, |
| "loss": 3.4456, |
| "step": 1851392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.968183518599442e-05, |
| "loss": 3.4392, |
| "step": 1851904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9673465617287632e-05, |
| "loss": 3.444, |
| "step": 1852416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9665079669777112e-05, |
| "loss": 3.4481, |
| "step": 1852928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9656693722266592e-05, |
| "loss": 3.4512, |
| "step": 1853440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.96483241535598e-05, |
| "loss": 3.4492, |
| "step": 1853952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.963993820604928e-05, |
| "loss": 3.4494, |
| "step": 1854464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9631552258538764e-05, |
| "loss": 3.4452, |
| "step": 1854976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9623166311028244e-05, |
| "loss": 3.4471, |
| "step": 1855488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9614780363517724e-05, |
| "loss": 3.4478, |
| "step": 1856000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9606410794810933e-05, |
| "loss": 3.4224, |
| "step": 1856512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9598024847300413e-05, |
| "loss": 3.4597, |
| "step": 1857024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9589638899789893e-05, |
| "loss": 3.4389, |
| "step": 1857536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9581252952279373e-05, |
| "loss": 3.4526, |
| "step": 1858048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9572899762376318e-05, |
| "loss": 3.4369, |
| "step": 1858560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9564513814865795e-05, |
| "loss": 3.4336, |
| "step": 1859072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9556127867355275e-05, |
| "loss": 3.4302, |
| "step": 1859584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9547741919844755e-05, |
| "loss": 3.4349, |
| "step": 1860096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9539355972334235e-05, |
| "loss": 3.4543, |
| "step": 1860608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9530970024823718e-05, |
| "loss": 3.4401, |
| "step": 1861120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9522584077313198e-05, |
| "loss": 3.4571, |
| "step": 1861632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9514198129802674e-05, |
| "loss": 3.4473, |
| "step": 1862144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9505828561095887e-05, |
| "loss": 3.4275, |
| "step": 1862656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9497442613585367e-05, |
| "loss": 3.4463, |
| "step": 1863168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9489056666074847e-05, |
| "loss": 3.4383, |
| "step": 1863680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9480670718564327e-05, |
| "loss": 3.4243, |
| "step": 1864192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.947230114985754e-05, |
| "loss": 3.453, |
| "step": 1864704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.946391520234702e-05, |
| "loss": 3.4431, |
| "step": 1865216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.94555292548365e-05, |
| "loss": 3.4352, |
| "step": 1865728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9447143307325976e-05, |
| "loss": 3.4407, |
| "step": 1866240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9438773738619188e-05, |
| "loss": 3.4268, |
| "step": 1866752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.943038779110867e-05, |
| "loss": 3.439, |
| "step": 1867264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9422001843598148e-05, |
| "loss": 3.4455, |
| "step": 1867776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9413615896087628e-05, |
| "loss": 3.4422, |
| "step": 1868288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.940524632738084e-05, |
| "loss": 3.4539, |
| "step": 1868800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.939686037987032e-05, |
| "loss": 3.4403, |
| "step": 1869312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.93884744323598e-05, |
| "loss": 3.4556, |
| "step": 1869824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.938008848484928e-05, |
| "loss": 3.4398, |
| "step": 1870336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9371718916142493e-05, |
| "loss": 3.4528, |
| "step": 1870848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9363332968631973e-05, |
| "loss": 3.436, |
| "step": 1871360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.935494702112145e-05, |
| "loss": 3.4328, |
| "step": 1871872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.934656107361093e-05, |
| "loss": 3.447, |
| "step": 1872384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9338191504904142e-05, |
| "loss": 3.4555, |
| "step": 1872896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9329805557393622e-05, |
| "loss": 3.4436, |
| "step": 1873408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9321419609883102e-05, |
| "loss": 3.4439, |
| "step": 1873920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9313033662372582e-05, |
| "loss": 3.425, |
| "step": 1874432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9304664093665794e-05, |
| "loss": 3.4301, |
| "step": 1874944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.929627814615527e-05, |
| "loss": 3.4447, |
| "step": 1875456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9287892198644754e-05, |
| "loss": 3.4489, |
| "step": 1875968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9279506251134234e-05, |
| "loss": 3.4371, |
| "step": 1876480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9271136682427443e-05, |
| "loss": 3.4463, |
| "step": 1876992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9262750734916923e-05, |
| "loss": 3.4342, |
| "step": 1877504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9254364787406403e-05, |
| "loss": 3.4413, |
| "step": 1878016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9245978839895883e-05, |
| "loss": 3.4373, |
| "step": 1878528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9237609271189096e-05, |
| "loss": 3.4364, |
| "step": 1879040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9229223323678576e-05, |
| "loss": 3.4493, |
| "step": 1879552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9220837376168055e-05, |
| "loss": 3.4489, |
| "step": 1880064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9212451428657535e-05, |
| "loss": 3.4555, |
| "step": 1880576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9204081859950745e-05, |
| "loss": 3.4311, |
| "step": 1881088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9195695912440224e-05, |
| "loss": 3.4445, |
| "step": 1881600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9187309964929704e-05, |
| "loss": 3.4385, |
| "step": 1882112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9178924017419188e-05, |
| "loss": 3.4447, |
| "step": 1882624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9170554448712397e-05, |
| "loss": 3.4379, |
| "step": 1883136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9162168501201877e-05, |
| "loss": 3.446, |
| "step": 1883648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9153782553691357e-05, |
| "loss": 3.4365, |
| "step": 1884160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9145396606180837e-05, |
| "loss": 3.4379, |
| "step": 1884672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.913702703747405e-05, |
| "loss": 3.4362, |
| "step": 1885184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.912864108996353e-05, |
| "loss": 3.4412, |
| "step": 1885696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.912025514245301e-05, |
| "loss": 3.4267, |
| "step": 1886208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.911186919494249e-05, |
| "loss": 3.4461, |
| "step": 1886720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9103499626235698e-05, |
| "loss": 3.44, |
| "step": 1887232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9095113678725178e-05, |
| "loss": 3.4361, |
| "step": 1887744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9086727731214658e-05, |
| "loss": 3.4435, |
| "step": 1888256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.907834178370414e-05, |
| "loss": 3.4362, |
| "step": 1888768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.906997221499735e-05, |
| "loss": 3.4456, |
| "step": 1889280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.906158626748683e-05, |
| "loss": 3.4398, |
| "step": 1889792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.905320031997631e-05, |
| "loss": 3.4449, |
| "step": 1890304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.904481437246579e-05, |
| "loss": 3.4555, |
| "step": 1890816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9036444803759003e-05, |
| "loss": 3.4432, |
| "step": 1891328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9028058856248483e-05, |
| "loss": 3.4414, |
| "step": 1891840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9019672908737963e-05, |
| "loss": 3.4389, |
| "step": 1892352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9011286961227443e-05, |
| "loss": 3.4373, |
| "step": 1892864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9002917392520652e-05, |
| "loss": 3.439, |
| "step": 1893376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8994531445010132e-05, |
| "loss": 3.4426, |
| "step": 1893888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8986145497499612e-05, |
| "loss": 3.4248, |
| "step": 1894400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8977759549989095e-05, |
| "loss": 3.4518, |
| "step": 1894912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8969389981282304e-05, |
| "loss": 3.442, |
| "step": 1895424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8961004033771784e-05, |
| "loss": 3.4473, |
| "step": 1895936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8952618086261264e-05, |
| "loss": 3.4407, |
| "step": 1896448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8944232138750744e-05, |
| "loss": 3.431, |
| "step": 1896960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8935862570043957e-05, |
| "loss": 3.4366, |
| "step": 1897472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8927476622533437e-05, |
| "loss": 3.4477, |
| "step": 1897984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8919090675022916e-05, |
| "loss": 3.4444, |
| "step": 1898496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8910704727512396e-05, |
| "loss": 3.4517, |
| "step": 1899008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8902335158805606e-05, |
| "loss": 3.4243, |
| "step": 1899520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8893949211295085e-05, |
| "loss": 3.4483, |
| "step": 1900032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8885563263784565e-05, |
| "loss": 3.4356, |
| "step": 1900544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8877177316274045e-05, |
| "loss": 3.4511, |
| "step": 1901056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8868807747567258e-05, |
| "loss": 3.4326, |
| "step": 1901568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8860421800056738e-05, |
| "loss": 3.4412, |
| "step": 1902080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8852035852546218e-05, |
| "loss": 3.4455, |
| "step": 1902592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8843649905035694e-05, |
| "loss": 3.4549, |
| "step": 1903104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.883528033632891e-05, |
| "loss": 3.4364, |
| "step": 1903616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.882689438881839e-05, |
| "loss": 3.4461, |
| "step": 1904128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.881850844130787e-05, |
| "loss": 3.4365, |
| "step": 1904640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8810122493797347e-05, |
| "loss": 3.4468, |
| "step": 1905152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.880175292509056e-05, |
| "loss": 3.4365, |
| "step": 1905664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.879336697758004e-05, |
| "loss": 3.4373, |
| "step": 1906176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.878498103006952e-05, |
| "loss": 3.4464, |
| "step": 1906688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8776595082559e-05, |
| "loss": 3.4313, |
| "step": 1907200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.876822551385221e-05, |
| "loss": 3.4455, |
| "step": 1907712 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.878638744354248, |
| "eval_runtime": 306.9498, |
| "eval_samples_per_second": 1243.171, |
| "eval_steps_per_second": 38.85, |
| "step": 1908000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.875983956634169e-05, |
| "loss": 3.4334, |
| "step": 1908224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8751453618831168e-05, |
| "loss": 3.4309, |
| "step": 1908736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8743067671320648e-05, |
| "loss": 3.442, |
| "step": 1909248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8734681723810128e-05, |
| "loss": 3.4397, |
| "step": 1909760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8726312155103344e-05, |
| "loss": 3.4571, |
| "step": 1910272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.871792620759282e-05, |
| "loss": 3.4395, |
| "step": 1910784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.87095402600823e-05, |
| "loss": 3.4376, |
| "step": 1911296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.870115431257178e-05, |
| "loss": 3.447, |
| "step": 1911808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.869278474386499e-05, |
| "loss": 3.4353, |
| "step": 1912320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8684398796354473e-05, |
| "loss": 3.439, |
| "step": 1912832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8676012848843953e-05, |
| "loss": 3.441, |
| "step": 1913344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8667626901333433e-05, |
| "loss": 3.444, |
| "step": 1913856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8659257332626642e-05, |
| "loss": 3.4464, |
| "step": 1914368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8650871385116122e-05, |
| "loss": 3.4266, |
| "step": 1914880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.86424854376056e-05, |
| "loss": 3.4323, |
| "step": 1915392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.863409949009508e-05, |
| "loss": 3.4239, |
| "step": 1915904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8625729921388294e-05, |
| "loss": 3.4348, |
| "step": 1916416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8617343973877774e-05, |
| "loss": 3.441, |
| "step": 1916928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8608958026367254e-05, |
| "loss": 3.4275, |
| "step": 1917440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8600572078856734e-05, |
| "loss": 3.4538, |
| "step": 1917952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8592202510149943e-05, |
| "loss": 3.4474, |
| "step": 1918464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8583816562639426e-05, |
| "loss": 3.4472, |
| "step": 1918976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8575430615128906e-05, |
| "loss": 3.4413, |
| "step": 1919488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8567044667618386e-05, |
| "loss": 3.4389, |
| "step": 1920000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8558675098911595e-05, |
| "loss": 3.4443, |
| "step": 1920512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8550289151401075e-05, |
| "loss": 3.4412, |
| "step": 1921024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8541903203890555e-05, |
| "loss": 3.4287, |
| "step": 1921536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8533517256380035e-05, |
| "loss": 3.4463, |
| "step": 1922048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8525147687673248e-05, |
| "loss": 3.4225, |
| "step": 1922560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.8516761740162728e-05, |
| "loss": 3.4327, |
| "step": 1923072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8508375792652208e-05, |
| "loss": 3.439, |
| "step": 1923584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8499989845141688e-05, |
| "loss": 3.4392, |
| "step": 1924096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8491620276434897e-05, |
| "loss": 3.4408, |
| "step": 1924608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.848323432892438e-05, |
| "loss": 3.4456, |
| "step": 1925120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.847484838141386e-05, |
| "loss": 3.4355, |
| "step": 1925632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.846646243390334e-05, |
| "loss": 3.4282, |
| "step": 1926144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.845809286519655e-05, |
| "loss": 3.4389, |
| "step": 1926656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.844970691768603e-05, |
| "loss": 3.4255, |
| "step": 1927168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.844132097017551e-05, |
| "loss": 3.433, |
| "step": 1927680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.843293502266499e-05, |
| "loss": 3.4355, |
| "step": 1928192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.842454907515447e-05, |
| "loss": 3.4324, |
| "step": 1928704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.841617950644768e-05, |
| "loss": 3.4345, |
| "step": 1929216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.840779355893716e-05, |
| "loss": 3.4436, |
| "step": 1929728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.839940761142664e-05, |
| "loss": 3.4398, |
| "step": 1930240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.839103804271985e-05, |
| "loss": 3.4378, |
| "step": 1930752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8382652095209334e-05, |
| "loss": 3.4352, |
| "step": 1931264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8374266147698814e-05, |
| "loss": 3.4351, |
| "step": 1931776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8365880200188294e-05, |
| "loss": 3.4353, |
| "step": 1932288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8357510631481503e-05, |
| "loss": 3.4139, |
| "step": 1932800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8349124683970983e-05, |
| "loss": 3.4478, |
| "step": 1933312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8340738736460463e-05, |
| "loss": 3.4306, |
| "step": 1933824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8332352788949943e-05, |
| "loss": 3.4352, |
| "step": 1934336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8323983220243155e-05, |
| "loss": 3.4281, |
| "step": 1934848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8315597272732635e-05, |
| "loss": 3.4233, |
| "step": 1935360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8307211325222115e-05, |
| "loss": 3.4227, |
| "step": 1935872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.829882537771159e-05, |
| "loss": 3.4252, |
| "step": 1936384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.829043943020107e-05, |
| "loss": 3.4442, |
| "step": 1936896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.828205348269055e-05, |
| "loss": 3.4299, |
| "step": 1937408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8273667535180035e-05, |
| "loss": 3.4476, |
| "step": 1937920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8265281587669515e-05, |
| "loss": 3.4375, |
| "step": 1938432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8256912018962724e-05, |
| "loss": 3.416, |
| "step": 1938944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8248526071452204e-05, |
| "loss": 3.4318, |
| "step": 1939456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8240140123941684e-05, |
| "loss": 3.4244, |
| "step": 1939968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8231754176431164e-05, |
| "loss": 3.4205, |
| "step": 1940480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8223384607724376e-05, |
| "loss": 3.44, |
| "step": 1940992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8214998660213856e-05, |
| "loss": 3.4335, |
| "step": 1941504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8206612712703336e-05, |
| "loss": 3.4253, |
| "step": 1942016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8198226765192816e-05, |
| "loss": 3.4307, |
| "step": 1942528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8189857196486025e-05, |
| "loss": 3.4205, |
| "step": 1943040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8181471248975505e-05, |
| "loss": 3.4214, |
| "step": 1943552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.817308530146499e-05, |
| "loss": 3.4354, |
| "step": 1944064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.816469935395447e-05, |
| "loss": 3.4335, |
| "step": 1944576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8156329785247678e-05, |
| "loss": 3.4433, |
| "step": 1945088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8147943837737158e-05, |
| "loss": 3.43, |
| "step": 1945600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8139557890226637e-05, |
| "loss": 3.4471, |
| "step": 1946112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8131171942716117e-05, |
| "loss": 3.4272, |
| "step": 1946624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.812280237400933e-05, |
| "loss": 3.447, |
| "step": 1947136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.811441642649881e-05, |
| "loss": 3.4218, |
| "step": 1947648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.810603047898829e-05, |
| "loss": 3.4246, |
| "step": 1948160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.809764453147777e-05, |
| "loss": 3.4338, |
| "step": 1948672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.808927496277098e-05, |
| "loss": 3.4462, |
| "step": 1949184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.808088901526046e-05, |
| "loss": 3.4335, |
| "step": 1949696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8072503067749942e-05, |
| "loss": 3.4355, |
| "step": 1950208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8064117120239422e-05, |
| "loss": 3.4139, |
| "step": 1950720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.805574755153263e-05, |
| "loss": 3.4174, |
| "step": 1951232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.804736160402211e-05, |
| "loss": 3.4373, |
| "step": 1951744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.803897565651159e-05, |
| "loss": 3.4362, |
| "step": 1952256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.803058970900107e-05, |
| "loss": 3.4262, |
| "step": 1952768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8022220140294284e-05, |
| "loss": 3.4349, |
| "step": 1953280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8013834192783764e-05, |
| "loss": 3.4276, |
| "step": 1953792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8005448245273244e-05, |
| "loss": 3.4274, |
| "step": 1954304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.799706229776272e-05, |
| "loss": 3.4261, |
| "step": 1954816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7988692729055933e-05, |
| "loss": 3.4263, |
| "step": 1955328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7980306781545413e-05, |
| "loss": 3.4397, |
| "step": 1955840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7971920834034892e-05, |
| "loss": 3.4417, |
| "step": 1956352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7963534886524372e-05, |
| "loss": 3.4429, |
| "step": 1956864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7955165317817585e-05, |
| "loss": 3.4209, |
| "step": 1957376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7946779370307065e-05, |
| "loss": 3.436, |
| "step": 1957888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.793839342279654e-05, |
| "loss": 3.4263, |
| "step": 1958400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7930007475286025e-05, |
| "loss": 3.4362, |
| "step": 1958912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7921637906579237e-05, |
| "loss": 3.4302, |
| "step": 1959424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7913251959068717e-05, |
| "loss": 3.4299, |
| "step": 1959936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7904866011558194e-05, |
| "loss": 3.4262, |
| "step": 1960448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7896480064047674e-05, |
| "loss": 3.4244, |
| "step": 1960960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7888110495340886e-05, |
| "loss": 3.4292, |
| "step": 1961472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7879724547830366e-05, |
| "loss": 3.4287, |
| "step": 1961984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7871338600319846e-05, |
| "loss": 3.4212, |
| "step": 1962496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7862952652809326e-05, |
| "loss": 3.4331, |
| "step": 1963008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.785458308410254e-05, |
| "loss": 3.4294, |
| "step": 1963520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7846197136592015e-05, |
| "loss": 3.4252, |
| "step": 1964032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7837811189081495e-05, |
| "loss": 3.4291, |
| "step": 1964544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7829425241570975e-05, |
| "loss": 3.4272, |
| "step": 1965056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.782105567286419e-05, |
| "loss": 3.436, |
| "step": 1965568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7812669725353668e-05, |
| "loss": 3.4277, |
| "step": 1966080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7804283777843147e-05, |
| "loss": 3.4353, |
| "step": 1966592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7795897830332627e-05, |
| "loss": 3.4427, |
| "step": 1967104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7787528261625837e-05, |
| "loss": 3.4356, |
| "step": 1967616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.777914231411532e-05, |
| "loss": 3.4306, |
| "step": 1968128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.77707563666048e-05, |
| "loss": 3.4312, |
| "step": 1968640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.776237041909428e-05, |
| "loss": 3.4268, |
| "step": 1969152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.775400085038749e-05, |
| "loss": 3.4302, |
| "step": 1969664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.774561490287697e-05, |
| "loss": 3.4261, |
| "step": 1970176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.773722895536645e-05, |
| "loss": 3.4174, |
| "step": 1970688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.772884300785593e-05, |
| "loss": 3.4421, |
| "step": 1971200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.772047343914914e-05, |
| "loss": 3.4336, |
| "step": 1971712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.771208749163862e-05, |
| "loss": 3.4342, |
| "step": 1972224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.77037015441281e-05, |
| "loss": 3.429, |
| "step": 1972736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.769531559661758e-05, |
| "loss": 3.4195, |
| "step": 1973248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.768694602791079e-05, |
| "loss": 3.4264, |
| "step": 1973760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7678560080400274e-05, |
| "loss": 3.4378, |
| "step": 1974272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7670174132889753e-05, |
| "loss": 3.4332, |
| "step": 1974784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7661788185379233e-05, |
| "loss": 3.4405, |
| "step": 1975296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7653418616672443e-05, |
| "loss": 3.4166, |
| "step": 1975808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7645032669161922e-05, |
| "loss": 3.4407, |
| "step": 1976320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7636646721651402e-05, |
| "loss": 3.4235, |
| "step": 1976832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7628260774140882e-05, |
| "loss": 3.4361, |
| "step": 1977344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7619891205434095e-05, |
| "loss": 3.4289, |
| "step": 1977856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7611505257923575e-05, |
| "loss": 3.4249, |
| "step": 1978368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7603119310413055e-05, |
| "loss": 3.4349, |
| "step": 1978880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7594733362902535e-05, |
| "loss": 3.4484, |
| "step": 1979392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7586363794195744e-05, |
| "loss": 3.4254, |
| "step": 1979904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7577977846685227e-05, |
| "loss": 3.4353, |
| "step": 1980416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7569591899174707e-05, |
| "loss": 3.4246, |
| "step": 1980928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7561205951664187e-05, |
| "loss": 3.4325, |
| "step": 1981440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7552836382957396e-05, |
| "loss": 3.428, |
| "step": 1981952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7544450435446876e-05, |
| "loss": 3.428, |
| "step": 1982464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7536064487936356e-05, |
| "loss": 3.4352, |
| "step": 1982976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7527678540425836e-05, |
| "loss": 3.4246, |
| "step": 1983488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.751930897171905e-05, |
| "loss": 3.4387, |
| "step": 1984000 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.879655599594116, |
| "eval_runtime": 305.201, |
| "eval_samples_per_second": 1250.294, |
| "eval_steps_per_second": 39.073, |
| "step": 1984320 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 1.37353200907733e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |