| { |
| "best_metric": 3.8362677097320557, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full/transformer/3/checkpoints/checkpoint-915829", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 1450069, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.9262, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8448, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.1978, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 5.9733, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8185, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.6949, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.5933, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.5267, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4739, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.3928, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3519, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.3119, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989938500867749e-05, |
| "loss": 5.2679, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.2061, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.1775, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.146, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.1069, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.0731, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0519, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0249, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 4.9946, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 4.966, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9543, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9242, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.9154, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 4.8875, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.8765, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8582, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.8245, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756873037409844e-05, |
| "loss": 4.8264, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.7969, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.7862, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.7754, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.97233456261715e-05, |
| "loss": 4.7678, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.971495967866098e-05, |
| "loss": 4.7425, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970657373115046e-05, |
| "loss": 4.7337, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969818778363994e-05, |
| "loss": 4.7324, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.7159, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.7065, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 4.6884, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.6721, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96562908036948e-05, |
| "loss": 4.6661, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964790485618428e-05, |
| "loss": 4.6422, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963951890867376e-05, |
| "loss": 4.6601, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 4.6347, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9622763392456454e-05, |
| "loss": 4.635, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9614377444945934e-05, |
| "loss": 4.6263, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9605991497435414e-05, |
| "loss": 4.6213, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9597605549924894e-05, |
| "loss": 4.6119, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9589219602414374e-05, |
| "loss": 4.5893, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958085003370758e-05, |
| "loss": 4.5815, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957248046500079e-05, |
| "loss": 4.573, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956409451749027e-05, |
| "loss": 4.5807, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955570856997975e-05, |
| "loss": 4.5585, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.5691, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.544, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.5374, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5333, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951377883242715e-05, |
| "loss": 4.5289, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.525, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949702331620985e-05, |
| "loss": 4.5084, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948863736869932e-05, |
| "loss": 4.5036, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94802514211888e-05, |
| "loss": 4.5131, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 4.4943, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 4.4876, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945510995746097e-05, |
| "loss": 4.4876, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 4.4839, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 4.466, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942995211492941e-05, |
| "loss": 4.4773, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421582546222625e-05, |
| "loss": 4.465, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9413196598712105e-05, |
| "loss": 4.4566, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404810651201585e-05, |
| "loss": 4.4622, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396424703691065e-05, |
| "loss": 4.4488, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388055134984274e-05, |
| "loss": 4.4605, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379669187473754e-05, |
| "loss": 4.4615, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371283239963234e-05, |
| "loss": 4.4511, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362897292452714e-05, |
| "loss": 4.4344, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354511344942194e-05, |
| "loss": 4.4245, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93461417762354e-05, |
| "loss": 4.4141, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.933775582872488e-05, |
| "loss": 4.4273, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932936988121436e-05, |
| "loss": 4.4268, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932098393370384e-05, |
| "loss": 4.4242, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931259798619332e-05, |
| "loss": 4.3983, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930421203868281e-05, |
| "loss": 4.396, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 4.4027, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92874565224655e-05, |
| "loss": 4.3904, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927907057495498e-05, |
| "loss": 4.3958, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927068462744446e-05, |
| "loss": 4.3909, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926229867993394e-05, |
| "loss": 4.3938, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925392911122715e-05, |
| "loss": 4.3836, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924554316371663e-05, |
| "loss": 4.3786, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923715721620611e-05, |
| "loss": 4.3672, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922877126869559e-05, |
| "loss": 4.3803, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.92204016999888e-05, |
| "loss": 4.3646, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921201575247828e-05, |
| "loss": 4.3498, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920362980496776e-05, |
| "loss": 4.3557, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919524385745724e-05, |
| "loss": 4.3683, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918685790994672e-05, |
| "loss": 4.3612, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91784719624362e-05, |
| "loss": 4.3611, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9170086014925676e-05, |
| "loss": 4.3394, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9161700067415156e-05, |
| "loss": 4.3552, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.915333049870837e-05, |
| "loss": 4.3306, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914494455119785e-05, |
| "loss": 4.3469, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136558603687325e-05, |
| "loss": 4.3517, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.912818903498054e-05, |
| "loss": 4.3334, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911980308747002e-05, |
| "loss": 4.3284, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91114171399595e-05, |
| "loss": 4.3044, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910303119244898e-05, |
| "loss": 4.3163, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909464524493846e-05, |
| "loss": 4.3181, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908625929742794e-05, |
| "loss": 4.3143, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907788972872115e-05, |
| "loss": 4.3215, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906950378121063e-05, |
| "loss": 4.3028, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906111783370011e-05, |
| "loss": 4.3048, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905273188618959e-05, |
| "loss": 4.3109, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904434593867907e-05, |
| "loss": 4.3139, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903595999116855e-05, |
| "loss": 4.3002, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902759042246176e-05, |
| "loss": 4.2986, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901920447495124e-05, |
| "loss": 4.2944, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901081852744072e-05, |
| "loss": 4.2939, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90024325799302e-05, |
| "loss": 4.2981, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8994063011223415e-05, |
| "loss": 4.285, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985677063712895e-05, |
| "loss": 4.2811, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977291116202375e-05, |
| "loss": 4.2789, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968905168691855e-05, |
| "loss": 4.2744, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960519221181335e-05, |
| "loss": 4.2793, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952133273670815e-05, |
| "loss": 4.279, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943747326160294e-05, |
| "loss": 4.2854, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935361378649774e-05, |
| "loss": 4.2605, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8926991809942984e-05, |
| "loss": 4.2738, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918605862432463e-05, |
| "loss": 4.2752, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8910219914921943e-05, |
| "loss": 4.2633, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890185034621515e-05, |
| "loss": 4.2801, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889346439870464e-05, |
| "loss": 4.2603, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888507845119412e-05, |
| "loss": 4.2722, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88766925036836e-05, |
| "loss": 4.2644, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886830655617308e-05, |
| "loss": 4.2423, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885992060866256e-05, |
| "loss": 4.243, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885153466115204e-05, |
| "loss": 4.2468, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884316509244525e-05, |
| "loss": 4.2545, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883477914493473e-05, |
| "loss": 4.2552, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882639319742421e-05, |
| "loss": 4.2537, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881800724991369e-05, |
| "loss": 4.2437, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880962130240316e-05, |
| "loss": 4.24, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880123535489264e-05, |
| "loss": 4.2326, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879284940738212e-05, |
| "loss": 4.2285, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87844634598716e-05, |
| "loss": 4.2368, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877609389116482e-05, |
| "loss": 4.2482, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87677079436543e-05, |
| "loss": 4.2388, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875932199614378e-05, |
| "loss": 4.221, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875093604863326e-05, |
| "loss": 4.2233, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.19038724899292, |
| "eval_runtime": 610.1579, |
| "eval_samples_per_second": 625.397, |
| "eval_steps_per_second": 19.544, |
| "step": 76319 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8742566479926466e-05, |
| "loss": 4.2252, |
| "step": 76800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8734180532415946e-05, |
| "loss": 4.2202, |
| "step": 77312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8725794584905426e-05, |
| "loss": 4.2162, |
| "step": 77824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8717408637394906e-05, |
| "loss": 4.2165, |
| "step": 78336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8709022689884386e-05, |
| "loss": 4.2069, |
| "step": 78848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8700636742373866e-05, |
| "loss": 4.1919, |
| "step": 79360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8692250794863346e-05, |
| "loss": 4.1979, |
| "step": 79872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8683864847352826e-05, |
| "loss": 4.2106, |
| "step": 80384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8675478899842306e-05, |
| "loss": 4.1975, |
| "step": 80896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.866710933113552e-05, |
| "loss": 4.2019, |
| "step": 81408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8658723383625e-05, |
| "loss": 4.2037, |
| "step": 81920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.865033743611448e-05, |
| "loss": 4.2043, |
| "step": 82432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.864195148860396e-05, |
| "loss": 4.1957, |
| "step": 82944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.863356554109344e-05, |
| "loss": 4.1966, |
| "step": 83456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.862517959358292e-05, |
| "loss": 4.189, |
| "step": 83968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.86167936460724e-05, |
| "loss": 4.175, |
| "step": 84480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8608407698561874e-05, |
| "loss": 4.1831, |
| "step": 84992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8600021751051354e-05, |
| "loss": 4.188, |
| "step": 85504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8591635803540834e-05, |
| "loss": 4.1851, |
| "step": 86016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8583249856030314e-05, |
| "loss": 4.1896, |
| "step": 86528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8574863908519794e-05, |
| "loss": 4.1737, |
| "step": 87040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.856649433981301e-05, |
| "loss": 4.1826, |
| "step": 87552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8558124771106226e-05, |
| "loss": 4.1728, |
| "step": 88064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.85497388235957e-05, |
| "loss": 4.182, |
| "step": 88576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.854135287608518e-05, |
| "loss": 4.1717, |
| "step": 89088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.853296692857466e-05, |
| "loss": 4.1693, |
| "step": 89600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.852458098106414e-05, |
| "loss": 4.1735, |
| "step": 90112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.851619503355362e-05, |
| "loss": 4.1559, |
| "step": 90624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.850782546484683e-05, |
| "loss": 4.1716, |
| "step": 91136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849943951733631e-05, |
| "loss": 4.1541, |
| "step": 91648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849105356982579e-05, |
| "loss": 4.1591, |
| "step": 92160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.848266762231527e-05, |
| "loss": 4.1601, |
| "step": 92672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.847428167480475e-05, |
| "loss": 4.1658, |
| "step": 93184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.846589572729423e-05, |
| "loss": 4.1537, |
| "step": 93696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.845750977978371e-05, |
| "loss": 4.1486, |
| "step": 94208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8449140211076924e-05, |
| "loss": 4.1617, |
| "step": 94720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8440754263566404e-05, |
| "loss": 4.1508, |
| "step": 95232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8432368316055884e-05, |
| "loss": 4.1596, |
| "step": 95744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8423982368545363e-05, |
| "loss": 4.1428, |
| "step": 96256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8415596421034843e-05, |
| "loss": 4.134, |
| "step": 96768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.840722685232805e-05, |
| "loss": 4.1377, |
| "step": 97280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839884090481753e-05, |
| "loss": 4.1325, |
| "step": 97792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839045495730701e-05, |
| "loss": 4.1471, |
| "step": 98304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.838206900979649e-05, |
| "loss": 4.1337, |
| "step": 98816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.837368306228597e-05, |
| "loss": 4.1398, |
| "step": 99328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.836531349357918e-05, |
| "loss": 4.1356, |
| "step": 99840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.835692754606866e-05, |
| "loss": 4.1407, |
| "step": 100352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834854159855814e-05, |
| "loss": 4.1386, |
| "step": 100864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834015565104763e-05, |
| "loss": 4.1158, |
| "step": 101376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.833176970353711e-05, |
| "loss": 4.1228, |
| "step": 101888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.832340013483032e-05, |
| "loss": 4.1172, |
| "step": 102400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.83150141873198e-05, |
| "loss": 4.1337, |
| "step": 102912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.830662823980928e-05, |
| "loss": 4.1126, |
| "step": 103424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.829824229229876e-05, |
| "loss": 4.1327, |
| "step": 103936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8289872723591966e-05, |
| "loss": 4.1079, |
| "step": 104448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8281486776081446e-05, |
| "loss": 4.1093, |
| "step": 104960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8273100828570926e-05, |
| "loss": 4.1178, |
| "step": 105472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8264714881060406e-05, |
| "loss": 4.1109, |
| "step": 105984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8256328933549886e-05, |
| "loss": 4.1103, |
| "step": 106496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8247959364843095e-05, |
| "loss": 4.1066, |
| "step": 107008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.823957341733258e-05, |
| "loss": 4.1004, |
| "step": 107520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.823118746982206e-05, |
| "loss": 4.1145, |
| "step": 108032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8222801522311535e-05, |
| "loss": 4.1023, |
| "step": 108544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8214415574801015e-05, |
| "loss": 4.1001, |
| "step": 109056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.820604600609423e-05, |
| "loss": 4.1009, |
| "step": 109568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.819766005858371e-05, |
| "loss": 4.1048, |
| "step": 110080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8189274111073184e-05, |
| "loss": 4.086, |
| "step": 110592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8180888163562664e-05, |
| "loss": 4.1062, |
| "step": 111104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8172502216052144e-05, |
| "loss": 4.0921, |
| "step": 111616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.816413264734535e-05, |
| "loss": 4.0949, |
| "step": 112128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.815574669983483e-05, |
| "loss": 4.1, |
| "step": 112640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.814736075232432e-05, |
| "loss": 4.0925, |
| "step": 113152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.81389748048138e-05, |
| "loss": 4.107, |
| "step": 113664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.813058885730328e-05, |
| "loss": 4.1115, |
| "step": 114176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.812221928859649e-05, |
| "loss": 4.1057, |
| "step": 114688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.811383334108597e-05, |
| "loss": 4.0896, |
| "step": 115200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.810544739357545e-05, |
| "loss": 4.0812, |
| "step": 115712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.809706144606493e-05, |
| "loss": 4.0788, |
| "step": 116224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808869187735814e-05, |
| "loss": 4.0898, |
| "step": 116736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808030592984762e-05, |
| "loss": 4.0992, |
| "step": 117248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.80719199823371e-05, |
| "loss": 4.0934, |
| "step": 117760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.806353403482658e-05, |
| "loss": 4.0767, |
| "step": 118272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.805514808731606e-05, |
| "loss": 4.0712, |
| "step": 118784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.804677851860927e-05, |
| "loss": 4.0837, |
| "step": 119296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.803839257109875e-05, |
| "loss": 4.0712, |
| "step": 119808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.803000662358823e-05, |
| "loss": 4.0787, |
| "step": 120320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.802162067607771e-05, |
| "loss": 4.0813, |
| "step": 120832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.801325110737092e-05, |
| "loss": 4.0864, |
| "step": 121344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.80048651598604e-05, |
| "loss": 4.0758, |
| "step": 121856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.799647921234988e-05, |
| "loss": 4.0739, |
| "step": 122368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.798809326483936e-05, |
| "loss": 4.0661, |
| "step": 122880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797972369613257e-05, |
| "loss": 4.0764, |
| "step": 123392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797133774862205e-05, |
| "loss": 4.0747, |
| "step": 123904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.796295180111153e-05, |
| "loss": 4.0553, |
| "step": 124416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.795456585360101e-05, |
| "loss": 4.0645, |
| "step": 124928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.794619628489423e-05, |
| "loss": 4.0748, |
| "step": 125440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.793781033738371e-05, |
| "loss": 4.0816, |
| "step": 125952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.792942438987319e-05, |
| "loss": 4.0709, |
| "step": 126464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.792103844236267e-05, |
| "loss": 4.0609, |
| "step": 126976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7912668873655876e-05, |
| "loss": 4.0737, |
| "step": 127488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7904282926145356e-05, |
| "loss": 4.0521, |
| "step": 128000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7895896978634836e-05, |
| "loss": 4.0692, |
| "step": 128512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7887511031124316e-05, |
| "loss": 4.0745, |
| "step": 129024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7879141462417525e-05, |
| "loss": 4.0638, |
| "step": 129536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7870755514907005e-05, |
| "loss": 4.0642, |
| "step": 130048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7862369567396485e-05, |
| "loss": 4.0422, |
| "step": 130560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7853983619885965e-05, |
| "loss": 4.0443, |
| "step": 131072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7845597672375445e-05, |
| "loss": 4.0523, |
| "step": 131584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7837211724864925e-05, |
| "loss": 4.0529, |
| "step": 132096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.782882577735441e-05, |
| "loss": 4.0603, |
| "step": 132608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.782043982984389e-05, |
| "loss": 4.0472, |
| "step": 133120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.78120702611371e-05, |
| "loss": 4.0449, |
| "step": 133632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.780368431362658e-05, |
| "loss": 4.0553, |
| "step": 134144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.779529836611606e-05, |
| "loss": 4.0625, |
| "step": 134656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.778691241860554e-05, |
| "loss": 4.0489, |
| "step": 135168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777852647109502e-05, |
| "loss": 4.0545, |
| "step": 135680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777015690238823e-05, |
| "loss": 4.0463, |
| "step": 136192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.776177095487771e-05, |
| "loss": 4.0466, |
| "step": 136704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.775338500736719e-05, |
| "loss": 4.053, |
| "step": 137216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.77450154386604e-05, |
| "loss": 4.0388, |
| "step": 137728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.773662949114988e-05, |
| "loss": 4.0401, |
| "step": 138240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.772824354363936e-05, |
| "loss": 4.0383, |
| "step": 138752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7719857596128845e-05, |
| "loss": 4.0374, |
| "step": 139264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.771147164861832e-05, |
| "loss": 4.0412, |
| "step": 139776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.77030857011078e-05, |
| "loss": 4.045, |
| "step": 140288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.769469975359728e-05, |
| "loss": 4.0507, |
| "step": 140800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.768631380608676e-05, |
| "loss": 4.0309, |
| "step": 141312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.767794423737997e-05, |
| "loss": 4.038, |
| "step": 141824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.766955828986945e-05, |
| "loss": 4.0499, |
| "step": 142336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.766117234235893e-05, |
| "loss": 4.0353, |
| "step": 142848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.765278639484841e-05, |
| "loss": 4.0483, |
| "step": 143360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7644416826141616e-05, |
| "loss": 4.0335, |
| "step": 143872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7636030878631096e-05, |
| "loss": 4.05, |
| "step": 144384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.762764493112058e-05, |
| "loss": 4.0397, |
| "step": 144896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761925898361006e-05, |
| "loss": 4.023, |
| "step": 145408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761088941490327e-05, |
| "loss": 4.023, |
| "step": 145920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.760250346739275e-05, |
| "loss": 4.0282, |
| "step": 146432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.759411751988223e-05, |
| "loss": 4.0403, |
| "step": 146944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.758573157237171e-05, |
| "loss": 4.0351, |
| "step": 147456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.757736200366492e-05, |
| "loss": 4.038, |
| "step": 147968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.75689760561544e-05, |
| "loss": 4.0311, |
| "step": 148480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.756059010864388e-05, |
| "loss": 4.0242, |
| "step": 148992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.755220416113336e-05, |
| "loss": 4.0243, |
| "step": 149504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.754383459242657e-05, |
| "loss": 4.0186, |
| "step": 150016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.753544864491605e-05, |
| "loss": 4.0264, |
| "step": 150528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7527062697405536e-05, |
| "loss": 4.042, |
| "step": 151040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7518676749895016e-05, |
| "loss": 4.0257, |
| "step": 151552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7510307181188225e-05, |
| "loss": 4.02, |
| "step": 152064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7501921233677705e-05, |
| "loss": 4.0164, |
| "step": 152576 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.021358489990234, |
| "eval_runtime": 573.4841, |
| "eval_samples_per_second": 665.391, |
| "eval_steps_per_second": 20.794, |
| "step": 152638 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493551664970914e-05, |
| "loss": 4.0235, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7485165717460394e-05, |
| "loss": 4.0212, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7476779769949874e-05, |
| "loss": 4.013, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7468393822439354e-05, |
| "loss": 4.0163, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7460007874928834e-05, |
| "loss": 4.014, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451621927418314e-05, |
| "loss": 3.9954, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443235979907794e-05, |
| "loss": 4.0035, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434850032397274e-05, |
| "loss": 4.0169, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426464084886754e-05, |
| "loss": 3.9997, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741809451617997e-05, |
| "loss": 4.0077, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740970856866945e-05, |
| "loss": 4.0156, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740132262115893e-05, |
| "loss": 4.013, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739295305245214e-05, |
| "loss": 4.0024, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738456710494162e-05, |
| "loss": 4.0089, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73761811574311e-05, |
| "loss": 4.0003, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736779520992058e-05, |
| "loss": 3.9933, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735940926241006e-05, |
| "loss": 3.9974, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735102331489954e-05, |
| "loss": 4.0036, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734263736738902e-05, |
| "loss": 4.0044, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73342514198785e-05, |
| "loss": 4.0066, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732586547236797e-05, |
| "loss": 3.9932, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731747952485746e-05, |
| "loss": 4.0049, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730909357734694e-05, |
| "loss": 3.9955, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7300724008640154e-05, |
| "loss": 4.0007, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729233806112963e-05, |
| "loss": 3.995, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728395211361911e-05, |
| "loss": 3.9944, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727556616610859e-05, |
| "loss": 4.0008, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.72671965974018e-05, |
| "loss": 3.9825, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7258810649891277e-05, |
| "loss": 3.9961, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7250424702380757e-05, |
| "loss": 3.9847, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7242038754870237e-05, |
| "loss": 3.9875, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7233669186163446e-05, |
| "loss": 3.9901, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7225283238652926e-05, |
| "loss": 3.9941, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721689729114241e-05, |
| "loss": 3.9878, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720851134363189e-05, |
| "loss": 3.9793, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.72001417749251e-05, |
| "loss": 3.9938, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719175582741458e-05, |
| "loss": 3.9839, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718336987990406e-05, |
| "loss": 3.9973, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717498393239354e-05, |
| "loss": 3.9811, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716661436368675e-05, |
| "loss": 3.9697, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715822841617623e-05, |
| "loss": 3.9731, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714984246866571e-05, |
| "loss": 3.9746, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714145652115519e-05, |
| "loss": 3.9902, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71330869524484e-05, |
| "loss": 3.9687, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712470100493788e-05, |
| "loss": 3.984, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7116315057427366e-05, |
| "loss": 3.9794, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7107929109916846e-05, |
| "loss": 3.9778, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099559541210055e-05, |
| "loss": 3.9846, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091173593699535e-05, |
| "loss": 3.9593, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7082787646189015e-05, |
| "loss": 3.9662, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7074401698678495e-05, |
| "loss": 3.9602, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7066032129971704e-05, |
| "loss": 3.9814, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7057646182461184e-05, |
| "loss": 3.9606, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7049260234950664e-05, |
| "loss": 3.9795, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7040874287440144e-05, |
| "loss": 3.9573, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7032488339929624e-05, |
| "loss": 3.9583, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7024102392419104e-05, |
| "loss": 3.9637, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015716444908584e-05, |
| "loss": 3.9584, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70073468762018e-05, |
| "loss": 3.9656, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699896092869128e-05, |
| "loss": 3.9626, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699057498118076e-05, |
| "loss": 3.9481, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698218903367024e-05, |
| "loss": 3.9645, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697381946496345e-05, |
| "loss": 3.9562, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696543351745293e-05, |
| "loss": 3.9607, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695704756994241e-05, |
| "loss": 3.9531, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694866162243189e-05, |
| "loss": 3.9598, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69402920537251e-05, |
| "loss": 3.9405, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693190610621458e-05, |
| "loss": 3.9621, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692352015870406e-05, |
| "loss": 3.9489, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691513421119354e-05, |
| "loss": 3.9571, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690676464248675e-05, |
| "loss": 3.9555, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689837869497623e-05, |
| "loss": 3.9515, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688999274746571e-05, |
| "loss": 3.965, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688160679995519e-05, |
| "loss": 3.9718, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68732372312484e-05, |
| "loss": 3.9671, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686485128373788e-05, |
| "loss": 3.9559, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.685646533622736e-05, |
| "loss": 3.9443, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684807938871684e-05, |
| "loss": 3.9369, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683970982001005e-05, |
| "loss": 3.9566, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683132387249953e-05, |
| "loss": 3.9623, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682293792498901e-05, |
| "loss": 3.9598, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681455197747849e-05, |
| "loss": 3.9423, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680618240877171e-05, |
| "loss": 3.9382, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679779646126119e-05, |
| "loss": 3.9468, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678941051375067e-05, |
| "loss": 3.9368, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678102456624015e-05, |
| "loss": 3.9441, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6772654997533356e-05, |
| "loss": 3.9503, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6764269050022836e-05, |
| "loss": 3.9515, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6755883102512316e-05, |
| "loss": 3.9443, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6747513533805525e-05, |
| "loss": 3.9466, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6739127586295005e-05, |
| "loss": 3.9364, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6730741638784485e-05, |
| "loss": 3.9473, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722355691273965e-05, |
| "loss": 3.9421, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713969743763445e-05, |
| "loss": 3.9311, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705583796252925e-05, |
| "loss": 3.9362, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.669721422754614e-05, |
| "loss": 3.9461, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668882828003562e-05, |
| "loss": 3.9543, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66804423325251e-05, |
| "loss": 3.9418, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667205638501458e-05, |
| "loss": 3.9397, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663670437504054e-05, |
| "loss": 3.9464, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6655284489993534e-05, |
| "loss": 3.9267, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6646898542483013e-05, |
| "loss": 3.9445, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6638512594972493e-05, |
| "loss": 3.9484, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663015940506944e-05, |
| "loss": 3.9366, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662177345755892e-05, |
| "loss": 3.9453, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66133875100484e-05, |
| "loss": 3.9215, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660500156253788e-05, |
| "loss": 3.919, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659661561502736e-05, |
| "loss": 3.9327, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658822966751684e-05, |
| "loss": 3.9315, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657984372000632e-05, |
| "loss": 3.9407, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65714577724958e-05, |
| "loss": 3.9262, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656308820378901e-05, |
| "loss": 3.9234, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655470225627849e-05, |
| "loss": 3.9304, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654631630876797e-05, |
| "loss": 3.943, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653793036125745e-05, |
| "loss": 3.9323, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529560792550656e-05, |
| "loss": 3.9396, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6521174845040136e-05, |
| "loss": 3.9249, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6512788897529616e-05, |
| "loss": 3.925, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6504402950019096e-05, |
| "loss": 3.9398, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649604976011605e-05, |
| "loss": 3.9217, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648766381260553e-05, |
| "loss": 3.9232, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479277865095e-05, |
| "loss": 3.9222, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647089191758448e-05, |
| "loss": 3.9193, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.646250597007396e-05, |
| "loss": 3.9248, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645412002256344e-05, |
| "loss": 3.9264, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644573407505292e-05, |
| "loss": 3.9381, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64373481275424e-05, |
| "loss": 3.9164, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642897855883561e-05, |
| "loss": 3.9204, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642059261132509e-05, |
| "loss": 3.9392, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641220666381457e-05, |
| "loss": 3.9199, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6403837095107786e-05, |
| "loss": 3.9367, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6395451147597266e-05, |
| "loss": 3.9232, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6387065200086746e-05, |
| "loss": 3.9351, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6378679252576226e-05, |
| "loss": 3.9274, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6370293305065705e-05, |
| "loss": 3.9129, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6361907357555185e-05, |
| "loss": 3.9144, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6353521410044665e-05, |
| "loss": 3.9147, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6345135462534145e-05, |
| "loss": 3.9261, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6336765893827354e-05, |
| "loss": 3.9289, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6328379946316834e-05, |
| "loss": 3.9246, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6319993998806314e-05, |
| "loss": 3.9204, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6311608051295794e-05, |
| "loss": 3.9152, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6303238482589003e-05, |
| "loss": 3.916, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629485253507848e-05, |
| "loss": 3.909, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628646658756797e-05, |
| "loss": 3.9198, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.627808064005745e-05, |
| "loss": 3.9292, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626971107135066e-05, |
| "loss": 3.9176, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626132512384014e-05, |
| "loss": 3.9138, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625295555513335e-05, |
| "loss": 3.9108, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.946960926055908, |
| "eval_runtime": 561.2521, |
| "eval_samples_per_second": 679.892, |
| "eval_steps_per_second": 21.247, |
| "step": 228957 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.624456960762283e-05, |
| "loss": 3.9148, |
| "step": 229376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.623618366011231e-05, |
| "loss": 3.9194, |
| "step": 229888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.622779771260179e-05, |
| "loss": 3.9081, |
| "step": 230400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.621941176509127e-05, |
| "loss": 3.9091, |
| "step": 230912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.621102581758075e-05, |
| "loss": 3.9116, |
| "step": 231424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.620263987007023e-05, |
| "loss": 3.8901, |
| "step": 231936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.619425392255971e-05, |
| "loss": 3.8972, |
| "step": 232448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.618586797504919e-05, |
| "loss": 3.9123, |
| "step": 232960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6177498406342404e-05, |
| "loss": 3.9026, |
| "step": 233472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6169112458831884e-05, |
| "loss": 3.9083, |
| "step": 233984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6160726511321364e-05, |
| "loss": 3.9097, |
| "step": 234496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.615234056381084e-05, |
| "loss": 3.9069, |
| "step": 235008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.614397099510405e-05, |
| "loss": 3.9001, |
| "step": 235520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.613558504759353e-05, |
| "loss": 3.9101, |
| "step": 236032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6127199100083006e-05, |
| "loss": 3.8955, |
| "step": 236544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6118813152572486e-05, |
| "loss": 3.8933, |
| "step": 237056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6110427205061966e-05, |
| "loss": 3.8944, |
| "step": 237568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6102041257551446e-05, |
| "loss": 3.9004, |
| "step": 238080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6093655310040926e-05, |
| "loss": 3.9046, |
| "step": 238592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6085269362530406e-05, |
| "loss": 3.9084, |
| "step": 239104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.607689979382362e-05, |
| "loss": 3.8949, |
| "step": 239616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.60685138463131e-05, |
| "loss": 3.9076, |
| "step": 240128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.606012789880258e-05, |
| "loss": 3.8985, |
| "step": 240640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.605174195129206e-05, |
| "loss": 3.8976, |
| "step": 241152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.604335600378154e-05, |
| "loss": 3.8984, |
| "step": 241664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.603497005627102e-05, |
| "loss": 3.8927, |
| "step": 242176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.60265841087605e-05, |
| "loss": 3.9022, |
| "step": 242688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.601819816124998e-05, |
| "loss": 3.8863, |
| "step": 243200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.600982859254319e-05, |
| "loss": 3.9018, |
| "step": 243712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.600144264503267e-05, |
| "loss": 3.8868, |
| "step": 244224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.599305669752215e-05, |
| "loss": 3.8888, |
| "step": 244736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.598467075001163e-05, |
| "loss": 3.895, |
| "step": 245248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5976317560108575e-05, |
| "loss": 3.9025, |
| "step": 245760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5967931612598055e-05, |
| "loss": 3.8881, |
| "step": 246272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5959545665087535e-05, |
| "loss": 3.8911, |
| "step": 246784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5951159717577015e-05, |
| "loss": 3.8944, |
| "step": 247296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5942773770066495e-05, |
| "loss": 3.8894, |
| "step": 247808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5934404201359704e-05, |
| "loss": 3.9076, |
| "step": 248320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5926018253849184e-05, |
| "loss": 3.8868, |
| "step": 248832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5917632306338664e-05, |
| "loss": 3.8762, |
| "step": 249344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5909246358828144e-05, |
| "loss": 3.8819, |
| "step": 249856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.590087679012135e-05, |
| "loss": 3.8762, |
| "step": 250368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.589249084261083e-05, |
| "loss": 3.8977, |
| "step": 250880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.588410489510031e-05, |
| "loss": 3.8819, |
| "step": 251392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.58757189475898e-05, |
| "loss": 3.8898, |
| "step": 251904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.586733300007928e-05, |
| "loss": 3.8842, |
| "step": 252416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585894705256876e-05, |
| "loss": 3.888, |
| "step": 252928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585056110505824e-05, |
| "loss": 3.8913, |
| "step": 253440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.584217515754772e-05, |
| "loss": 3.8705, |
| "step": 253952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.583380558884093e-05, |
| "loss": 3.8703, |
| "step": 254464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.582541964133041e-05, |
| "loss": 3.8748, |
| "step": 254976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.581705007262362e-05, |
| "loss": 3.8894, |
| "step": 255488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.58086641251131e-05, |
| "loss": 3.8717, |
| "step": 256000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.580027817760258e-05, |
| "loss": 3.8859, |
| "step": 256512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.579189223009206e-05, |
| "loss": 3.8689, |
| "step": 257024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5783522661385267e-05, |
| "loss": 3.8667, |
| "step": 257536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.577513671387475e-05, |
| "loss": 3.8775, |
| "step": 258048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.576675076636423e-05, |
| "loss": 3.8652, |
| "step": 258560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.575836481885371e-05, |
| "loss": 3.8819, |
| "step": 259072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.574997887134319e-05, |
| "loss": 3.8763, |
| "step": 259584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.57416093026364e-05, |
| "loss": 3.8596, |
| "step": 260096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.573322335512588e-05, |
| "loss": 3.8747, |
| "step": 260608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.572483740761536e-05, |
| "loss": 3.8676, |
| "step": 261120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.571645146010484e-05, |
| "loss": 3.8756, |
| "step": 261632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5708065512594315e-05, |
| "loss": 3.8635, |
| "step": 262144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.569969594388753e-05, |
| "loss": 3.8741, |
| "step": 262656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.569130999637701e-05, |
| "loss": 3.8572, |
| "step": 263168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.568292404886649e-05, |
| "loss": 3.872, |
| "step": 263680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.567453810135597e-05, |
| "loss": 3.8588, |
| "step": 264192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.566615215384545e-05, |
| "loss": 3.8674, |
| "step": 264704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.565776620633493e-05, |
| "loss": 3.8704, |
| "step": 265216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564939663762814e-05, |
| "loss": 3.8704, |
| "step": 265728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564101069011762e-05, |
| "loss": 3.8742, |
| "step": 266240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.56326247426071e-05, |
| "loss": 3.8881, |
| "step": 266752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.562423879509658e-05, |
| "loss": 3.8813, |
| "step": 267264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.561586922638979e-05, |
| "loss": 3.8695, |
| "step": 267776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.560748327887927e-05, |
| "loss": 3.8628, |
| "step": 268288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559909733136875e-05, |
| "loss": 3.856, |
| "step": 268800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559071138385823e-05, |
| "loss": 3.87, |
| "step": 269312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5582341815151445e-05, |
| "loss": 3.8789, |
| "step": 269824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5573955867640925e-05, |
| "loss": 3.8758, |
| "step": 270336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5565569920130405e-05, |
| "loss": 3.8578, |
| "step": 270848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5557183972619885e-05, |
| "loss": 3.8556, |
| "step": 271360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5548798025109365e-05, |
| "loss": 3.8688, |
| "step": 271872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5540428456402574e-05, |
| "loss": 3.8484, |
| "step": 272384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5532042508892054e-05, |
| "loss": 3.8637, |
| "step": 272896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5523656561381534e-05, |
| "loss": 3.8648, |
| "step": 273408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5515270613871014e-05, |
| "loss": 3.868, |
| "step": 273920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5506884666360494e-05, |
| "loss": 3.8651, |
| "step": 274432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.54985150976537e-05, |
| "loss": 3.8682, |
| "step": 274944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.549012915014318e-05, |
| "loss": 3.8534, |
| "step": 275456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.548174320263266e-05, |
| "loss": 3.8638, |
| "step": 275968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.547335725512214e-05, |
| "loss": 3.8587, |
| "step": 276480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.546498768641536e-05, |
| "loss": 3.8545, |
| "step": 276992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.545660173890484e-05, |
| "loss": 3.8546, |
| "step": 277504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.544821579139432e-05, |
| "loss": 3.8694, |
| "step": 278016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.54398298438838e-05, |
| "loss": 3.8694, |
| "step": 278528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.543144389637328e-05, |
| "loss": 3.8677, |
| "step": 279040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.542305794886276e-05, |
| "loss": 3.8574, |
| "step": 279552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.541467200135224e-05, |
| "loss": 3.8662, |
| "step": 280064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.540628605384172e-05, |
| "loss": 3.8499, |
| "step": 280576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.539791648513493e-05, |
| "loss": 3.8619, |
| "step": 281088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5389546916428136e-05, |
| "loss": 3.8686, |
| "step": 281600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5381160968917616e-05, |
| "loss": 3.8562, |
| "step": 282112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5372775021407096e-05, |
| "loss": 3.869, |
| "step": 282624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5364389073896576e-05, |
| "loss": 3.839, |
| "step": 283136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.535600312638606e-05, |
| "loss": 3.8419, |
| "step": 283648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.534763355767927e-05, |
| "loss": 3.854, |
| "step": 284160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.533924761016875e-05, |
| "loss": 3.8545, |
| "step": 284672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.533086166265823e-05, |
| "loss": 3.8618, |
| "step": 285184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.532247571514771e-05, |
| "loss": 3.8477, |
| "step": 285696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.531408976763719e-05, |
| "loss": 3.8448, |
| "step": 286208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.53057201989304e-05, |
| "loss": 3.853, |
| "step": 286720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.529733425141988e-05, |
| "loss": 3.8649, |
| "step": 287232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.528894830390936e-05, |
| "loss": 3.8569, |
| "step": 287744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.528056235639884e-05, |
| "loss": 3.8625, |
| "step": 288256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.527217640888832e-05, |
| "loss": 3.8478, |
| "step": 288768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.526380684018153e-05, |
| "loss": 3.8469, |
| "step": 289280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5255420892671017e-05, |
| "loss": 3.8651, |
| "step": 289792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5247034945160496e-05, |
| "loss": 3.8467, |
| "step": 290304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5238648997649976e-05, |
| "loss": 3.8435, |
| "step": 290816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5230279428943186e-05, |
| "loss": 3.8484, |
| "step": 291328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5221893481432665e-05, |
| "loss": 3.8407, |
| "step": 291840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5213507533922145e-05, |
| "loss": 3.8528, |
| "step": 292352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5205121586411625e-05, |
| "loss": 3.8502, |
| "step": 292864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.51967356389011e-05, |
| "loss": 3.8643, |
| "step": 293376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.518834969139058e-05, |
| "loss": 3.8417, |
| "step": 293888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5179980122683794e-05, |
| "loss": 3.8425, |
| "step": 294400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5171594175173274e-05, |
| "loss": 3.8678, |
| "step": 294912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5163208227662754e-05, |
| "loss": 3.8427, |
| "step": 295424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5154822280152234e-05, |
| "loss": 3.8577, |
| "step": 295936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5146436332641714e-05, |
| "loss": 3.8522, |
| "step": 296448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5138050385131194e-05, |
| "loss": 3.8599, |
| "step": 296960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.51296808164244e-05, |
| "loss": 3.8515, |
| "step": 297472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.512129486891388e-05, |
| "loss": 3.8449, |
| "step": 297984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.511290892140336e-05, |
| "loss": 3.838, |
| "step": 298496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.510452297389284e-05, |
| "loss": 3.8403, |
| "step": 299008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.509615340518605e-05, |
| "loss": 3.8493, |
| "step": 299520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.508776745767553e-05, |
| "loss": 3.8571, |
| "step": 300032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.507938151016501e-05, |
| "loss": 3.8492, |
| "step": 300544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.507099556265449e-05, |
| "loss": 3.8521, |
| "step": 301056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.506262599394771e-05, |
| "loss": 3.8406, |
| "step": 301568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.505424004643719e-05, |
| "loss": 3.8453, |
| "step": 302080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.504585409892667e-05, |
| "loss": 3.8336, |
| "step": 302592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.503746815141615e-05, |
| "loss": 3.8493, |
| "step": 303104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.502908220390563e-05, |
| "loss": 3.8526, |
| "step": 303616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.502071263519884e-05, |
| "loss": 3.8532, |
| "step": 304128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.501232668768832e-05, |
| "loss": 3.8393, |
| "step": 304640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.50039407401778e-05, |
| "loss": 3.8387, |
| "step": 305152 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.905977487564087, |
| "eval_runtime": 573.4819, |
| "eval_samples_per_second": 665.393, |
| "eval_steps_per_second": 20.794, |
| "step": 305276 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.499555479266728e-05, |
| "loss": 3.847, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.498716884515676e-05, |
| "loss": 3.8433, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497878289764624e-05, |
| "loss": 3.8395, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497039695013572e-05, |
| "loss": 3.8392, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4962011002625197e-05, |
| "loss": 3.839, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4953625055114677e-05, |
| "loss": 3.8243, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4945239107604156e-05, |
| "loss": 3.8286, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4936853160093636e-05, |
| "loss": 3.8343, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492848359138685e-05, |
| "loss": 3.8348, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492009764387633e-05, |
| "loss": 3.8394, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.491171169636581e-05, |
| "loss": 3.8375, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4903325748855285e-05, |
| "loss": 3.8331, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.48949561801485e-05, |
| "loss": 3.8319, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.488657023263798e-05, |
| "loss": 3.8403, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.487818428512746e-05, |
| "loss": 3.8297, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4869798337616934e-05, |
| "loss": 3.8231, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4861412390106414e-05, |
| "loss": 3.8245, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4853026442595894e-05, |
| "loss": 3.8353, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4844640495085374e-05, |
| "loss": 3.8323, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4836254547574854e-05, |
| "loss": 3.8431, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.482788497886807e-05, |
| "loss": 3.8271, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481949903135755e-05, |
| "loss": 3.8377, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481111308384703e-05, |
| "loss": 3.8315, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480272713633651e-05, |
| "loss": 3.8304, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479435756762972e-05, |
| "loss": 3.8289, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47859716201192e-05, |
| "loss": 3.8251, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477758567260868e-05, |
| "loss": 3.8342, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476919972509816e-05, |
| "loss": 3.8195, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476083015639137e-05, |
| "loss": 3.8328, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475244420888085e-05, |
| "loss": 3.8227, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474405826137033e-05, |
| "loss": 3.8161, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473567231385981e-05, |
| "loss": 3.83, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4727302745153024e-05, |
| "loss": 3.8356, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4718916797642504e-05, |
| "loss": 3.8257, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4710530850131984e-05, |
| "loss": 3.8222, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4702144902621464e-05, |
| "loss": 3.8279, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.469377533391467e-05, |
| "loss": 3.8257, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.468538938640415e-05, |
| "loss": 3.8402, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.467700343889363e-05, |
| "loss": 3.8216, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466861749138311e-05, |
| "loss": 3.8096, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466024792267632e-05, |
| "loss": 3.8167, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.46518619751658e-05, |
| "loss": 3.8132, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464347602765528e-05, |
| "loss": 3.8292, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463509008014476e-05, |
| "loss": 3.8173, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.462670413263425e-05, |
| "loss": 3.8277, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461833456392746e-05, |
| "loss": 3.8221, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460994861641694e-05, |
| "loss": 3.8203, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460156266890642e-05, |
| "loss": 3.8297, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.45931767213959e-05, |
| "loss": 3.8084, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4584807152689106e-05, |
| "loss": 3.8052, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4576421205178586e-05, |
| "loss": 3.8157, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4568035257668066e-05, |
| "loss": 3.8218, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4559649310157546e-05, |
| "loss": 3.8068, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551279741450755e-05, |
| "loss": 3.8217, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542893793940235e-05, |
| "loss": 3.8063, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534507846429715e-05, |
| "loss": 3.8027, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.45261218989192e-05, |
| "loss": 3.814, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.451775233021241e-05, |
| "loss": 3.802, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450936638270189e-05, |
| "loss": 3.8208, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450098043519137e-05, |
| "loss": 3.8151, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.449259448768085e-05, |
| "loss": 3.7943, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448422491897406e-05, |
| "loss": 3.8113, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.447583897146354e-05, |
| "loss": 3.8067, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.446745302395302e-05, |
| "loss": 3.8185, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44590670764425e-05, |
| "loss": 3.8004, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445069750773571e-05, |
| "loss": 3.8119, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444231156022519e-05, |
| "loss": 3.7962, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443392561271467e-05, |
| "loss": 3.809, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4425539665204156e-05, |
| "loss": 3.7996, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4417153717693636e-05, |
| "loss": 3.8029, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4408767770183116e-05, |
| "loss": 3.8117, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4400398201476325e-05, |
| "loss": 3.8048, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4392012253965805e-05, |
| "loss": 3.8145, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4383626306455285e-05, |
| "loss": 3.8245, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4375240358944764e-05, |
| "loss": 3.8225, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4366870790237974e-05, |
| "loss": 3.8094, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4358484842727454e-05, |
| "loss": 3.8024, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4350098895216933e-05, |
| "loss": 3.7957, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4341712947706413e-05, |
| "loss": 3.8076, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.433334337899962e-05, |
| "loss": 3.8232, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.43249574314891e-05, |
| "loss": 3.8117, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431657148397859e-05, |
| "loss": 3.7988, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430818553646807e-05, |
| "loss": 3.7917, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429981596776128e-05, |
| "loss": 3.8133, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429143002025076e-05, |
| "loss": 3.7904, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.428304407274024e-05, |
| "loss": 3.8, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.427465812522971e-05, |
| "loss": 3.8016, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.426628855652293e-05, |
| "loss": 3.8067, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.425790260901241e-05, |
| "loss": 3.8081, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424951666150189e-05, |
| "loss": 3.8105, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424113071399136e-05, |
| "loss": 3.7965, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4232761145284576e-05, |
| "loss": 3.8042, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4224375197774056e-05, |
| "loss": 3.802, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.421598925026354e-05, |
| "loss": 3.7951, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4207603302753016e-05, |
| "loss": 3.795, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419925011284996e-05, |
| "loss": 3.8122, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419086416533944e-05, |
| "loss": 3.8103, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.418247821782892e-05, |
| "loss": 3.811, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.41740922703184e-05, |
| "loss": 3.7999, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416570632280788e-05, |
| "loss": 3.805, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415732037529736e-05, |
| "loss": 3.7914, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4148934427786834e-05, |
| "loss": 3.8078, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4140548480276314e-05, |
| "loss": 3.8061, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4132162532765794e-05, |
| "loss": 3.7993, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.412379296405901e-05, |
| "loss": 3.8131, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.411540701654849e-05, |
| "loss": 3.7846, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.410702106903797e-05, |
| "loss": 3.7861, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409863512152745e-05, |
| "loss": 3.7937, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409026555282066e-05, |
| "loss": 3.7955, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.408187960531014e-05, |
| "loss": 3.804, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407349365779962e-05, |
| "loss": 3.7963, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40651077102891e-05, |
| "loss": 3.7828, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405673814158231e-05, |
| "loss": 3.7958, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404835219407179e-05, |
| "loss": 3.808, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403996624656127e-05, |
| "loss": 3.8007, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403158029905075e-05, |
| "loss": 3.8075, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4023210730343963e-05, |
| "loss": 3.7932, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4014824782833443e-05, |
| "loss": 3.7899, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4006438835322923e-05, |
| "loss": 3.8083, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39980528878124e-05, |
| "loss": 3.7923, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398968331910561e-05, |
| "loss": 3.7889, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398129737159509e-05, |
| "loss": 3.7892, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397291142408457e-05, |
| "loss": 3.7861, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396452547657405e-05, |
| "loss": 3.7978, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.395615590786726e-05, |
| "loss": 3.7955, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.394776996035674e-05, |
| "loss": 3.8052, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393938401284622e-05, |
| "loss": 3.7848, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39309980653357e-05, |
| "loss": 3.7887, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392262849662892e-05, |
| "loss": 3.8073, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39142425491184e-05, |
| "loss": 3.791, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.390585660160788e-05, |
| "loss": 3.8004, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.389747065409736e-05, |
| "loss": 3.8009, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3889101085390566e-05, |
| "loss": 3.8011, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3880715137880046e-05, |
| "loss": 3.7997, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3872329190369526e-05, |
| "loss": 3.786, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3863943242859006e-05, |
| "loss": 3.7851, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3855573674152215e-05, |
| "loss": 3.7895, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3847187726641695e-05, |
| "loss": 3.7937, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3838801779131175e-05, |
| "loss": 3.7989, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3830415831620655e-05, |
| "loss": 3.7958, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.382204626291387e-05, |
| "loss": 3.798, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.381366031540335e-05, |
| "loss": 3.7895, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.380527436789283e-05, |
| "loss": 3.7906, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.379688842038231e-05, |
| "loss": 3.7778, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378851885167552e-05, |
| "loss": 3.7967, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3780132904165e-05, |
| "loss": 3.7984, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.377174695665448e-05, |
| "loss": 3.7979, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.376336100914396e-05, |
| "loss": 3.7826, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375499144043717e-05, |
| "loss": 3.7867, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.881382703781128, |
| "eval_runtime": 571.5891, |
| "eval_samples_per_second": 667.597, |
| "eval_steps_per_second": 20.863, |
| "step": 381595 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.374662187173038e-05, |
| "loss": 3.8034, |
| "step": 381952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3738235924219865e-05, |
| "loss": 3.7859, |
| "step": 382464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3729849976709345e-05, |
| "loss": 3.7868, |
| "step": 382976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3721464029198824e-05, |
| "loss": 3.7839, |
| "step": 383488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3713078081688304e-05, |
| "loss": 3.7856, |
| "step": 384000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3704692134177784e-05, |
| "loss": 3.7733, |
| "step": 384512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3696306186667264e-05, |
| "loss": 3.7772, |
| "step": 385024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3687920239156744e-05, |
| "loss": 3.7763, |
| "step": 385536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3679550670449953e-05, |
| "loss": 3.7843, |
| "step": 386048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.367118110174316e-05, |
| "loss": 3.7897, |
| "step": 386560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.366279515423264e-05, |
| "loss": 3.7821, |
| "step": 387072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.365440920672212e-05, |
| "loss": 3.7816, |
| "step": 387584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.36460232592116e-05, |
| "loss": 3.7853, |
| "step": 388096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.363763731170108e-05, |
| "loss": 3.787, |
| "step": 388608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362925136419056e-05, |
| "loss": 3.7774, |
| "step": 389120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362086541668005e-05, |
| "loss": 3.771, |
| "step": 389632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.361247946916953e-05, |
| "loss": 3.773, |
| "step": 390144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.360410990046274e-05, |
| "loss": 3.7812, |
| "step": 390656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.359572395295222e-05, |
| "loss": 3.7807, |
| "step": 391168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.35873380054417e-05, |
| "loss": 3.7918, |
| "step": 391680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.357895205793118e-05, |
| "loss": 3.7765, |
| "step": 392192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.357058248922439e-05, |
| "loss": 3.7856, |
| "step": 392704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.356219654171387e-05, |
| "loss": 3.783, |
| "step": 393216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.355381059420335e-05, |
| "loss": 3.7788, |
| "step": 393728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.354542464669283e-05, |
| "loss": 3.7771, |
| "step": 394240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3537055077986036e-05, |
| "loss": 3.7762, |
| "step": 394752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3528669130475516e-05, |
| "loss": 3.7844, |
| "step": 395264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3520283182964996e-05, |
| "loss": 3.7666, |
| "step": 395776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.351189723545448e-05, |
| "loss": 3.7852, |
| "step": 396288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.350352766674769e-05, |
| "loss": 3.771, |
| "step": 396800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.349514171923717e-05, |
| "loss": 3.7642, |
| "step": 397312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.348675577172665e-05, |
| "loss": 3.7807, |
| "step": 397824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.347836982421613e-05, |
| "loss": 3.7819, |
| "step": 398336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.347000025550934e-05, |
| "loss": 3.7805, |
| "step": 398848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.346161430799882e-05, |
| "loss": 3.7737, |
| "step": 399360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.34532283604883e-05, |
| "loss": 3.7733, |
| "step": 399872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.344484241297778e-05, |
| "loss": 3.7733, |
| "step": 400384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.343647284427099e-05, |
| "loss": 3.7905, |
| "step": 400896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.342808689676047e-05, |
| "loss": 3.7733, |
| "step": 401408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.341970094924995e-05, |
| "loss": 3.7604, |
| "step": 401920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3411315001739436e-05, |
| "loss": 3.7673, |
| "step": 402432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3402945433032645e-05, |
| "loss": 3.7621, |
| "step": 402944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3394559485522125e-05, |
| "loss": 3.7779, |
| "step": 403456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3386173538011605e-05, |
| "loss": 3.7728, |
| "step": 403968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3377787590501085e-05, |
| "loss": 3.7736, |
| "step": 404480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3369418021794294e-05, |
| "loss": 3.7765, |
| "step": 404992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3361032074283774e-05, |
| "loss": 3.7722, |
| "step": 405504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3352646126773254e-05, |
| "loss": 3.7818, |
| "step": 406016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3344260179262734e-05, |
| "loss": 3.7601, |
| "step": 406528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.333589061055594e-05, |
| "loss": 3.7541, |
| "step": 407040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.332750466304542e-05, |
| "loss": 3.7702, |
| "step": 407552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.33191187155349e-05, |
| "loss": 3.7656, |
| "step": 408064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331073276802439e-05, |
| "loss": 3.762, |
| "step": 408576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.33023631993176e-05, |
| "loss": 3.7734, |
| "step": 409088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.329397725180708e-05, |
| "loss": 3.7563, |
| "step": 409600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.328559130429656e-05, |
| "loss": 3.757, |
| "step": 410112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.327720535678604e-05, |
| "loss": 3.7649, |
| "step": 410624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326883578807925e-05, |
| "loss": 3.755, |
| "step": 411136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326044984056873e-05, |
| "loss": 3.7709, |
| "step": 411648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.325206389305821e-05, |
| "loss": 3.7634, |
| "step": 412160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.324367794554768e-05, |
| "loss": 3.7553, |
| "step": 412672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.32353083768409e-05, |
| "loss": 3.7596, |
| "step": 413184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.322692242933038e-05, |
| "loss": 3.7542, |
| "step": 413696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.321853648181986e-05, |
| "loss": 3.7739, |
| "step": 414208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.321015053430934e-05, |
| "loss": 3.7559, |
| "step": 414720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.320178096560255e-05, |
| "loss": 3.7674, |
| "step": 415232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.319339501809203e-05, |
| "loss": 3.7481, |
| "step": 415744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3185009070581506e-05, |
| "loss": 3.759, |
| "step": 416256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3176623123070986e-05, |
| "loss": 3.7527, |
| "step": 416768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.31682535543642e-05, |
| "loss": 3.7543, |
| "step": 417280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.315986760685368e-05, |
| "loss": 3.7613, |
| "step": 417792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3151481659343155e-05, |
| "loss": 3.7593, |
| "step": 418304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3143095711832635e-05, |
| "loss": 3.7654, |
| "step": 418816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.313472614312585e-05, |
| "loss": 3.7764, |
| "step": 419328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.312634019561533e-05, |
| "loss": 3.775, |
| "step": 419840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.311795424810481e-05, |
| "loss": 3.7658, |
| "step": 420352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.310956830059429e-05, |
| "loss": 3.757, |
| "step": 420864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3101198731887506e-05, |
| "loss": 3.7521, |
| "step": 421376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.309281278437698e-05, |
| "loss": 3.7585, |
| "step": 421888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.308442683686646e-05, |
| "loss": 3.7756, |
| "step": 422400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.307604088935594e-05, |
| "loss": 3.765, |
| "step": 422912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3067671320649155e-05, |
| "loss": 3.7542, |
| "step": 423424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.305928537313863e-05, |
| "loss": 3.7435, |
| "step": 423936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.305089942562811e-05, |
| "loss": 3.7705, |
| "step": 424448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.304251347811759e-05, |
| "loss": 3.7387, |
| "step": 424960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3034143909410804e-05, |
| "loss": 3.7571, |
| "step": 425472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3025757961900284e-05, |
| "loss": 3.7569, |
| "step": 425984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3017372014389764e-05, |
| "loss": 3.761, |
| "step": 426496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3008986066879244e-05, |
| "loss": 3.7606, |
| "step": 427008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.300061649817245e-05, |
| "loss": 3.7668, |
| "step": 427520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.299223055066193e-05, |
| "loss": 3.7472, |
| "step": 428032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.298384460315141e-05, |
| "loss": 3.7564, |
| "step": 428544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.297545865564089e-05, |
| "loss": 3.7587, |
| "step": 429056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29670890869341e-05, |
| "loss": 3.7495, |
| "step": 429568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.295870313942358e-05, |
| "loss": 3.7502, |
| "step": 430080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29503335707168e-05, |
| "loss": 3.766, |
| "step": 430592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.294194762320628e-05, |
| "loss": 3.7622, |
| "step": 431104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.293356167569576e-05, |
| "loss": 3.7706, |
| "step": 431616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.292517572818524e-05, |
| "loss": 3.7538, |
| "step": 432128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.291678978067472e-05, |
| "loss": 3.7575, |
| "step": 432640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29084038331642e-05, |
| "loss": 3.7447, |
| "step": 433152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.290001788565368e-05, |
| "loss": 3.7605, |
| "step": 433664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.289163193814316e-05, |
| "loss": 3.7633, |
| "step": 434176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.288326236943637e-05, |
| "loss": 3.7513, |
| "step": 434688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.287487642192585e-05, |
| "loss": 3.7657, |
| "step": 435200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.286649047441533e-05, |
| "loss": 3.7388, |
| "step": 435712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.285810452690481e-05, |
| "loss": 3.7447, |
| "step": 436224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2849734958198016e-05, |
| "loss": 3.7443, |
| "step": 436736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2841349010687496e-05, |
| "loss": 3.7521, |
| "step": 437248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2832963063176976e-05, |
| "loss": 3.7602, |
| "step": 437760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2824577115666456e-05, |
| "loss": 3.7554, |
| "step": 438272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.281620754695967e-05, |
| "loss": 3.7306, |
| "step": 438784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.280782159944915e-05, |
| "loss": 3.7568, |
| "step": 439296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.279943565193863e-05, |
| "loss": 3.7611, |
| "step": 439808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.279104970442811e-05, |
| "loss": 3.7597, |
| "step": 440320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.278268013572132e-05, |
| "loss": 3.7582, |
| "step": 440832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.27742941882108e-05, |
| "loss": 3.7497, |
| "step": 441344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.276590824070028e-05, |
| "loss": 3.7457, |
| "step": 441856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.275752229318976e-05, |
| "loss": 3.7659, |
| "step": 442368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.274915272448297e-05, |
| "loss": 3.7481, |
| "step": 442880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.274076677697245e-05, |
| "loss": 3.7476, |
| "step": 443392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.273238082946193e-05, |
| "loss": 3.7426, |
| "step": 443904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.272399488195141e-05, |
| "loss": 3.7462, |
| "step": 444416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2715625313244625e-05, |
| "loss": 3.7547, |
| "step": 444928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2707239365734105e-05, |
| "loss": 3.7526, |
| "step": 445440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2698853418223585e-05, |
| "loss": 3.761, |
| "step": 445952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2690467470713065e-05, |
| "loss": 3.7459, |
| "step": 446464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2682097902006274e-05, |
| "loss": 3.7435, |
| "step": 446976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2673711954495754e-05, |
| "loss": 3.7613, |
| "step": 447488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2665326006985234e-05, |
| "loss": 3.7457, |
| "step": 448000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2656940059474714e-05, |
| "loss": 3.7557, |
| "step": 448512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.264857049076792e-05, |
| "loss": 3.7595, |
| "step": 449024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.26401845432574e-05, |
| "loss": 3.7588, |
| "step": 449536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.263179859574688e-05, |
| "loss": 3.7558, |
| "step": 450048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.262341264823636e-05, |
| "loss": 3.7446, |
| "step": 450560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.261504307952958e-05, |
| "loss": 3.7372, |
| "step": 451072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.260665713201906e-05, |
| "loss": 3.7473, |
| "step": 451584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.259827118450854e-05, |
| "loss": 3.7511, |
| "step": 452096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.258988523699802e-05, |
| "loss": 3.7601, |
| "step": 452608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.258151566829123e-05, |
| "loss": 3.7517, |
| "step": 453120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.257312972078071e-05, |
| "loss": 3.7515, |
| "step": 453632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.256474377327019e-05, |
| "loss": 3.75, |
| "step": 454144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.255635782575967e-05, |
| "loss": 3.751, |
| "step": 454656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.254798825705288e-05, |
| "loss": 3.7387, |
| "step": 455168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253960230954236e-05, |
| "loss": 3.7456, |
| "step": 455680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253121636203184e-05, |
| "loss": 3.7587, |
| "step": 456192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.252283041452132e-05, |
| "loss": 3.7568, |
| "step": 456704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.251446084581453e-05, |
| "loss": 3.741, |
| "step": 457216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.250607489830401e-05, |
| "loss": 3.7413, |
| "step": 457728 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8656201362609863, |
| "eval_runtime": 575.9172, |
| "eval_samples_per_second": 662.58, |
| "eval_steps_per_second": 20.706, |
| "step": 457914 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249770532959722e-05, |
| "loss": 3.7627, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24893193820867e-05, |
| "loss": 3.7451, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248093343457618e-05, |
| "loss": 3.7414, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247254748706566e-05, |
| "loss": 3.7452, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246416153955514e-05, |
| "loss": 3.7387, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245577559204462e-05, |
| "loss": 3.7331, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24473896445341e-05, |
| "loss": 3.7332, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243900369702358e-05, |
| "loss": 3.7367, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243063412831679e-05, |
| "loss": 3.744, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242224818080627e-05, |
| "loss": 3.7451, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2413878612099486e-05, |
| "loss": 3.742, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2405492664588966e-05, |
| "loss": 3.7348, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2397106717078446e-05, |
| "loss": 3.7491, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2388720769567926e-05, |
| "loss": 3.7428, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2380351200861135e-05, |
| "loss": 3.7408, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2371965253350615e-05, |
| "loss": 3.7267, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2363579305840095e-05, |
| "loss": 3.7297, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2355193358329575e-05, |
| "loss": 3.744, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2346807410819055e-05, |
| "loss": 3.7381, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233842146330853e-05, |
| "loss": 3.7505, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233003551579801e-05, |
| "loss": 3.7341, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232164956828749e-05, |
| "loss": 3.7466, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2313279999580704e-05, |
| "loss": 3.741, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230491043087392e-05, |
| "loss": 3.7371, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.22965244833634e-05, |
| "loss": 3.7358, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.228813853585288e-05, |
| "loss": 3.7349, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227975258834236e-05, |
| "loss": 3.7427, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227136664083183e-05, |
| "loss": 3.7295, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226298069332131e-05, |
| "loss": 3.7434, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.225459474581079e-05, |
| "loss": 3.7341, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.224620879830027e-05, |
| "loss": 3.7217, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.223783922959348e-05, |
| "loss": 3.7395, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222945328208296e-05, |
| "loss": 3.7409, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222106733457244e-05, |
| "loss": 3.7443, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221268138706193e-05, |
| "loss": 3.7306, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.220431181835514e-05, |
| "loss": 3.7363, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219592587084462e-05, |
| "loss": 3.7323, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21875399233341e-05, |
| "loss": 3.7464, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217915397582358e-05, |
| "loss": 3.7361, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2170784407116787e-05, |
| "loss": 3.7209, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2162398459606267e-05, |
| "loss": 3.7262, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2154012512095746e-05, |
| "loss": 3.7204, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2145626564585226e-05, |
| "loss": 3.7414, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2137256995878436e-05, |
| "loss": 3.7319, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2128871048367915e-05, |
| "loss": 3.7329, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2120485100857395e-05, |
| "loss": 3.7392, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211209915334688e-05, |
| "loss": 3.7291, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.210372958464009e-05, |
| "loss": 3.7416, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.209534363712957e-05, |
| "loss": 3.7214, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208695768961905e-05, |
| "loss": 3.7159, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207858812091226e-05, |
| "loss": 3.729, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207020217340174e-05, |
| "loss": 3.7234, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206181622589122e-05, |
| "loss": 3.7273, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20534302783807e-05, |
| "loss": 3.7303, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204506070967391e-05, |
| "loss": 3.7229, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.203667476216339e-05, |
| "loss": 3.718, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.202828881465287e-05, |
| "loss": 3.7232, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201990286714235e-05, |
| "loss": 3.7201, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2011533298435565e-05, |
| "loss": 3.7302, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2003147350925045e-05, |
| "loss": 3.7247, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1994761403414525e-05, |
| "loss": 3.7147, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1986391834707734e-05, |
| "loss": 3.722, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1978005887197214e-05, |
| "loss": 3.7098, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1969619939686694e-05, |
| "loss": 3.7386, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1961233992176174e-05, |
| "loss": 3.7192, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1952848044665654e-05, |
| "loss": 3.7254, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1944462097155134e-05, |
| "loss": 3.7144, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1936076149644614e-05, |
| "loss": 3.7136, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1927690202134094e-05, |
| "loss": 3.7175, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19193206334273e-05, |
| "loss": 3.7162, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191093468591679e-05, |
| "loss": 3.7251, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.190254873840627e-05, |
| "loss": 3.7203, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189416279089575e-05, |
| "loss": 3.7282, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188579322218896e-05, |
| "loss": 3.7372, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.187740727467844e-05, |
| "loss": 3.7332, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186902132716792e-05, |
| "loss": 3.7309, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18606353796574e-05, |
| "loss": 3.7191, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.185226581095061e-05, |
| "loss": 3.7137, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.184387986344009e-05, |
| "loss": 3.7209, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1835510294733297e-05, |
| "loss": 3.7353, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1827124347222776e-05, |
| "loss": 3.727, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1818738399712256e-05, |
| "loss": 3.7168, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181035245220174e-05, |
| "loss": 3.7037, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180196650469122e-05, |
| "loss": 3.7347, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.17935805571807e-05, |
| "loss": 3.6986, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178519460967018e-05, |
| "loss": 3.7181, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177682504096339e-05, |
| "loss": 3.7224, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176843909345287e-05, |
| "loss": 3.721, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176005314594235e-05, |
| "loss": 3.7277, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175166719843183e-05, |
| "loss": 3.7311, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174329762972504e-05, |
| "loss": 3.7067, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173491168221452e-05, |
| "loss": 3.7214, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1726525734704e-05, |
| "loss": 3.7223, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171813978719348e-05, |
| "loss": 3.7136, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170977021848669e-05, |
| "loss": 3.7107, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1701400649779906e-05, |
| "loss": 3.7301, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1693014702269386e-05, |
| "loss": 3.7258, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1684628754758866e-05, |
| "loss": 3.7361, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1676242807248346e-05, |
| "loss": 3.7139, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1667856859737826e-05, |
| "loss": 3.7231, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1659470912227306e-05, |
| "loss": 3.7084, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1651084964716786e-05, |
| "loss": 3.7231, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.164269901720626e-05, |
| "loss": 3.7265, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1634329448499475e-05, |
| "loss": 3.7221, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1625943500988955e-05, |
| "loss": 3.7266, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1617557553478435e-05, |
| "loss": 3.7045, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1609171605967915e-05, |
| "loss": 3.7043, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160081841606486e-05, |
| "loss": 3.7048, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.159243246855434e-05, |
| "loss": 3.7151, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158404652104382e-05, |
| "loss": 3.7233, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15756605735333e-05, |
| "loss": 3.7228, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156727462602278e-05, |
| "loss": 3.6953, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155888867851226e-05, |
| "loss": 3.7186, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155050273100173e-05, |
| "loss": 3.7249, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154211678349121e-05, |
| "loss": 3.7234, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153374721478443e-05, |
| "loss": 3.7213, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152536126727391e-05, |
| "loss": 3.7136, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151697531976338e-05, |
| "loss": 3.7129, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150858937225287e-05, |
| "loss": 3.7248, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1500219803546084e-05, |
| "loss": 3.7154, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149183385603556e-05, |
| "loss": 3.711, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148344790852504e-05, |
| "loss": 3.705, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147506196101452e-05, |
| "loss": 3.708, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.146669239230773e-05, |
| "loss": 3.7181, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1458306444797206e-05, |
| "loss": 3.7164, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1449920497286686e-05, |
| "loss": 3.7226, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1441534549776166e-05, |
| "loss": 3.7111, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.143316498106938e-05, |
| "loss": 3.707, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1424779033558855e-05, |
| "loss": 3.7268, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1416393086048335e-05, |
| "loss": 3.7067, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.140800713853782e-05, |
| "loss": 3.7212, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139963756983103e-05, |
| "loss": 3.7271, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139125162232051e-05, |
| "loss": 3.7193, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138286567480999e-05, |
| "loss": 3.7203, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137449610610321e-05, |
| "loss": 3.7142, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136611015859268e-05, |
| "loss": 3.7005, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.135772421108216e-05, |
| "loss": 3.7094, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134933826357164e-05, |
| "loss": 3.7126, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134095231606112e-05, |
| "loss": 3.7254, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13325663685506e-05, |
| "loss": 3.7126, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.132418042104008e-05, |
| "loss": 3.7205, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.131581085233329e-05, |
| "loss": 3.712, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1307424904822776e-05, |
| "loss": 3.7162, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1299038957312256e-05, |
| "loss": 3.7015, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290653009801736e-05, |
| "loss": 3.7106, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1282283441094945e-05, |
| "loss": 3.7248, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1273897493584425e-05, |
| "loss": 3.722, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1265511546073905e-05, |
| "loss": 3.7041, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1257125598563384e-05, |
| "loss": 3.7107, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.855581283569336, |
| "eval_runtime": 571.6992, |
| "eval_samples_per_second": 667.468, |
| "eval_steps_per_second": 20.859, |
| "step": 534233 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1248756029856594e-05, |
| "loss": 3.7299, |
| "step": 534528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1240370082346074e-05, |
| "loss": 3.7082, |
| "step": 535040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1231984134835553e-05, |
| "loss": 3.7068, |
| "step": 535552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1223598187325033e-05, |
| "loss": 3.7139, |
| "step": 536064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1215212239814513e-05, |
| "loss": 3.6996, |
| "step": 536576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.120682629230399e-05, |
| "loss": 3.7022, |
| "step": 537088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.119844034479347e-05, |
| "loss": 3.695, |
| "step": 537600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.119005439728296e-05, |
| "loss": 3.7054, |
| "step": 538112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.118168482857617e-05, |
| "loss": 3.7056, |
| "step": 538624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.117329888106565e-05, |
| "loss": 3.7119, |
| "step": 539136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.116492931235886e-05, |
| "loss": 3.7072, |
| "step": 539648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.115654336484834e-05, |
| "loss": 3.7021, |
| "step": 540160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.114815741733782e-05, |
| "loss": 3.7143, |
| "step": 540672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.11397714698273e-05, |
| "loss": 3.7049, |
| "step": 541184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.113138552231678e-05, |
| "loss": 3.7114, |
| "step": 541696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.112299957480626e-05, |
| "loss": 3.6888, |
| "step": 542208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.111461362729574e-05, |
| "loss": 3.6973, |
| "step": 542720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.110624405858895e-05, |
| "loss": 3.7072, |
| "step": 543232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.109785811107843e-05, |
| "loss": 3.7016, |
| "step": 543744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108947216356791e-05, |
| "loss": 3.7179, |
| "step": 544256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1081086216057394e-05, |
| "loss": 3.703, |
| "step": 544768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.10727166473506e-05, |
| "loss": 3.7086, |
| "step": 545280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.106433069984008e-05, |
| "loss": 3.707, |
| "step": 545792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.105596113113329e-05, |
| "loss": 3.6998, |
| "step": 546304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.104757518362277e-05, |
| "loss": 3.7058, |
| "step": 546816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.103918923611225e-05, |
| "loss": 3.6975, |
| "step": 547328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.103080328860173e-05, |
| "loss": 3.7112, |
| "step": 547840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.102241734109121e-05, |
| "loss": 3.6992, |
| "step": 548352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1014031393580685e-05, |
| "loss": 3.7066, |
| "step": 548864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1005645446070165e-05, |
| "loss": 3.6982, |
| "step": 549376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0997259498559645e-05, |
| "loss": 3.6905, |
| "step": 549888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098888992985286e-05, |
| "loss": 3.7056, |
| "step": 550400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098050398234234e-05, |
| "loss": 3.7067, |
| "step": 550912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.097211803483182e-05, |
| "loss": 3.7073, |
| "step": 551424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.09637320873213e-05, |
| "loss": 3.7026, |
| "step": 551936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0955378897418245e-05, |
| "loss": 3.6981, |
| "step": 552448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0946992949907725e-05, |
| "loss": 3.7002, |
| "step": 552960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0938607002397205e-05, |
| "loss": 3.7119, |
| "step": 553472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0930221054886685e-05, |
| "loss": 3.705, |
| "step": 553984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.092183510737616e-05, |
| "loss": 3.6911, |
| "step": 554496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0913465538669374e-05, |
| "loss": 3.691, |
| "step": 555008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0905079591158854e-05, |
| "loss": 3.6878, |
| "step": 555520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0896693643648334e-05, |
| "loss": 3.7067, |
| "step": 556032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0888307696137814e-05, |
| "loss": 3.7005, |
| "step": 556544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0879921748627294e-05, |
| "loss": 3.6946, |
| "step": 557056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0871535801116774e-05, |
| "loss": 3.7096, |
| "step": 557568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0863149853606254e-05, |
| "loss": 3.6965, |
| "step": 558080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.085478028489946e-05, |
| "loss": 3.709, |
| "step": 558592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.084639433738894e-05, |
| "loss": 3.6883, |
| "step": 559104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.083800838987842e-05, |
| "loss": 3.6839, |
| "step": 559616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.08296224423679e-05, |
| "loss": 3.6935, |
| "step": 560128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082123649485738e-05, |
| "loss": 3.6947, |
| "step": 560640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.081285054734686e-05, |
| "loss": 3.6897, |
| "step": 561152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.080446459983634e-05, |
| "loss": 3.6985, |
| "step": 561664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.079609503112955e-05, |
| "loss": 3.6922, |
| "step": 562176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.078770908361904e-05, |
| "loss": 3.6843, |
| "step": 562688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077932313610852e-05, |
| "loss": 3.6953, |
| "step": 563200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0770937188598e-05, |
| "loss": 3.6816, |
| "step": 563712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.076255124108748e-05, |
| "loss": 3.6976, |
| "step": 564224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.075416529357696e-05, |
| "loss": 3.6923, |
| "step": 564736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.074579572487017e-05, |
| "loss": 3.6854, |
| "step": 565248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.073740977735965e-05, |
| "loss": 3.6889, |
| "step": 565760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072902382984913e-05, |
| "loss": 3.6797, |
| "step": 566272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072063788233861e-05, |
| "loss": 3.7038, |
| "step": 566784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.071225193482809e-05, |
| "loss": 3.685, |
| "step": 567296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.07038823661213e-05, |
| "loss": 3.6932, |
| "step": 567808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0695496418610777e-05, |
| "loss": 3.6853, |
| "step": 568320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0687110471100257e-05, |
| "loss": 3.6819, |
| "step": 568832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0678724523589736e-05, |
| "loss": 3.6842, |
| "step": 569344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.067033857607922e-05, |
| "loss": 3.6862, |
| "step": 569856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.06619526285687e-05, |
| "loss": 3.69, |
| "step": 570368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0653566681058176e-05, |
| "loss": 3.6902, |
| "step": 570880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0645180733547656e-05, |
| "loss": 3.6969, |
| "step": 571392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.063681116484087e-05, |
| "loss": 3.7039, |
| "step": 571904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0628425217330345e-05, |
| "loss": 3.7026, |
| "step": 572416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0620039269819825e-05, |
| "loss": 3.6939, |
| "step": 572928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0611653322309305e-05, |
| "loss": 3.6928, |
| "step": 573440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.060328375360252e-05, |
| "loss": 3.6766, |
| "step": 573952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0594897806091994e-05, |
| "loss": 3.6892, |
| "step": 574464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.058652823738521e-05, |
| "loss": 3.7039, |
| "step": 574976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.057814228987469e-05, |
| "loss": 3.6939, |
| "step": 575488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056975634236418e-05, |
| "loss": 3.6911, |
| "step": 576000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056137039485365e-05, |
| "loss": 3.6723, |
| "step": 576512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.055298444734313e-05, |
| "loss": 3.6968, |
| "step": 577024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0544614878636346e-05, |
| "loss": 3.6713, |
| "step": 577536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.053622893112582e-05, |
| "loss": 3.6852, |
| "step": 578048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.05278429836153e-05, |
| "loss": 3.686, |
| "step": 578560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051945703610478e-05, |
| "loss": 3.6894, |
| "step": 579072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051107108859426e-05, |
| "loss": 3.6969, |
| "step": 579584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.050268514108374e-05, |
| "loss": 3.6948, |
| "step": 580096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.049429919357322e-05, |
| "loss": 3.6804, |
| "step": 580608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.04859132460627e-05, |
| "loss": 3.6917, |
| "step": 581120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0477543677355915e-05, |
| "loss": 3.686, |
| "step": 581632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0469157729845395e-05, |
| "loss": 3.683, |
| "step": 582144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0460771782334875e-05, |
| "loss": 3.6798, |
| "step": 582656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0452385834824355e-05, |
| "loss": 3.6942, |
| "step": 583168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0444016266117564e-05, |
| "loss": 3.6947, |
| "step": 583680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0435630318607044e-05, |
| "loss": 3.7025, |
| "step": 584192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0427244371096524e-05, |
| "loss": 3.6895, |
| "step": 584704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0418858423586004e-05, |
| "loss": 3.6901, |
| "step": 585216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0410472476075483e-05, |
| "loss": 3.6771, |
| "step": 585728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0402086528564963e-05, |
| "loss": 3.695, |
| "step": 586240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.039370058105444e-05, |
| "loss": 3.6927, |
| "step": 586752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.038533101234765e-05, |
| "loss": 3.6912, |
| "step": 587264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.037694506483713e-05, |
| "loss": 3.6936, |
| "step": 587776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.036855911732661e-05, |
| "loss": 3.6764, |
| "step": 588288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.036017316981609e-05, |
| "loss": 3.6736, |
| "step": 588800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.035180360110931e-05, |
| "loss": 3.6766, |
| "step": 589312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.034341765359879e-05, |
| "loss": 3.6826, |
| "step": 589824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.033503170608827e-05, |
| "loss": 3.6927, |
| "step": 590336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.032664575857775e-05, |
| "loss": 3.6918, |
| "step": 590848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.031825981106723e-05, |
| "loss": 3.6615, |
| "step": 591360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030989024236044e-05, |
| "loss": 3.691, |
| "step": 591872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030150429484992e-05, |
| "loss": 3.6942, |
| "step": 592384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.02931183473394e-05, |
| "loss": 3.6914, |
| "step": 592896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.028473239982888e-05, |
| "loss": 3.69, |
| "step": 593408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.027634645231836e-05, |
| "loss": 3.6878, |
| "step": 593920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.026796050480783e-05, |
| "loss": 3.6764, |
| "step": 594432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.025957455729732e-05, |
| "loss": 3.6955, |
| "step": 594944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.025120498859053e-05, |
| "loss": 3.6855, |
| "step": 595456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.024281904108001e-05, |
| "loss": 3.6784, |
| "step": 595968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0234433093569486e-05, |
| "loss": 3.6718, |
| "step": 596480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0226047146058966e-05, |
| "loss": 3.6816, |
| "step": 596992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0217661198548446e-05, |
| "loss": 3.6864, |
| "step": 597504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0209275251037926e-05, |
| "loss": 3.6825, |
| "step": 598016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0200889303527406e-05, |
| "loss": 3.6933, |
| "step": 598528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0192519734820615e-05, |
| "loss": 3.6812, |
| "step": 599040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.018415016611383e-05, |
| "loss": 3.6776, |
| "step": 599552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0175764218603304e-05, |
| "loss": 3.6972, |
| "step": 600064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0167378271092784e-05, |
| "loss": 3.6737, |
| "step": 600576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.015899232358227e-05, |
| "loss": 3.6905, |
| "step": 601088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.015060637607175e-05, |
| "loss": 3.6965, |
| "step": 601600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.014222042856123e-05, |
| "loss": 3.6853, |
| "step": 602112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.013383448105071e-05, |
| "loss": 3.6914, |
| "step": 602624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.012546491234392e-05, |
| "loss": 3.6833, |
| "step": 603136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.01170789648334e-05, |
| "loss": 3.6711, |
| "step": 603648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010869301732288e-05, |
| "loss": 3.6777, |
| "step": 604160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010030706981236e-05, |
| "loss": 3.6853, |
| "step": 604672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.009192112230184e-05, |
| "loss": 3.6918, |
| "step": 605184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.008353517479132e-05, |
| "loss": 3.6859, |
| "step": 605696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.007516560608453e-05, |
| "loss": 3.6903, |
| "step": 606208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.006677965857401e-05, |
| "loss": 3.6797, |
| "step": 606720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.005839371106349e-05, |
| "loss": 3.6867, |
| "step": 607232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.005000776355297e-05, |
| "loss": 3.6718, |
| "step": 607744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0041621816042455e-05, |
| "loss": 3.6812, |
| "step": 608256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0033235868531935e-05, |
| "loss": 3.693, |
| "step": 608768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0024849921021415e-05, |
| "loss": 3.6941, |
| "step": 609280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0016463973510895e-05, |
| "loss": 3.6723, |
| "step": 609792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0008094404804104e-05, |
| "loss": 3.6837, |
| "step": 610304 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8487296104431152, |
| "eval_runtime": 589.3545, |
| "eval_samples_per_second": 647.473, |
| "eval_steps_per_second": 20.234, |
| "step": 610552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999972483609731e-05, |
| "loss": 3.7002, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999133888858679e-05, |
| "loss": 3.682, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998295294107627e-05, |
| "loss": 3.6762, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.997456699356575e-05, |
| "loss": 3.6839, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.996618104605523e-05, |
| "loss": 3.6691, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995779509854471e-05, |
| "loss": 3.6716, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994940915103419e-05, |
| "loss": 3.6717, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994102320352367e-05, |
| "loss": 3.6711, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.993265363481689e-05, |
| "loss": 3.6742, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99242840661101e-05, |
| "loss": 3.6829, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.991589811859958e-05, |
| "loss": 3.6762, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.990751217108906e-05, |
| "loss": 3.6748, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989912622357854e-05, |
| "loss": 3.6858, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989074027606802e-05, |
| "loss": 3.6709, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9882370707361227e-05, |
| "loss": 3.6852, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9873984759850707e-05, |
| "loss": 3.6605, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9865598812340187e-05, |
| "loss": 3.6675, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9857212864829666e-05, |
| "loss": 3.6787, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9848826917319146e-05, |
| "loss": 3.6727, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9840440969808626e-05, |
| "loss": 3.6884, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9832055022298106e-05, |
| "loss": 3.6756, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9823669074787586e-05, |
| "loss": 3.6763, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9815299506080795e-05, |
| "loss": 3.6804, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980692993737401e-05, |
| "loss": 3.6703, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979854398986349e-05, |
| "loss": 3.6773, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9790158042352964e-05, |
| "loss": 3.6684, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9781772094842444e-05, |
| "loss": 3.6809, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9773386147331924e-05, |
| "loss": 3.6698, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9765000199821404e-05, |
| "loss": 3.6798, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9756614252310884e-05, |
| "loss": 3.6678, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9748228304800364e-05, |
| "loss": 3.6599, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973985873609358e-05, |
| "loss": 3.6778, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973147278858306e-05, |
| "loss": 3.6788, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.972308684107254e-05, |
| "loss": 3.678, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.971471727236575e-05, |
| "loss": 3.6717, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.970633132485523e-05, |
| "loss": 3.6746, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.969794537734471e-05, |
| "loss": 3.669, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.968955942983419e-05, |
| "loss": 3.6816, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.968117348232367e-05, |
| "loss": 3.6764, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.967278753481315e-05, |
| "loss": 3.6616, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.966440158730263e-05, |
| "loss": 3.6638, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.965601563979211e-05, |
| "loss": 3.6597, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.964766244988905e-05, |
| "loss": 3.6777, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9639276502378534e-05, |
| "loss": 3.676, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9630890554868014e-05, |
| "loss": 3.6625, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9622504607357494e-05, |
| "loss": 3.6826, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9614118659846974e-05, |
| "loss": 3.666, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960574909114018e-05, |
| "loss": 3.6794, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959736314362966e-05, |
| "loss": 3.6647, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958897719611914e-05, |
| "loss": 3.6577, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958059124860862e-05, |
| "loss": 3.6637, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95722053010981e-05, |
| "loss": 3.6622, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956381935358758e-05, |
| "loss": 3.6692, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955543340607706e-05, |
| "loss": 3.6632, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.954706383737027e-05, |
| "loss": 3.6658, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953867788985975e-05, |
| "loss": 3.6525, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953029194234923e-05, |
| "loss": 3.6669, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952190599483872e-05, |
| "loss": 3.6558, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95135200473282e-05, |
| "loss": 3.6678, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.950513409981768e-05, |
| "loss": 3.6586, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.949676453111089e-05, |
| "loss": 3.6631, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948837858360037e-05, |
| "loss": 3.6612, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.947999263608985e-05, |
| "loss": 3.6513, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.947160668857933e-05, |
| "loss": 3.6729, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.94632207410688e-05, |
| "loss": 3.6579, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9454851172362016e-05, |
| "loss": 3.6656, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9446465224851496e-05, |
| "loss": 3.6601, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.943807927734097e-05, |
| "loss": 3.6517, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9429693329830456e-05, |
| "loss": 3.6554, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9421307382319936e-05, |
| "loss": 3.659, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.941293781361315e-05, |
| "loss": 3.6627, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9404551866102625e-05, |
| "loss": 3.6617, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9396165918592105e-05, |
| "loss": 3.6658, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9387779971081585e-05, |
| "loss": 3.6819, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9379394023571065e-05, |
| "loss": 3.6745, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9371008076060545e-05, |
| "loss": 3.6653, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9362638507353754e-05, |
| "loss": 3.665, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9354252559843234e-05, |
| "loss": 3.6517, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9345866612332714e-05, |
| "loss": 3.6594, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9337480664822194e-05, |
| "loss": 3.6729, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9329094717311674e-05, |
| "loss": 3.6706, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932072514860489e-05, |
| "loss": 3.6595, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.931233920109437e-05, |
| "loss": 3.6482, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.930395325358385e-05, |
| "loss": 3.6688, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.929556730607333e-05, |
| "loss": 3.6405, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.928718135856281e-05, |
| "loss": 3.6606, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927879541105229e-05, |
| "loss": 3.6604, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927040946354177e-05, |
| "loss": 3.6593, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.926203989483498e-05, |
| "loss": 3.6715, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.925365394732446e-05, |
| "loss": 3.6668, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924526799981394e-05, |
| "loss": 3.6551, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.923688205230342e-05, |
| "loss": 3.6588, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.92284961047929e-05, |
| "loss": 3.6591, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.922011015728238e-05, |
| "loss": 3.6562, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.921172420977186e-05, |
| "loss": 3.6535, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.920333826226134e-05, |
| "loss": 3.6648, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9194968693554554e-05, |
| "loss": 3.6706, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9186582746044034e-05, |
| "loss": 3.678, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9178196798533514e-05, |
| "loss": 3.6577, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.916981085102299e-05, |
| "loss": 3.6639, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.916142490351247e-05, |
| "loss": 3.653, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.915303895600195e-05, |
| "loss": 3.6622, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.914466938729516e-05, |
| "loss": 3.6693, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9136283439784636e-05, |
| "loss": 3.6659, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9127897492274116e-05, |
| "loss": 3.6681, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9119511544763596e-05, |
| "loss": 3.6478, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9111125597253076e-05, |
| "loss": 3.6489, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9102739649742556e-05, |
| "loss": 3.6502, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909437008103577e-05, |
| "loss": 3.6585, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.908598413352525e-05, |
| "loss": 3.6595, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907759818601473e-05, |
| "loss": 3.6648, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906921223850421e-05, |
| "loss": 3.6338, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906084266979742e-05, |
| "loss": 3.6639, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90524567222869e-05, |
| "loss": 3.6689, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.904407077477638e-05, |
| "loss": 3.6605, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.903568482726586e-05, |
| "loss": 3.664, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.902729887975534e-05, |
| "loss": 3.6617, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.901891293224482e-05, |
| "loss": 3.648, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90105269847343e-05, |
| "loss": 3.667, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900215741602751e-05, |
| "loss": 3.6636, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8993771468516996e-05, |
| "loss": 3.6495, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8985385521006476e-05, |
| "loss": 3.6476, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8976999573495956e-05, |
| "loss": 3.6547, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8968613625985436e-05, |
| "loss": 3.6569, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8960227678474916e-05, |
| "loss": 3.6554, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8951858109768125e-05, |
| "loss": 3.664, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8943472162257605e-05, |
| "loss": 3.6575, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8935086214747085e-05, |
| "loss": 3.6492, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8926700267236565e-05, |
| "loss": 3.672, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8918314319726045e-05, |
| "loss": 3.6479, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890992837221552e-05, |
| "loss": 3.6652, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8901558803508734e-05, |
| "loss": 3.6714, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8893172855998214e-05, |
| "loss": 3.6594, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8884786908487694e-05, |
| "loss": 3.6646, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8876400960977174e-05, |
| "loss": 3.6584, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.886803139227039e-05, |
| "loss": 3.6438, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885964544475987e-05, |
| "loss": 3.6546, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885125949724935e-05, |
| "loss": 3.6548, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.884287354973882e-05, |
| "loss": 3.6671, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.88344876022283e-05, |
| "loss": 3.6602, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.882610165471778e-05, |
| "loss": 3.6594, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.881771570720726e-05, |
| "loss": 3.6586, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880932975969674e-05, |
| "loss": 3.6579, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880096019098995e-05, |
| "loss": 3.6442, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.879257424347943e-05, |
| "loss": 3.6553, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.878418829596892e-05, |
| "loss": 3.6654, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.87758023484584e-05, |
| "loss": 3.6665, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.876743277975161e-05, |
| "loss": 3.6479, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.875904683224109e-05, |
| "loss": 3.6531, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.844803810119629, |
| "eval_runtime": 573.814, |
| "eval_samples_per_second": 665.008, |
| "eval_steps_per_second": 20.782, |
| "step": 686871 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8750677263534297e-05, |
| "loss": 3.6743, |
| "step": 687104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8742291316023777e-05, |
| "loss": 3.6552, |
| "step": 687616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8733905368513256e-05, |
| "loss": 3.6528, |
| "step": 688128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8725519421002736e-05, |
| "loss": 3.6571, |
| "step": 688640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8717133473492216e-05, |
| "loss": 3.6482, |
| "step": 689152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8708747525981696e-05, |
| "loss": 3.6444, |
| "step": 689664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8700377957274905e-05, |
| "loss": 3.6448, |
| "step": 690176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8691992009764385e-05, |
| "loss": 3.6448, |
| "step": 690688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8683606062253865e-05, |
| "loss": 3.6466, |
| "step": 691200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.867522011474335e-05, |
| "loss": 3.6586, |
| "step": 691712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.866685054603656e-05, |
| "loss": 3.6481, |
| "step": 692224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.865846459852604e-05, |
| "loss": 3.6498, |
| "step": 692736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.865007865101552e-05, |
| "loss": 3.6594, |
| "step": 693248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8641692703505e-05, |
| "loss": 3.6468, |
| "step": 693760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.863332313479821e-05, |
| "loss": 3.6553, |
| "step": 694272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.862493718728769e-05, |
| "loss": 3.6344, |
| "step": 694784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.861655123977717e-05, |
| "loss": 3.6404, |
| "step": 695296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.860816529226665e-05, |
| "loss": 3.6556, |
| "step": 695808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.859977934475613e-05, |
| "loss": 3.649, |
| "step": 696320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.859140977604934e-05, |
| "loss": 3.6616, |
| "step": 696832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.858302382853882e-05, |
| "loss": 3.6448, |
| "step": 697344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8574637881028306e-05, |
| "loss": 3.6601, |
| "step": 697856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8566251933517786e-05, |
| "loss": 3.6478, |
| "step": 698368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8557882364810995e-05, |
| "loss": 3.645, |
| "step": 698880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8549496417300475e-05, |
| "loss": 3.6521, |
| "step": 699392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8541110469789955e-05, |
| "loss": 3.6472, |
| "step": 699904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8532724522279435e-05, |
| "loss": 3.6543, |
| "step": 700416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8524338574768915e-05, |
| "loss": 3.6451, |
| "step": 700928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8515952627258395e-05, |
| "loss": 3.6539, |
| "step": 701440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8507566679747875e-05, |
| "loss": 3.6454, |
| "step": 701952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8499180732237354e-05, |
| "loss": 3.6276, |
| "step": 702464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8490811163530564e-05, |
| "loss": 3.6552, |
| "step": 702976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8482425216020044e-05, |
| "loss": 3.6543, |
| "step": 703488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8474039268509524e-05, |
| "loss": 3.6523, |
| "step": 704000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8465653320999003e-05, |
| "loss": 3.6487, |
| "step": 704512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.845728375229222e-05, |
| "loss": 3.6451, |
| "step": 705024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.84488978047817e-05, |
| "loss": 3.6459, |
| "step": 705536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.844052823607491e-05, |
| "loss": 3.6539, |
| "step": 706048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.843214228856439e-05, |
| "loss": 3.6529, |
| "step": 706560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.842375634105387e-05, |
| "loss": 3.6338, |
| "step": 707072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.841537039354335e-05, |
| "loss": 3.6424, |
| "step": 707584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.840700082483656e-05, |
| "loss": 3.637, |
| "step": 708096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.839861487732604e-05, |
| "loss": 3.6469, |
| "step": 708608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.839022892981552e-05, |
| "loss": 3.6515, |
| "step": 709120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8381842982305e-05, |
| "loss": 3.6397, |
| "step": 709632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.837347341359821e-05, |
| "loss": 3.6552, |
| "step": 710144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.836508746608769e-05, |
| "loss": 3.6396, |
| "step": 710656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.835670151857717e-05, |
| "loss": 3.6563, |
| "step": 711168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.834831557106665e-05, |
| "loss": 3.64, |
| "step": 711680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.833992962355613e-05, |
| "loss": 3.6325, |
| "step": 712192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8331543676045606e-05, |
| "loss": 3.6413, |
| "step": 712704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8323157728535086e-05, |
| "loss": 3.6332, |
| "step": 713216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8314771781024566e-05, |
| "loss": 3.6476, |
| "step": 713728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8306402212317775e-05, |
| "loss": 3.6369, |
| "step": 714240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.829803264361099e-05, |
| "loss": 3.6453, |
| "step": 714752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.828964669610047e-05, |
| "loss": 3.6281, |
| "step": 715264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.828126074858995e-05, |
| "loss": 3.6417, |
| "step": 715776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.827287480107943e-05, |
| "loss": 3.6283, |
| "step": 716288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.826448885356891e-05, |
| "loss": 3.6468, |
| "step": 716800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.825610290605839e-05, |
| "loss": 3.6348, |
| "step": 717312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.82477333373516e-05, |
| "loss": 3.6371, |
| "step": 717824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.823934738984108e-05, |
| "loss": 3.6369, |
| "step": 718336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.823096144233056e-05, |
| "loss": 3.6266, |
| "step": 718848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.822257549482004e-05, |
| "loss": 3.6456, |
| "step": 719360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.821420592611325e-05, |
| "loss": 3.6372, |
| "step": 719872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.820581997860273e-05, |
| "loss": 3.6442, |
| "step": 720384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.819743403109221e-05, |
| "loss": 3.6311, |
| "step": 720896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.818904808358169e-05, |
| "loss": 3.6305, |
| "step": 721408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.818066213607117e-05, |
| "loss": 3.6299, |
| "step": 721920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.817227618856065e-05, |
| "loss": 3.6375, |
| "step": 722432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8163890241050135e-05, |
| "loss": 3.6337, |
| "step": 722944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8155504293539615e-05, |
| "loss": 3.6393, |
| "step": 723456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8147134724832824e-05, |
| "loss": 3.6412, |
| "step": 723968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8138748777322304e-05, |
| "loss": 3.6585, |
| "step": 724480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8130362829811784e-05, |
| "loss": 3.6463, |
| "step": 724992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8121976882301264e-05, |
| "loss": 3.6435, |
| "step": 725504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.811360731359447e-05, |
| "loss": 3.6397, |
| "step": 726016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.810522136608395e-05, |
| "loss": 3.6299, |
| "step": 726528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.809683541857343e-05, |
| "loss": 3.6305, |
| "step": 727040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.808844947106291e-05, |
| "loss": 3.6492, |
| "step": 727552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.808007990235612e-05, |
| "loss": 3.6496, |
| "step": 728064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.80716939548456e-05, |
| "loss": 3.6399, |
| "step": 728576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.806330800733509e-05, |
| "loss": 3.6226, |
| "step": 729088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.805492205982457e-05, |
| "loss": 3.6378, |
| "step": 729600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.804655249111778e-05, |
| "loss": 3.6199, |
| "step": 730112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.803816654360726e-05, |
| "loss": 3.6369, |
| "step": 730624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.802978059609674e-05, |
| "loss": 3.6359, |
| "step": 731136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.802139464858622e-05, |
| "loss": 3.6342, |
| "step": 731648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.80130087010757e-05, |
| "loss": 3.646, |
| "step": 732160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.800462275356518e-05, |
| "loss": 3.6411, |
| "step": 732672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.799625318485839e-05, |
| "loss": 3.6344, |
| "step": 733184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.798786723734787e-05, |
| "loss": 3.6355, |
| "step": 733696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.797948128983735e-05, |
| "loss": 3.6369, |
| "step": 734208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.797109534232683e-05, |
| "loss": 3.629, |
| "step": 734720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.796270939481631e-05, |
| "loss": 3.6315, |
| "step": 735232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.795432344730579e-05, |
| "loss": 3.638, |
| "step": 735744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7945953878599e-05, |
| "loss": 3.6466, |
| "step": 736256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.793756793108848e-05, |
| "loss": 3.6525, |
| "step": 736768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.792918198357796e-05, |
| "loss": 3.6327, |
| "step": 737280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7920796036067436e-05, |
| "loss": 3.6417, |
| "step": 737792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7912410088556916e-05, |
| "loss": 3.6315, |
| "step": 738304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7904024141046396e-05, |
| "loss": 3.6395, |
| "step": 738816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.789565457233961e-05, |
| "loss": 3.6382, |
| "step": 739328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7887268624829085e-05, |
| "loss": 3.6448, |
| "step": 739840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7878882677318565e-05, |
| "loss": 3.6447, |
| "step": 740352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7870496729808045e-05, |
| "loss": 3.6243, |
| "step": 740864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.786212716110126e-05, |
| "loss": 3.6264, |
| "step": 741376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.785374121359074e-05, |
| "loss": 3.6254, |
| "step": 741888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.784535526608022e-05, |
| "loss": 3.6302, |
| "step": 742400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.78369693185697e-05, |
| "loss": 3.638, |
| "step": 742912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.782858337105918e-05, |
| "loss": 3.641, |
| "step": 743424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.782021380235239e-05, |
| "loss": 3.616, |
| "step": 743936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.781182785484187e-05, |
| "loss": 3.6379, |
| "step": 744448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.780344190733135e-05, |
| "loss": 3.6429, |
| "step": 744960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.779505595982083e-05, |
| "loss": 3.6384, |
| "step": 745472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.778667001231031e-05, |
| "loss": 3.64, |
| "step": 745984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.777828406479979e-05, |
| "loss": 3.6367, |
| "step": 746496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7769914496093e-05, |
| "loss": 3.6265, |
| "step": 747008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.776152854858248e-05, |
| "loss": 3.6412, |
| "step": 747520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.775314260107196e-05, |
| "loss": 3.6397, |
| "step": 748032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7744756653561445e-05, |
| "loss": 3.6333, |
| "step": 748544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7736370706050925e-05, |
| "loss": 3.6185, |
| "step": 749056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7728001137344134e-05, |
| "loss": 3.6341, |
| "step": 749568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7719615189833614e-05, |
| "loss": 3.6313, |
| "step": 750080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7711229242323094e-05, |
| "loss": 3.6352, |
| "step": 750592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7702843294812574e-05, |
| "loss": 3.6336, |
| "step": 751104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7694457347302054e-05, |
| "loss": 3.6404, |
| "step": 751616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.768608777859526e-05, |
| "loss": 3.6264, |
| "step": 752128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.767770183108474e-05, |
| "loss": 3.6469, |
| "step": 752640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.766931588357422e-05, |
| "loss": 3.6256, |
| "step": 753152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.76609299360637e-05, |
| "loss": 3.6414, |
| "step": 753664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.765256036735691e-05, |
| "loss": 3.6487, |
| "step": 754176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.76441744198464e-05, |
| "loss": 3.6347, |
| "step": 754688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.763578847233588e-05, |
| "loss": 3.6412, |
| "step": 755200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.762740252482536e-05, |
| "loss": 3.6372, |
| "step": 755712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.761903295611857e-05, |
| "loss": 3.6213, |
| "step": 756224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.761064700860805e-05, |
| "loss": 3.6304, |
| "step": 756736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.760226106109753e-05, |
| "loss": 3.6274, |
| "step": 757248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.759387511358701e-05, |
| "loss": 3.6425, |
| "step": 757760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.758548916607649e-05, |
| "loss": 3.638, |
| "step": 758272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.757710321856597e-05, |
| "loss": 3.6375, |
| "step": 758784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7568733649859176e-05, |
| "loss": 3.6406, |
| "step": 759296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7560347702348656e-05, |
| "loss": 3.6317, |
| "step": 759808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7551961754838136e-05, |
| "loss": 3.6212, |
| "step": 760320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7543575807327616e-05, |
| "loss": 3.6319, |
| "step": 760832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7535189859817096e-05, |
| "loss": 3.6441, |
| "step": 761344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7526803912306576e-05, |
| "loss": 3.6409, |
| "step": 761856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7518417964796056e-05, |
| "loss": 3.6242, |
| "step": 762368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.751004839608927e-05, |
| "loss": 3.6291, |
| "step": 762880 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8420088291168213, |
| "eval_runtime": 320.9317, |
| "eval_samples_per_second": 1189.01, |
| "eval_steps_per_second": 37.157, |
| "step": 763190 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.750169520618621e-05, |
| "loss": 3.647, |
| "step": 763392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.749330925867569e-05, |
| "loss": 3.6286, |
| "step": 763904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.748492331116517e-05, |
| "loss": 3.6318, |
| "step": 764416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.747653736365465e-05, |
| "loss": 3.6288, |
| "step": 764928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.746815141614413e-05, |
| "loss": 3.6307, |
| "step": 765440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.745976546863361e-05, |
| "loss": 3.6182, |
| "step": 765952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.745137952112309e-05, |
| "loss": 3.6235, |
| "step": 766464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.744299357361257e-05, |
| "loss": 3.6168, |
| "step": 766976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.743460762610205e-05, |
| "loss": 3.6265, |
| "step": 767488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7426238057395266e-05, |
| "loss": 3.6335, |
| "step": 768000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7417852109884746e-05, |
| "loss": 3.6302, |
| "step": 768512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.740946616237422e-05, |
| "loss": 3.6247, |
| "step": 769024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.74010802148637e-05, |
| "loss": 3.635, |
| "step": 769536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7392710646156915e-05, |
| "loss": 3.6246, |
| "step": 770048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7384324698646395e-05, |
| "loss": 3.633, |
| "step": 770560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.737593875113587e-05, |
| "loss": 3.612, |
| "step": 771072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.736755280362535e-05, |
| "loss": 3.6151, |
| "step": 771584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.735916685611483e-05, |
| "loss": 3.636, |
| "step": 772096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.735078090860431e-05, |
| "loss": 3.6259, |
| "step": 772608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7342411339897524e-05, |
| "loss": 3.6383, |
| "step": 773120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7334025392387004e-05, |
| "loss": 3.619, |
| "step": 773632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7325639444876483e-05, |
| "loss": 3.6388, |
| "step": 774144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7317253497365963e-05, |
| "loss": 3.6266, |
| "step": 774656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.730888392865917e-05, |
| "loss": 3.623, |
| "step": 775168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.730049798114865e-05, |
| "loss": 3.6276, |
| "step": 775680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.729211203363813e-05, |
| "loss": 3.6243, |
| "step": 776192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.728372608612761e-05, |
| "loss": 3.6316, |
| "step": 776704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.727534013861709e-05, |
| "loss": 3.6204, |
| "step": 777216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.726695419110657e-05, |
| "loss": 3.6292, |
| "step": 777728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.725856824359605e-05, |
| "loss": 3.6273, |
| "step": 778240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.725018229608553e-05, |
| "loss": 3.6066, |
| "step": 778752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.724182910618248e-05, |
| "loss": 3.6287, |
| "step": 779264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.723344315867196e-05, |
| "loss": 3.6305, |
| "step": 779776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.722505721116144e-05, |
| "loss": 3.6274, |
| "step": 780288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.721667126365092e-05, |
| "loss": 3.6251, |
| "step": 780800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.72082853161404e-05, |
| "loss": 3.6263, |
| "step": 781312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7199915747433606e-05, |
| "loss": 3.6248, |
| "step": 781824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7191529799923086e-05, |
| "loss": 3.6292, |
| "step": 782336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7183143852412566e-05, |
| "loss": 3.6343, |
| "step": 782848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7174757904902046e-05, |
| "loss": 3.6122, |
| "step": 783360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7166388336195255e-05, |
| "loss": 3.6199, |
| "step": 783872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7158002388684735e-05, |
| "loss": 3.6126, |
| "step": 784384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7149616441174215e-05, |
| "loss": 3.6235, |
| "step": 784896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7141230493663695e-05, |
| "loss": 3.6276, |
| "step": 785408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.713286092495691e-05, |
| "loss": 3.6158, |
| "step": 785920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.712447497744639e-05, |
| "loss": 3.6343, |
| "step": 786432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.711608902993587e-05, |
| "loss": 3.6195, |
| "step": 786944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.710770308242535e-05, |
| "loss": 3.6321, |
| "step": 787456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.709931713491483e-05, |
| "loss": 3.6225, |
| "step": 787968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.709094756620804e-05, |
| "loss": 3.6064, |
| "step": 788480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.708256161869752e-05, |
| "loss": 3.6192, |
| "step": 788992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7074175671187e-05, |
| "loss": 3.6103, |
| "step": 789504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.706578972367648e-05, |
| "loss": 3.6262, |
| "step": 790016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.705740377616596e-05, |
| "loss": 3.6171, |
| "step": 790528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.704903420745917e-05, |
| "loss": 3.6179, |
| "step": 791040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.704064825994865e-05, |
| "loss": 3.6045, |
| "step": 791552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.703226231243813e-05, |
| "loss": 3.6198, |
| "step": 792064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7023876364927615e-05, |
| "loss": 3.6081, |
| "step": 792576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7015506796220824e-05, |
| "loss": 3.6231, |
| "step": 793088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7007120848710304e-05, |
| "loss": 3.615, |
| "step": 793600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6998734901199784e-05, |
| "loss": 3.613, |
| "step": 794112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6990348953689264e-05, |
| "loss": 3.6178, |
| "step": 794624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6981963006178744e-05, |
| "loss": 3.6033, |
| "step": 795136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.697359343747195e-05, |
| "loss": 3.6227, |
| "step": 795648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.696520748996143e-05, |
| "loss": 3.6162, |
| "step": 796160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.695682154245091e-05, |
| "loss": 3.6223, |
| "step": 796672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.694843559494039e-05, |
| "loss": 3.6112, |
| "step": 797184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.694004964742987e-05, |
| "loss": 3.6066, |
| "step": 797696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.693166369991935e-05, |
| "loss": 3.6074, |
| "step": 798208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.692327775240883e-05, |
| "loss": 3.614, |
| "step": 798720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.691489180489831e-05, |
| "loss": 3.6115, |
| "step": 799232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.690652223619153e-05, |
| "loss": 3.6173, |
| "step": 799744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.689815266748474e-05, |
| "loss": 3.6164, |
| "step": 800256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.688976671997422e-05, |
| "loss": 3.6352, |
| "step": 800768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.68813807724637e-05, |
| "loss": 3.6278, |
| "step": 801280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.687299482495317e-05, |
| "loss": 3.6188, |
| "step": 801792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.686462525624639e-05, |
| "loss": 3.6195, |
| "step": 802304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.685623930873587e-05, |
| "loss": 3.6097, |
| "step": 802816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.684785336122535e-05, |
| "loss": 3.6144, |
| "step": 803328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.683946741371482e-05, |
| "loss": 3.6179, |
| "step": 803840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6831097845008036e-05, |
| "loss": 3.6261, |
| "step": 804352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.682271189749752e-05, |
| "loss": 3.6199, |
| "step": 804864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6814325949987e-05, |
| "loss": 3.6046, |
| "step": 805376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6805940002476476e-05, |
| "loss": 3.6148, |
| "step": 805888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.679757043376969e-05, |
| "loss": 3.5996, |
| "step": 806400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.678918448625917e-05, |
| "loss": 3.6147, |
| "step": 806912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6780798538748645e-05, |
| "loss": 3.6099, |
| "step": 807424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6772412591238125e-05, |
| "loss": 3.616, |
| "step": 807936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6764026643727605e-05, |
| "loss": 3.627, |
| "step": 808448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.675565707502082e-05, |
| "loss": 3.6196, |
| "step": 808960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6747271127510294e-05, |
| "loss": 3.6094, |
| "step": 809472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6738885179999774e-05, |
| "loss": 3.6128, |
| "step": 809984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.673049923248926e-05, |
| "loss": 3.6152, |
| "step": 810496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.672211328497874e-05, |
| "loss": 3.6087, |
| "step": 811008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.671372733746822e-05, |
| "loss": 3.6083, |
| "step": 811520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.67053413899577e-05, |
| "loss": 3.6168, |
| "step": 812032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.669697182125091e-05, |
| "loss": 3.6253, |
| "step": 812544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.668858587374039e-05, |
| "loss": 3.6294, |
| "step": 813056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.668019992622987e-05, |
| "loss": 3.6124, |
| "step": 813568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.667181397871935e-05, |
| "loss": 3.6189, |
| "step": 814080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.666342803120883e-05, |
| "loss": 3.6112, |
| "step": 814592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.665504208369831e-05, |
| "loss": 3.6186, |
| "step": 815104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.664665613618779e-05, |
| "loss": 3.6124, |
| "step": 815616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6638286567481e-05, |
| "loss": 3.6288, |
| "step": 816128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.662990061997048e-05, |
| "loss": 3.6194, |
| "step": 816640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.662151467245996e-05, |
| "loss": 3.6015, |
| "step": 817152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.661312872494944e-05, |
| "loss": 3.6075, |
| "step": 817664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6604742777438925e-05, |
| "loss": 3.6053, |
| "step": 818176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6596373208732134e-05, |
| "loss": 3.6082, |
| "step": 818688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6587987261221614e-05, |
| "loss": 3.6125, |
| "step": 819200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6579601313711094e-05, |
| "loss": 3.6223, |
| "step": 819712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6571215366200574e-05, |
| "loss": 3.5951, |
| "step": 820224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.656284579749378e-05, |
| "loss": 3.6101, |
| "step": 820736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.655445984998326e-05, |
| "loss": 3.6225, |
| "step": 821248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.654607390247274e-05, |
| "loss": 3.6197, |
| "step": 821760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.653768795496222e-05, |
| "loss": 3.6214, |
| "step": 822272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.65293020074517e-05, |
| "loss": 3.6178, |
| "step": 822784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.652093243874491e-05, |
| "loss": 3.5996, |
| "step": 823296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.651254649123439e-05, |
| "loss": 3.6194, |
| "step": 823808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.650416054372388e-05, |
| "loss": 3.6212, |
| "step": 824320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.649577459621336e-05, |
| "loss": 3.6105, |
| "step": 824832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.648738864870284e-05, |
| "loss": 3.5987, |
| "step": 825344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.647901907999605e-05, |
| "loss": 3.6101, |
| "step": 825856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.647063313248553e-05, |
| "loss": 3.6111, |
| "step": 826368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.646224718497501e-05, |
| "loss": 3.6164, |
| "step": 826880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.645386123746448e-05, |
| "loss": 3.611, |
| "step": 827392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.644547528995396e-05, |
| "loss": 3.6199, |
| "step": 827904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6437105721247176e-05, |
| "loss": 3.6029, |
| "step": 828416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6428719773736656e-05, |
| "loss": 3.622, |
| "step": 828928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.642033382622613e-05, |
| "loss": 3.6067, |
| "step": 829440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6411947878715616e-05, |
| "loss": 3.6195, |
| "step": 829952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.640357831000883e-05, |
| "loss": 3.6323, |
| "step": 830464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6395192362498305e-05, |
| "loss": 3.6108, |
| "step": 830976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6386806414987785e-05, |
| "loss": 3.6165, |
| "step": 831488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6378420467477265e-05, |
| "loss": 3.6181, |
| "step": 832000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.637005089877048e-05, |
| "loss": 3.6021, |
| "step": 832512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6361664951259954e-05, |
| "loss": 3.6064, |
| "step": 833024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6353279003749434e-05, |
| "loss": 3.61, |
| "step": 833536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6344893056238914e-05, |
| "loss": 3.6187, |
| "step": 834048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6336507108728394e-05, |
| "loss": 3.6163, |
| "step": 834560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.63281375400216e-05, |
| "loss": 3.617, |
| "step": 835072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.631975159251108e-05, |
| "loss": 3.6189, |
| "step": 835584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.631136564500057e-05, |
| "loss": 3.6105, |
| "step": 836096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.630297969749005e-05, |
| "loss": 3.6029, |
| "step": 836608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.629459374997953e-05, |
| "loss": 3.6116, |
| "step": 837120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.628620780246901e-05, |
| "loss": 3.6222, |
| "step": 837632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.627782185495849e-05, |
| "loss": 3.6207, |
| "step": 838144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.626943590744797e-05, |
| "loss": 3.6047, |
| "step": 838656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.626106633874118e-05, |
| "loss": 3.6101, |
| "step": 839168 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8408679962158203, |
| "eval_runtime": 316.2085, |
| "eval_samples_per_second": 1206.77, |
| "eval_steps_per_second": 37.712, |
| "step": 839509 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.625269677003439e-05, |
| "loss": 3.5989, |
| "step": 839680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.624431082252387e-05, |
| "loss": 3.6106, |
| "step": 840192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.623592487501335e-05, |
| "loss": 3.6132, |
| "step": 840704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.622753892750283e-05, |
| "loss": 3.6167, |
| "step": 841216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.621916935879604e-05, |
| "loss": 3.6265, |
| "step": 841728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6210783411285524e-05, |
| "loss": 3.6098, |
| "step": 842240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6202397463775004e-05, |
| "loss": 3.6159, |
| "step": 842752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6194011516264484e-05, |
| "loss": 3.6139, |
| "step": 843264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6185625568753964e-05, |
| "loss": 3.6058, |
| "step": 843776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.617725600004717e-05, |
| "loss": 3.6059, |
| "step": 844288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.616887005253665e-05, |
| "loss": 3.6184, |
| "step": 844800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.616048410502613e-05, |
| "loss": 3.6248, |
| "step": 845312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.615211453631934e-05, |
| "loss": 3.6171, |
| "step": 845824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.614372858880882e-05, |
| "loss": 3.5941, |
| "step": 846336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.61353426412983e-05, |
| "loss": 3.6152, |
| "step": 846848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.612695669378778e-05, |
| "loss": 3.6002, |
| "step": 847360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.611857074627726e-05, |
| "loss": 3.61, |
| "step": 847872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.611018479876674e-05, |
| "loss": 3.6086, |
| "step": 848384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.610179885125622e-05, |
| "loss": 3.6089, |
| "step": 848896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.609341290374571e-05, |
| "loss": 3.6174, |
| "step": 849408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.608502695623519e-05, |
| "loss": 3.6222, |
| "step": 849920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.60766573875284e-05, |
| "loss": 3.6138, |
| "step": 850432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.606827144001788e-05, |
| "loss": 3.6099, |
| "step": 850944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.605988549250736e-05, |
| "loss": 3.6004, |
| "step": 851456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.605149954499684e-05, |
| "loss": 3.6172, |
| "step": 851968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.604311359748632e-05, |
| "loss": 3.615, |
| "step": 852480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6034744028779526e-05, |
| "loss": 3.6058, |
| "step": 852992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6026358081269006e-05, |
| "loss": 3.6085, |
| "step": 853504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6017972133758486e-05, |
| "loss": 3.6038, |
| "step": 854016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6009586186247966e-05, |
| "loss": 3.6117, |
| "step": 854528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6001216617541175e-05, |
| "loss": 3.6015, |
| "step": 855040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5992830670030655e-05, |
| "loss": 3.6122, |
| "step": 855552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.598444472252014e-05, |
| "loss": 3.6169, |
| "step": 856064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5976058775009615e-05, |
| "loss": 3.6151, |
| "step": 856576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.596768920630283e-05, |
| "loss": 3.6105, |
| "step": 857088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.595930325879231e-05, |
| "loss": 3.6065, |
| "step": 857600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.595091731128179e-05, |
| "loss": 3.6162, |
| "step": 858112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5942531363771264e-05, |
| "loss": 3.606, |
| "step": 858624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5934145416260744e-05, |
| "loss": 3.6008, |
| "step": 859136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5925759468750224e-05, |
| "loss": 3.6061, |
| "step": 859648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.591738990004344e-05, |
| "loss": 3.5978, |
| "step": 860160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.590900395253291e-05, |
| "loss": 3.6054, |
| "step": 860672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.590061800502239e-05, |
| "loss": 3.6028, |
| "step": 861184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.589223205751188e-05, |
| "loss": 3.6093, |
| "step": 861696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.588384611000136e-05, |
| "loss": 3.6114, |
| "step": 862208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.587546016249084e-05, |
| "loss": 3.6147, |
| "step": 862720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.586707421498032e-05, |
| "loss": 3.6131, |
| "step": 863232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.58586882674698e-05, |
| "loss": 3.6078, |
| "step": 863744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.585031869876301e-05, |
| "loss": 3.5855, |
| "step": 864256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.584193275125249e-05, |
| "loss": 3.6147, |
| "step": 864768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.583354680374197e-05, |
| "loss": 3.5993, |
| "step": 865280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.582516085623145e-05, |
| "loss": 3.6066, |
| "step": 865792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.581677490872093e-05, |
| "loss": 3.612, |
| "step": 866304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.580840534001414e-05, |
| "loss": 3.6041, |
| "step": 866816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.580001939250362e-05, |
| "loss": 3.5886, |
| "step": 867328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.57916334449931e-05, |
| "loss": 3.5954, |
| "step": 867840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.578324749748258e-05, |
| "loss": 3.6045, |
| "step": 868352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.577487792877579e-05, |
| "loss": 3.5977, |
| "step": 868864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.576649198126527e-05, |
| "loss": 3.6136, |
| "step": 869376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.575810603375475e-05, |
| "loss": 3.6014, |
| "step": 869888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.574972008624423e-05, |
| "loss": 3.5911, |
| "step": 870400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.574133413873371e-05, |
| "loss": 3.6209, |
| "step": 870912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.573294819122319e-05, |
| "loss": 3.597, |
| "step": 871424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.57245786225164e-05, |
| "loss": 3.5894, |
| "step": 871936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.571619267500588e-05, |
| "loss": 3.6032, |
| "step": 872448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.570780672749536e-05, |
| "loss": 3.5989, |
| "step": 872960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569942077998484e-05, |
| "loss": 3.5932, |
| "step": 873472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569105121127805e-05, |
| "loss": 3.6007, |
| "step": 873984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.568266526376753e-05, |
| "loss": 3.5841, |
| "step": 874496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.567427931625702e-05, |
| "loss": 3.5969, |
| "step": 875008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.56658933687465e-05, |
| "loss": 3.6098, |
| "step": 875520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.565752380003971e-05, |
| "loss": 3.6014, |
| "step": 876032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.564913785252919e-05, |
| "loss": 3.608, |
| "step": 876544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5640751905018667e-05, |
| "loss": 3.6007, |
| "step": 877056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5632365957508147e-05, |
| "loss": 3.6189, |
| "step": 877568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5623996388801356e-05, |
| "loss": 3.5944, |
| "step": 878080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5615610441290836e-05, |
| "loss": 3.6111, |
| "step": 878592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5607224493780316e-05, |
| "loss": 3.6021, |
| "step": 879104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5598838546269796e-05, |
| "loss": 3.5888, |
| "step": 879616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.559045259875927e-05, |
| "loss": 3.61, |
| "step": 880128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5582083030052485e-05, |
| "loss": 3.6105, |
| "step": 880640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.557369708254197e-05, |
| "loss": 3.6055, |
| "step": 881152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.556531113503145e-05, |
| "loss": 3.609, |
| "step": 881664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5556925187520924e-05, |
| "loss": 3.5871, |
| "step": 882176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5548539240010404e-05, |
| "loss": 3.5854, |
| "step": 882688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.554016967130362e-05, |
| "loss": 3.6004, |
| "step": 883200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.55317837237931e-05, |
| "loss": 3.6068, |
| "step": 883712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5523397776282573e-05, |
| "loss": 3.5953, |
| "step": 884224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.551501182877205e-05, |
| "loss": 3.592, |
| "step": 884736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.550664226006527e-05, |
| "loss": 3.6005, |
| "step": 885248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.549825631255474e-05, |
| "loss": 3.6013, |
| "step": 885760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.548987036504422e-05, |
| "loss": 3.5962, |
| "step": 886272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.548148441753371e-05, |
| "loss": 3.596, |
| "step": 886784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.547309847002319e-05, |
| "loss": 3.6082, |
| "step": 887296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.54647289013164e-05, |
| "loss": 3.6196, |
| "step": 887808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.545634295380588e-05, |
| "loss": 3.607, |
| "step": 888320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.544795700629536e-05, |
| "loss": 3.593, |
| "step": 888832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.543957105878484e-05, |
| "loss": 3.5948, |
| "step": 889344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.543120149007805e-05, |
| "loss": 3.5985, |
| "step": 889856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.542281554256753e-05, |
| "loss": 3.6096, |
| "step": 890368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.541442959505701e-05, |
| "loss": 3.5998, |
| "step": 890880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.540604364754649e-05, |
| "loss": 3.5989, |
| "step": 891392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5397674078839696e-05, |
| "loss": 3.5951, |
| "step": 891904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5389288131329176e-05, |
| "loss": 3.6059, |
| "step": 892416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.538090218381866e-05, |
| "loss": 3.6027, |
| "step": 892928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.537251623630814e-05, |
| "loss": 3.5961, |
| "step": 893440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.536414666760135e-05, |
| "loss": 3.5907, |
| "step": 893952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.535576072009083e-05, |
| "loss": 3.5925, |
| "step": 894464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.534737477258031e-05, |
| "loss": 3.5971, |
| "step": 894976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.533898882506979e-05, |
| "loss": 3.591, |
| "step": 895488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5330619256363e-05, |
| "loss": 3.5973, |
| "step": 896000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.532223330885248e-05, |
| "loss": 3.592, |
| "step": 896512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.531384736134196e-05, |
| "loss": 3.601, |
| "step": 897024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.530546141383144e-05, |
| "loss": 3.5924, |
| "step": 897536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.529707546632092e-05, |
| "loss": 3.6077, |
| "step": 898048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.528870589761413e-05, |
| "loss": 3.6098, |
| "step": 898560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.528031995010361e-05, |
| "loss": 3.5997, |
| "step": 899072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5271934002593096e-05, |
| "loss": 3.6098, |
| "step": 899584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5263548055082576e-05, |
| "loss": 3.5979, |
| "step": 900096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5255178486375785e-05, |
| "loss": 3.5948, |
| "step": 900608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5246792538865265e-05, |
| "loss": 3.5948, |
| "step": 901120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5238406591354745e-05, |
| "loss": 3.5859, |
| "step": 901632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5230020643844225e-05, |
| "loss": 3.5998, |
| "step": 902144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5221634696333705e-05, |
| "loss": 3.5981, |
| "step": 902656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5213248748823185e-05, |
| "loss": 3.6006, |
| "step": 903168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5204879180116394e-05, |
| "loss": 3.5938, |
| "step": 903680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5196493232605874e-05, |
| "loss": 3.6023, |
| "step": 904192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5188107285095354e-05, |
| "loss": 3.5975, |
| "step": 904704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5179721337584834e-05, |
| "loss": 3.5919, |
| "step": 905216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.517135176887805e-05, |
| "loss": 3.5988, |
| "step": 905728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.516296582136753e-05, |
| "loss": 3.6003, |
| "step": 906240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.515457987385701e-05, |
| "loss": 3.6076, |
| "step": 906752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.514619392634649e-05, |
| "loss": 3.5887, |
| "step": 907264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.51378243576397e-05, |
| "loss": 3.6103, |
| "step": 907776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.512943841012918e-05, |
| "loss": 3.5923, |
| "step": 908288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.512105246261866e-05, |
| "loss": 3.6046, |
| "step": 908800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.511266651510814e-05, |
| "loss": 3.6034, |
| "step": 909312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.510428056759762e-05, |
| "loss": 3.5942, |
| "step": 909824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.50958946200871e-05, |
| "loss": 3.5906, |
| "step": 910336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.508752505138031e-05, |
| "loss": 3.6115, |
| "step": 910848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.507913910386979e-05, |
| "loss": 3.5859, |
| "step": 911360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.507075315635927e-05, |
| "loss": 3.6024, |
| "step": 911872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.506236720884875e-05, |
| "loss": 3.597, |
| "step": 912384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5053981261338234e-05, |
| "loss": 3.5987, |
| "step": 912896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5045611692631444e-05, |
| "loss": 3.5922, |
| "step": 913408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5037225745120924e-05, |
| "loss": 3.6014, |
| "step": 913920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5028839797610403e-05, |
| "loss": 3.599, |
| "step": 914432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.502045385009988e-05, |
| "loss": 3.6099, |
| "step": 914944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.501208428139309e-05, |
| "loss": 3.5963, |
| "step": 915456 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8362677097320557, |
| "eval_runtime": 305.5683, |
| "eval_samples_per_second": 1248.791, |
| "eval_steps_per_second": 39.026, |
| "step": 915829 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.500369833388257e-05, |
| "loss": 3.589, |
| "step": 915968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.499531238637205e-05, |
| "loss": 3.5888, |
| "step": 916480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4986926438861526e-05, |
| "loss": 3.5935, |
| "step": 916992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4978540491351006e-05, |
| "loss": 3.5988, |
| "step": 917504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.497017092264422e-05, |
| "loss": 3.6002, |
| "step": 918016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.49617849751337e-05, |
| "loss": 3.591, |
| "step": 918528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.495339902762318e-05, |
| "loss": 3.5984, |
| "step": 919040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.494501308011266e-05, |
| "loss": 3.5921, |
| "step": 919552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.493662713260214e-05, |
| "loss": 3.5857, |
| "step": 920064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.492825756389535e-05, |
| "loss": 3.5793, |
| "step": 920576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.491987161638483e-05, |
| "loss": 3.6034, |
| "step": 921088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.491148566887431e-05, |
| "loss": 3.6032, |
| "step": 921600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4903116100167526e-05, |
| "loss": 3.5952, |
| "step": 922112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4894730152657e-05, |
| "loss": 3.5735, |
| "step": 922624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.488634420514648e-05, |
| "loss": 3.5921, |
| "step": 923136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.487795825763596e-05, |
| "loss": 3.5789, |
| "step": 923648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.486957231012544e-05, |
| "loss": 3.5932, |
| "step": 924160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4861186362614926e-05, |
| "loss": 3.5877, |
| "step": 924672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4852800415104406e-05, |
| "loss": 3.587, |
| "step": 925184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4844414467593886e-05, |
| "loss": 3.5952, |
| "step": 925696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4836044898887095e-05, |
| "loss": 3.6014, |
| "step": 926208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4827658951376575e-05, |
| "loss": 3.5898, |
| "step": 926720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4819273003866055e-05, |
| "loss": 3.59, |
| "step": 927232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4810887056355535e-05, |
| "loss": 3.5846, |
| "step": 927744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4802517487648744e-05, |
| "loss": 3.5953, |
| "step": 928256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4794131540138224e-05, |
| "loss": 3.5935, |
| "step": 928768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4785745592627704e-05, |
| "loss": 3.5813, |
| "step": 929280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4777359645117184e-05, |
| "loss": 3.5877, |
| "step": 929792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4768973697606664e-05, |
| "loss": 3.5874, |
| "step": 930304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.476060412889988e-05, |
| "loss": 3.5874, |
| "step": 930816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.475221818138936e-05, |
| "loss": 3.5851, |
| "step": 931328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.474383223387884e-05, |
| "loss": 3.5915, |
| "step": 931840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.473544628636832e-05, |
| "loss": 3.5901, |
| "step": 932352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.472707671766153e-05, |
| "loss": 3.5998, |
| "step": 932864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.471869077015101e-05, |
| "loss": 3.5886, |
| "step": 933376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.471030482264049e-05, |
| "loss": 3.5848, |
| "step": 933888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.470191887512997e-05, |
| "loss": 3.5969, |
| "step": 934400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.469354930642318e-05, |
| "loss": 3.5807, |
| "step": 934912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.468516335891266e-05, |
| "loss": 3.5811, |
| "step": 935424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.467677741140214e-05, |
| "loss": 3.5846, |
| "step": 935936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.466839146389162e-05, |
| "loss": 3.5796, |
| "step": 936448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.46600055163811e-05, |
| "loss": 3.5838, |
| "step": 936960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.465161956887058e-05, |
| "loss": 3.5842, |
| "step": 937472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.464325000016379e-05, |
| "loss": 3.5881, |
| "step": 937984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.463486405265327e-05, |
| "loss": 3.5903, |
| "step": 938496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.462647810514275e-05, |
| "loss": 3.5947, |
| "step": 939008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.461809215763223e-05, |
| "loss": 3.5883, |
| "step": 939520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.460972258892544e-05, |
| "loss": 3.5896, |
| "step": 940032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.460133664141492e-05, |
| "loss": 3.5697, |
| "step": 940544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.45929506939044e-05, |
| "loss": 3.5933, |
| "step": 941056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.458456474639388e-05, |
| "loss": 3.5777, |
| "step": 941568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.457619517768709e-05, |
| "loss": 3.5852, |
| "step": 942080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.456780923017657e-05, |
| "loss": 3.5889, |
| "step": 942592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.455942328266605e-05, |
| "loss": 3.5834, |
| "step": 943104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.455103733515553e-05, |
| "loss": 3.569, |
| "step": 943616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.454266776644875e-05, |
| "loss": 3.5773, |
| "step": 944128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.453428181893823e-05, |
| "loss": 3.5838, |
| "step": 944640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.452589587142771e-05, |
| "loss": 3.575, |
| "step": 945152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.451750992391719e-05, |
| "loss": 3.5944, |
| "step": 945664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4509140355210396e-05, |
| "loss": 3.5822, |
| "step": 946176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4500754407699876e-05, |
| "loss": 3.5686, |
| "step": 946688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4492368460189356e-05, |
| "loss": 3.5993, |
| "step": 947200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4483982512678836e-05, |
| "loss": 3.5763, |
| "step": 947712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4475612943972045e-05, |
| "loss": 3.5698, |
| "step": 948224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4467226996461525e-05, |
| "loss": 3.5838, |
| "step": 948736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4458841048951005e-05, |
| "loss": 3.578, |
| "step": 949248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4450455101440485e-05, |
| "loss": 3.5719, |
| "step": 949760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4442069153929965e-05, |
| "loss": 3.5815, |
| "step": 950272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.443369958522318e-05, |
| "loss": 3.5674, |
| "step": 950784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.442531363771266e-05, |
| "loss": 3.5708, |
| "step": 951296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4416927690202134e-05, |
| "loss": 3.5913, |
| "step": 951808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4408541742691614e-05, |
| "loss": 3.5818, |
| "step": 952320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.440017217398483e-05, |
| "loss": 3.586, |
| "step": 952832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.439178622647431e-05, |
| "loss": 3.5842, |
| "step": 953344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.438340027896378e-05, |
| "loss": 3.5971, |
| "step": 953856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.437501433145326e-05, |
| "loss": 3.574, |
| "step": 954368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.436664476274648e-05, |
| "loss": 3.5939, |
| "step": 954880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.435825881523596e-05, |
| "loss": 3.5799, |
| "step": 955392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434987286772544e-05, |
| "loss": 3.572, |
| "step": 955904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434148692021492e-05, |
| "loss": 3.5849, |
| "step": 956416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4333117351508134e-05, |
| "loss": 3.5927, |
| "step": 956928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.432473140399761e-05, |
| "loss": 3.5827, |
| "step": 957440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.431634545648709e-05, |
| "loss": 3.5927, |
| "step": 957952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.430795950897657e-05, |
| "loss": 3.5679, |
| "step": 958464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.429957356146605e-05, |
| "loss": 3.5645, |
| "step": 958976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4291203992759256e-05, |
| "loss": 3.5792, |
| "step": 959488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4282818045248736e-05, |
| "loss": 3.585, |
| "step": 960000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4274432097738216e-05, |
| "loss": 3.5767, |
| "step": 960512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4266046150227696e-05, |
| "loss": 3.5736, |
| "step": 961024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.425767658152091e-05, |
| "loss": 3.5814, |
| "step": 961536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.424929063401039e-05, |
| "loss": 3.5819, |
| "step": 962048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.424090468649987e-05, |
| "loss": 3.574, |
| "step": 962560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.423251873898935e-05, |
| "loss": 3.5753, |
| "step": 963072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.422414917028256e-05, |
| "loss": 3.5864, |
| "step": 963584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.421576322277204e-05, |
| "loss": 3.5994, |
| "step": 964096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.420737727526152e-05, |
| "loss": 3.5875, |
| "step": 964608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4198991327751e-05, |
| "loss": 3.5721, |
| "step": 965120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.419060538024048e-05, |
| "loss": 3.5783, |
| "step": 965632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.418223581153369e-05, |
| "loss": 3.5787, |
| "step": 966144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.417384986402317e-05, |
| "loss": 3.5868, |
| "step": 966656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.416546391651265e-05, |
| "loss": 3.5826, |
| "step": 967168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.415707796900213e-05, |
| "loss": 3.5771, |
| "step": 967680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4148708400295346e-05, |
| "loss": 3.5772, |
| "step": 968192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4140322452784826e-05, |
| "loss": 3.5816, |
| "step": 968704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4131936505274306e-05, |
| "loss": 3.5848, |
| "step": 969216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4123550557763786e-05, |
| "loss": 3.5765, |
| "step": 969728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4115180989056995e-05, |
| "loss": 3.5737, |
| "step": 970240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4106795041546475e-05, |
| "loss": 3.5714, |
| "step": 970752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4098409094035955e-05, |
| "loss": 3.5785, |
| "step": 971264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4090023146525434e-05, |
| "loss": 3.5739, |
| "step": 971776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4081653577818644e-05, |
| "loss": 3.5749, |
| "step": 972288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4073267630308124e-05, |
| "loss": 3.5735, |
| "step": 972800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4064881682797603e-05, |
| "loss": 3.5792, |
| "step": 973312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4056495735287083e-05, |
| "loss": 3.5734, |
| "step": 973824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4048109787776563e-05, |
| "loss": 3.5866, |
| "step": 974336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.403974021906978e-05, |
| "loss": 3.5929, |
| "step": 974848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.403135427155926e-05, |
| "loss": 3.5802, |
| "step": 975360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.402296832404874e-05, |
| "loss": 3.5869, |
| "step": 975872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.401458237653822e-05, |
| "loss": 3.5818, |
| "step": 976384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.400621280783143e-05, |
| "loss": 3.5751, |
| "step": 976896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.399782686032091e-05, |
| "loss": 3.5786, |
| "step": 977408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398944091281039e-05, |
| "loss": 3.5608, |
| "step": 977920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398105496529987e-05, |
| "loss": 3.5822, |
| "step": 978432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.397266901778935e-05, |
| "loss": 3.5774, |
| "step": 978944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.396429944908256e-05, |
| "loss": 3.5826, |
| "step": 979456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.395591350157204e-05, |
| "loss": 3.5745, |
| "step": 979968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.394752755406152e-05, |
| "loss": 3.5873, |
| "step": 980480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3939141606551e-05, |
| "loss": 3.5762, |
| "step": 980992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.393077203784421e-05, |
| "loss": 3.5712, |
| "step": 981504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.392238609033369e-05, |
| "loss": 3.5835, |
| "step": 982016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.391400014282317e-05, |
| "loss": 3.5787, |
| "step": 982528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.390561419531265e-05, |
| "loss": 3.5891, |
| "step": 983040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.389724462660586e-05, |
| "loss": 3.5742, |
| "step": 983552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388885867909534e-05, |
| "loss": 3.5865, |
| "step": 984064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388047273158482e-05, |
| "loss": 3.5766, |
| "step": 984576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.38720867840743e-05, |
| "loss": 3.5822, |
| "step": 985088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.386371721536751e-05, |
| "loss": 3.5844, |
| "step": 985600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.385533126785699e-05, |
| "loss": 3.5744, |
| "step": 986112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.384694532034647e-05, |
| "loss": 3.5718, |
| "step": 986624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383855937283595e-05, |
| "loss": 3.5884, |
| "step": 987136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383017342532544e-05, |
| "loss": 3.5705, |
| "step": 987648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3821803856618647e-05, |
| "loss": 3.579, |
| "step": 988160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3813417909108126e-05, |
| "loss": 3.581, |
| "step": 988672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3805031961597606e-05, |
| "loss": 3.5799, |
| "step": 989184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3796646014087086e-05, |
| "loss": 3.5757, |
| "step": 989696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3788276445380295e-05, |
| "loss": 3.5802, |
| "step": 990208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3779890497869775e-05, |
| "loss": 3.581, |
| "step": 990720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3771504550359255e-05, |
| "loss": 3.5855, |
| "step": 991232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3763118602848735e-05, |
| "loss": 3.5791, |
| "step": 991744 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8367321491241455, |
| "eval_runtime": 306.279, |
| "eval_samples_per_second": 1245.894, |
| "eval_steps_per_second": 38.935, |
| "step": 992149 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.375473265533821e-05, |
| "loss": 3.5717, |
| "step": 992256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.374634670782769e-05, |
| "loss": 3.5649, |
| "step": 992768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3737960760317175e-05, |
| "loss": 3.573, |
| "step": 993280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3729574812806655e-05, |
| "loss": 3.5858, |
| "step": 993792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3721205244099864e-05, |
| "loss": 3.5774, |
| "step": 994304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3712819296589344e-05, |
| "loss": 3.5744, |
| "step": 994816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3704433349078824e-05, |
| "loss": 3.5782, |
| "step": 995328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3696047401568304e-05, |
| "loss": 3.5732, |
| "step": 995840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3687661454057784e-05, |
| "loss": 3.5653, |
| "step": 996352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367929188535099e-05, |
| "loss": 3.5647, |
| "step": 996864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367090593784047e-05, |
| "loss": 3.5826, |
| "step": 997376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.366251999032995e-05, |
| "loss": 3.5819, |
| "step": 997888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.365415042162316e-05, |
| "loss": 3.5844, |
| "step": 998400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.364576447411264e-05, |
| "loss": 3.5506, |
| "step": 998912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.363737852660213e-05, |
| "loss": 3.5722, |
| "step": 999424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.362899257909161e-05, |
| "loss": 3.5601, |
| "step": 999936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.362060663158109e-05, |
| "loss": 3.5758, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.361222068407057e-05, |
| "loss": 3.5703, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.360383473656005e-05, |
| "loss": 3.5697, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.359544878904953e-05, |
| "loss": 3.5739, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.358707922034274e-05, |
| "loss": 3.5871, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.357869327283222e-05, |
| "loss": 3.5716, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.35703073253217e-05, |
| "loss": 3.571, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.356192137781118e-05, |
| "loss": 3.5662, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.355355180910439e-05, |
| "loss": 3.5784, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.354516586159387e-05, |
| "loss": 3.5747, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.353677991408335e-05, |
| "loss": 3.5633, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3528393966572827e-05, |
| "loss": 3.5718, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.352002439786604e-05, |
| "loss": 3.5651, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.351163845035552e-05, |
| "loss": 3.5676, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3503252502845e-05, |
| "loss": 3.5689, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.349486655533448e-05, |
| "loss": 3.5722, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.348648060782396e-05, |
| "loss": 3.5724, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.347811103911717e-05, |
| "loss": 3.5787, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.346972509160665e-05, |
| "loss": 3.5742, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.346133914409613e-05, |
| "loss": 3.5697, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.345295319658561e-05, |
| "loss": 3.5774, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.344458362787882e-05, |
| "loss": 3.5618, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.34361976803683e-05, |
| "loss": 3.5647, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.342781173285778e-05, |
| "loss": 3.5678, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.341942578534727e-05, |
| "loss": 3.56, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3411056216640476e-05, |
| "loss": 3.5655, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3402670269129956e-05, |
| "loss": 3.5687, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3394284321619436e-05, |
| "loss": 3.5707, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3385898374108916e-05, |
| "loss": 3.5717, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3377528805402125e-05, |
| "loss": 3.5718, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3369142857891605e-05, |
| "loss": 3.5726, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3360756910381085e-05, |
| "loss": 3.5691, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3352370962870565e-05, |
| "loss": 3.5522, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3344001394163774e-05, |
| "loss": 3.5706, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3335615446653254e-05, |
| "loss": 3.564, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3327229499142734e-05, |
| "loss": 3.5703, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.331884355163222e-05, |
| "loss": 3.57, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3310457604121694e-05, |
| "loss": 3.5635, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.330208803541491e-05, |
| "loss": 3.5539, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.329370208790439e-05, |
| "loss": 3.5573, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.328531614039387e-05, |
| "loss": 3.5699, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.327693019288334e-05, |
| "loss": 3.5531, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.326854424537282e-05, |
| "loss": 3.5778, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.326017467666604e-05, |
| "loss": 3.5686, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.325178872915552e-05, |
| "loss": 3.5451, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.324340278164499e-05, |
| "loss": 3.581, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.323501683413447e-05, |
| "loss": 3.5595, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.322663088662396e-05, |
| "loss": 3.5547, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.321826131791717e-05, |
| "loss": 3.56, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320987537040665e-05, |
| "loss": 3.5648, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320148942289613e-05, |
| "loss": 3.5528, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.319310347538561e-05, |
| "loss": 3.5651, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3184733906678817e-05, |
| "loss": 3.5483, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3176347959168296e-05, |
| "loss": 3.5551, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3167962011657776e-05, |
| "loss": 3.5751, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3159576064147256e-05, |
| "loss": 3.5628, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3151206495440465e-05, |
| "loss": 3.5689, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3142820547929945e-05, |
| "loss": 3.5635, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3134434600419425e-05, |
| "loss": 3.5801, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3126048652908905e-05, |
| "loss": 3.5588, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.311767908420212e-05, |
| "loss": 3.5749, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.31092931366916e-05, |
| "loss": 3.5599, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.310090718918108e-05, |
| "loss": 3.5578, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.309252124167056e-05, |
| "loss": 3.5637, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.308415167296377e-05, |
| "loss": 3.5785, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.307576572545325e-05, |
| "loss": 3.562, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.306737977794273e-05, |
| "loss": 3.5758, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.305899383043221e-05, |
| "loss": 3.547, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.305062426172542e-05, |
| "loss": 3.5488, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.30422383142149e-05, |
| "loss": 3.5611, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.303385236670438e-05, |
| "loss": 3.5633, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.302546641919386e-05, |
| "loss": 3.5596, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3017096850487075e-05, |
| "loss": 3.5595, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3008710902976555e-05, |
| "loss": 3.5608, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3000324955466035e-05, |
| "loss": 3.5619, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2991939007955515e-05, |
| "loss": 3.5589, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2983569439248724e-05, |
| "loss": 3.5568, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2975183491738204e-05, |
| "loss": 3.5684, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2966797544227684e-05, |
| "loss": 3.5788, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2958411596717164e-05, |
| "loss": 3.5736, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.295004202801037e-05, |
| "loss": 3.5521, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.294165608049985e-05, |
| "loss": 3.5592, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.293327013298933e-05, |
| "loss": 3.5629, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.292488418547881e-05, |
| "loss": 3.5699, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.291651461677203e-05, |
| "loss": 3.5643, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.290812866926151e-05, |
| "loss": 3.5595, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.289974272175099e-05, |
| "loss": 3.5604, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.289135677424047e-05, |
| "loss": 3.5673, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.288298720553368e-05, |
| "loss": 3.5648, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.287460125802316e-05, |
| "loss": 3.5611, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.286621531051264e-05, |
| "loss": 3.5574, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.285782936300212e-05, |
| "loss": 3.5517, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2849459794295326e-05, |
| "loss": 3.5631, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2841073846784806e-05, |
| "loss": 3.5566, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2832687899274286e-05, |
| "loss": 3.5553, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2824301951763766e-05, |
| "loss": 3.5568, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.281593238305698e-05, |
| "loss": 3.5633, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.280754643554646e-05, |
| "loss": 3.5555, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.279916048803594e-05, |
| "loss": 3.5659, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.279077454052542e-05, |
| "loss": 3.572, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.27823885930149e-05, |
| "loss": 3.5654, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.277401902430811e-05, |
| "loss": 3.5704, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.276563307679759e-05, |
| "loss": 3.5656, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.275724712928707e-05, |
| "loss": 3.557, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.274886118177655e-05, |
| "loss": 3.5618, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.274049161306976e-05, |
| "loss": 3.5452, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.273210566555924e-05, |
| "loss": 3.5653, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.272371971804872e-05, |
| "loss": 3.5554, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.271533377053821e-05, |
| "loss": 3.567, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2706964201831416e-05, |
| "loss": 3.5612, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2698578254320896e-05, |
| "loss": 3.5636, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2690192306810376e-05, |
| "loss": 3.5554, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2681806359299856e-05, |
| "loss": 3.5541, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2673436790593065e-05, |
| "loss": 3.5634, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2665050843082545e-05, |
| "loss": 3.5645, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2656664895572025e-05, |
| "loss": 3.5708, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2648278948061505e-05, |
| "loss": 3.558, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2639909379354714e-05, |
| "loss": 3.5695, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2631523431844194e-05, |
| "loss": 3.5592, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2623137484333674e-05, |
| "loss": 3.5625, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.261475153682316e-05, |
| "loss": 3.5688, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.260638196811637e-05, |
| "loss": 3.5593, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.259799602060585e-05, |
| "loss": 3.5561, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.258961007309533e-05, |
| "loss": 3.5691, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.258122412558481e-05, |
| "loss": 3.5553, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.257283817807429e-05, |
| "loss": 3.5573, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.25644686093675e-05, |
| "loss": 3.5656, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.255608266185698e-05, |
| "loss": 3.565, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.254769671434646e-05, |
| "loss": 3.5569, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.253931076683594e-05, |
| "loss": 3.5664, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.253094119812915e-05, |
| "loss": 3.5597, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.252255525061863e-05, |
| "loss": 3.5733, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2514169303108114e-05, |
| "loss": 3.5582, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8382389545440674, |
| "eval_runtime": 301.8741, |
| "eval_samples_per_second": 1264.073, |
| "eval_steps_per_second": 39.503, |
| "step": 1068469 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2505783355597594e-05, |
| "loss": 3.5536, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.24974137868908e-05, |
| "loss": 3.5501, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.248902783938028e-05, |
| "loss": 3.5568, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.248064189186976e-05, |
| "loss": 3.5695, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.247225594435924e-05, |
| "loss": 3.5612, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2463869996848716e-05, |
| "loss": 3.5583, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.245550042814193e-05, |
| "loss": 3.5578, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.244711448063141e-05, |
| "loss": 3.558, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.243872853312089e-05, |
| "loss": 3.5478, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2430342585610365e-05, |
| "loss": 3.5525, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.242197301690358e-05, |
| "loss": 3.5588, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.241358706939306e-05, |
| "loss": 3.5716, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.240521750068628e-05, |
| "loss": 3.5682, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.239683155317576e-05, |
| "loss": 3.5349, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.238844560566524e-05, |
| "loss": 3.5537, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.238005965815472e-05, |
| "loss": 3.5424, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.237167371064419e-05, |
| "loss": 3.5612, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.236328776313367e-05, |
| "loss": 3.55, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.235490181562315e-05, |
| "loss": 3.5553, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.234651586811263e-05, |
| "loss": 3.5511, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.233814629940584e-05, |
| "loss": 3.572, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.232976035189532e-05, |
| "loss": 3.5539, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.23213744043848e-05, |
| "loss": 3.5559, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2312988456874285e-05, |
| "loss": 3.5491, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2304602509363765e-05, |
| "loss": 3.5674, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2296232940656975e-05, |
| "loss": 3.5533, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2287846993146454e-05, |
| "loss": 3.5491, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2279461045635934e-05, |
| "loss": 3.5528, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2271075098125414e-05, |
| "loss": 3.553, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2262705529418623e-05, |
| "loss": 3.5493, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2254319581908103e-05, |
| "loss": 3.5523, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2245933634397583e-05, |
| "loss": 3.5532, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.223754768688706e-05, |
| "loss": 3.5584, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.222917811818027e-05, |
| "loss": 3.5588, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.222079217066975e-05, |
| "loss": 3.5614, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.221240622315924e-05, |
| "loss": 3.5556, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.220402027564872e-05, |
| "loss": 3.5622, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.219565070694193e-05, |
| "loss": 3.5417, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.218726475943141e-05, |
| "loss": 3.5485, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.217887881192089e-05, |
| "loss": 3.5507, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.217049286441037e-05, |
| "loss": 3.5402, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.216210691689985e-05, |
| "loss": 3.5526, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.215373734819306e-05, |
| "loss": 3.5547, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.214535140068254e-05, |
| "loss": 3.5483, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.213696545317202e-05, |
| "loss": 3.5571, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.21285795056615e-05, |
| "loss": 3.556, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2120209936954706e-05, |
| "loss": 3.556, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.211182398944419e-05, |
| "loss": 3.5529, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.210343804193367e-05, |
| "loss": 3.5371, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.209505209442315e-05, |
| "loss": 3.5535, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.208666614691263e-05, |
| "loss": 3.5494, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.207829657820584e-05, |
| "loss": 3.5577, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.206991063069532e-05, |
| "loss": 3.5492, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.20615246831848e-05, |
| "loss": 3.5466, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.205313873567428e-05, |
| "loss": 3.5391, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.204476916696749e-05, |
| "loss": 3.5411, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.203638321945697e-05, |
| "loss": 3.5522, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.202799727194645e-05, |
| "loss": 3.5356, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.201961132443593e-05, |
| "loss": 3.5623, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2011241755729146e-05, |
| "loss": 3.5541, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2002855808218626e-05, |
| "loss": 3.5301, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1994469860708106e-05, |
| "loss": 3.5636, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1986083913197586e-05, |
| "loss": 3.5418, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1977697965687066e-05, |
| "loss": 3.5442, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1969328396980275e-05, |
| "loss": 3.5384, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1960942449469755e-05, |
| "loss": 3.5487, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1952556501959235e-05, |
| "loss": 3.5374, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1944170554448715e-05, |
| "loss": 3.5518, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1935800985741924e-05, |
| "loss": 3.5274, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1927415038231404e-05, |
| "loss": 3.5407, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1919029090720884e-05, |
| "loss": 3.5547, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1910643143210364e-05, |
| "loss": 3.547, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.190227357450358e-05, |
| "loss": 3.5528, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.189388762699306e-05, |
| "loss": 3.5493, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.188550167948254e-05, |
| "loss": 3.562, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.187711573197202e-05, |
| "loss": 3.5423, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.186874616326523e-05, |
| "loss": 3.5642, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.186036021575471e-05, |
| "loss": 3.5419, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.185197426824419e-05, |
| "loss": 3.5416, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.184358832073367e-05, |
| "loss": 3.5494, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.183521875202688e-05, |
| "loss": 3.5593, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.182683280451636e-05, |
| "loss": 3.549, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.181844685700584e-05, |
| "loss": 3.5573, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.181006090949532e-05, |
| "loss": 3.5351, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1801691340788534e-05, |
| "loss": 3.5287, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1793305393278014e-05, |
| "loss": 3.5459, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1784919445767494e-05, |
| "loss": 3.5482, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1776533498256974e-05, |
| "loss": 3.5417, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.176816392955018e-05, |
| "loss": 3.5419, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.175977798203966e-05, |
| "loss": 3.5477, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.175139203452914e-05, |
| "loss": 3.5457, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.174300608701862e-05, |
| "loss": 3.5426, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.173463651831183e-05, |
| "loss": 3.5369, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.172625057080131e-05, |
| "loss": 3.5542, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.171786462329079e-05, |
| "loss": 3.5637, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.170947867578027e-05, |
| "loss": 3.5549, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.170110910707349e-05, |
| "loss": 3.5384, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.169272315956297e-05, |
| "loss": 3.5439, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.168433721205245e-05, |
| "loss": 3.5444, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.167595126454192e-05, |
| "loss": 3.553, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1667581695835136e-05, |
| "loss": 3.5513, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1659195748324616e-05, |
| "loss": 3.5402, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1650809800814096e-05, |
| "loss": 3.544, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.164242385330357e-05, |
| "loss": 3.5487, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1634054284596785e-05, |
| "loss": 3.5512, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1625668337086265e-05, |
| "loss": 3.5436, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1617282389575745e-05, |
| "loss": 3.5459, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1608896442065225e-05, |
| "loss": 3.5331, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.160052687335844e-05, |
| "loss": 3.5489, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.159214092584792e-05, |
| "loss": 3.5361, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1583754978337394e-05, |
| "loss": 3.5385, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1575369030826874e-05, |
| "loss": 3.5402, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.156699946212009e-05, |
| "loss": 3.5471, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.155861351460957e-05, |
| "loss": 3.5424, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.155022756709904e-05, |
| "loss": 3.5451, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.154184161958852e-05, |
| "loss": 3.5548, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.153347205088174e-05, |
| "loss": 3.5536, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.152508610337121e-05, |
| "loss": 3.5569, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.15167001558607e-05, |
| "loss": 3.5451, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.150831420835018e-05, |
| "loss": 3.5443, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.149992826083966e-05, |
| "loss": 3.5452, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.149155869213287e-05, |
| "loss": 3.5292, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.148317274462235e-05, |
| "loss": 3.5479, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.147478679711183e-05, |
| "loss": 3.5428, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.146640084960131e-05, |
| "loss": 3.5499, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.145803128089452e-05, |
| "loss": 3.5428, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1449645333384e-05, |
| "loss": 3.551, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.144125938587348e-05, |
| "loss": 3.537, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.143287343836296e-05, |
| "loss": 3.541, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1424503869656166e-05, |
| "loss": 3.5508, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1416117922145646e-05, |
| "loss": 3.5445, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.140773197463513e-05, |
| "loss": 3.5557, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.139934602712461e-05, |
| "loss": 3.5426, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.139097645841782e-05, |
| "loss": 3.5489, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.13825905109073e-05, |
| "loss": 3.5457, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.137420456339678e-05, |
| "loss": 3.5433, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.136581861588626e-05, |
| "loss": 3.5524, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.135744904717947e-05, |
| "loss": 3.5409, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.134906309966895e-05, |
| "loss": 3.5466, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.134067715215843e-05, |
| "loss": 3.552, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.133229120464791e-05, |
| "loss": 3.5408, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.132392163594112e-05, |
| "loss": 3.541, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.13155356884306e-05, |
| "loss": 3.5512, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1307149740920086e-05, |
| "loss": 3.5466, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1298763793409566e-05, |
| "loss": 3.5422, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1290394224702775e-05, |
| "loss": 3.5522, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1282008277192255e-05, |
| "loss": 3.5405, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1273622329681735e-05, |
| "loss": 3.5605, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1265236382171215e-05, |
| "loss": 3.5416, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.83803653717041, |
| "eval_runtime": 301.7255, |
| "eval_samples_per_second": 1264.696, |
| "eval_steps_per_second": 39.523, |
| "step": 1144789 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1256850434660695e-05, |
| "loss": 3.5105, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1248464487150175e-05, |
| "loss": 3.5411, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1240078539639655e-05, |
| "loss": 3.5333, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1231692592129135e-05, |
| "loss": 3.5553, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1223323023422344e-05, |
| "loss": 3.5444, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1214937075911824e-05, |
| "loss": 3.5462, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1206551128401304e-05, |
| "loss": 3.5402, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1198165180890784e-05, |
| "loss": 3.5413, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.118977923338027e-05, |
| "loss": 3.5318, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.118140966467348e-05, |
| "loss": 3.5386, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.117302371716296e-05, |
| "loss": 3.5423, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.116463776965244e-05, |
| "loss": 3.5545, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.115626820094565e-05, |
| "loss": 3.5522, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.114788225343513e-05, |
| "loss": 3.5212, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.113949630592461e-05, |
| "loss": 3.5383, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.113111035841409e-05, |
| "loss": 3.5294, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.112272441090357e-05, |
| "loss": 3.5403, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.111433846339305e-05, |
| "loss": 3.5334, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.110595251588252e-05, |
| "loss": 3.54, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.109756656837201e-05, |
| "loss": 3.5378, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1089196999665224e-05, |
| "loss": 3.5569, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1080811052154704e-05, |
| "loss": 3.5422, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.107242510464418e-05, |
| "loss": 3.5378, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.106403915713366e-05, |
| "loss": 3.5339, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.105566958842687e-05, |
| "loss": 3.5537, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1047283640916346e-05, |
| "loss": 3.5354, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1038897693405826e-05, |
| "loss": 3.5324, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1030511745895306e-05, |
| "loss": 3.5412, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1022125798384786e-05, |
| "loss": 3.5333, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1013756229677995e-05, |
| "loss": 3.5326, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1005370282167475e-05, |
| "loss": 3.5392, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.099698433465696e-05, |
| "loss": 3.5358, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098859838714644e-05, |
| "loss": 3.5411, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098022881843965e-05, |
| "loss": 3.5471, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097184287092913e-05, |
| "loss": 3.5433, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.096345692341861e-05, |
| "loss": 3.5394, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.095507097590809e-05, |
| "loss": 3.5456, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.09467014072013e-05, |
| "loss": 3.5281, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.093831545969078e-05, |
| "loss": 3.5347, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.092992951218026e-05, |
| "loss": 3.5346, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.092154356466974e-05, |
| "loss": 3.5268, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.091317399596295e-05, |
| "loss": 3.536, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.090478804845243e-05, |
| "loss": 3.5395, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0896402100941916e-05, |
| "loss": 3.5332, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0888016153431396e-05, |
| "loss": 3.5405, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0879646584724605e-05, |
| "loss": 3.5421, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0871260637214085e-05, |
| "loss": 3.5406, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0862874689703565e-05, |
| "loss": 3.5371, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0854488742193045e-05, |
| "loss": 3.5214, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0846102794682525e-05, |
| "loss": 3.5363, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0837733225975734e-05, |
| "loss": 3.5392, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0829347278465214e-05, |
| "loss": 3.5373, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0820961330954694e-05, |
| "loss": 3.5348, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0812575383444174e-05, |
| "loss": 3.5305, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.080420581473738e-05, |
| "loss": 3.5204, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.079581986722686e-05, |
| "loss": 3.525, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.078743391971635e-05, |
| "loss": 3.5384, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.077904797220583e-05, |
| "loss": 3.5258, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.077067840349904e-05, |
| "loss": 3.5415, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076229245598852e-05, |
| "loss": 3.5378, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0753906508478e-05, |
| "loss": 3.5219, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.074552056096748e-05, |
| "loss": 3.5456, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.073715099226069e-05, |
| "loss": 3.5246, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.072876504475017e-05, |
| "loss": 3.5277, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.072037909723965e-05, |
| "loss": 3.5254, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071199314972913e-05, |
| "loss": 3.5363, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0703623581022336e-05, |
| "loss": 3.5218, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0695237633511816e-05, |
| "loss": 3.5331, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06868516860013e-05, |
| "loss": 3.5128, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.067846573849078e-05, |
| "loss": 3.5254, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.067009616978399e-05, |
| "loss": 3.5406, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.066171022227347e-05, |
| "loss": 3.533, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.065332427476295e-05, |
| "loss": 3.5348, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.064493832725243e-05, |
| "loss": 3.5369, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.063656875854564e-05, |
| "loss": 3.5443, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.062818281103512e-05, |
| "loss": 3.5303, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06197968635246e-05, |
| "loss": 3.5481, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061141091601408e-05, |
| "loss": 3.5261, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.060304134730729e-05, |
| "loss": 3.5264, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.059465539979677e-05, |
| "loss": 3.5322, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.058626945228626e-05, |
| "loss": 3.5421, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057788350477574e-05, |
| "loss": 3.5351, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0569513936068946e-05, |
| "loss": 3.5425, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0561127988558426e-05, |
| "loss": 3.5217, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0552742041047906e-05, |
| "loss": 3.5169, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0544356093537386e-05, |
| "loss": 3.5288, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0535986524830595e-05, |
| "loss": 3.5308, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0527600577320075e-05, |
| "loss": 3.5281, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0519214629809555e-05, |
| "loss": 3.5266, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0510828682299038e-05, |
| "loss": 3.5358, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0502459113592247e-05, |
| "loss": 3.528, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0494073166081727e-05, |
| "loss": 3.528, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0485687218571207e-05, |
| "loss": 3.5252, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0477301271060687e-05, |
| "loss": 3.5381, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.04689317023539e-05, |
| "loss": 3.5476, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.046054575484338e-05, |
| "loss": 3.5432, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.045215980733286e-05, |
| "loss": 3.5184, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.044377385982234e-05, |
| "loss": 3.5317, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.043540429111555e-05, |
| "loss": 3.5293, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.042701834360503e-05, |
| "loss": 3.5358, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.041863239609451e-05, |
| "loss": 3.5365, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0410246448583988e-05, |
| "loss": 3.5248, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.04018768798772e-05, |
| "loss": 3.529, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.039349093236668e-05, |
| "loss": 3.5356, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.038510498485616e-05, |
| "loss": 3.5357, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.037671903734564e-05, |
| "loss": 3.5271, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.036834946863885e-05, |
| "loss": 3.5279, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0359963521128333e-05, |
| "loss": 3.5202, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0351577573617813e-05, |
| "loss": 3.5361, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0343191626107293e-05, |
| "loss": 3.5199, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0334822057400502e-05, |
| "loss": 3.5237, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0326436109889982e-05, |
| "loss": 3.5242, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0318050162379462e-05, |
| "loss": 3.5373, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0309664214868942e-05, |
| "loss": 3.5268, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0301278267358425e-05, |
| "loss": 3.5277, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0292908698651634e-05, |
| "loss": 3.5431, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0284522751141114e-05, |
| "loss": 3.5384, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0276136803630594e-05, |
| "loss": 3.5392, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0267750856120074e-05, |
| "loss": 3.5323, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0259381287413287e-05, |
| "loss": 3.529, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0250995339902767e-05, |
| "loss": 3.5267, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0242609392392247e-05, |
| "loss": 3.5176, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0234223444881727e-05, |
| "loss": 3.5316, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0225853876174936e-05, |
| "loss": 3.5275, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0217467928664416e-05, |
| "loss": 3.5333, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0209081981153896e-05, |
| "loss": 3.5286, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0200696033643372e-05, |
| "loss": 3.534, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0192326464936588e-05, |
| "loss": 3.5257, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0183940517426068e-05, |
| "loss": 3.5252, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0175554569915548e-05, |
| "loss": 3.5342, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0167168622405025e-05, |
| "loss": 3.5314, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.015879905369824e-05, |
| "loss": 3.5404, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.015041310618772e-05, |
| "loss": 3.5297, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.01420271586772e-05, |
| "loss": 3.5333, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0133641211166674e-05, |
| "loss": 3.5337, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0125255263656153e-05, |
| "loss": 3.5299, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.011688569494937e-05, |
| "loss": 3.5331, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0108499747438846e-05, |
| "loss": 3.5288, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0100113799928326e-05, |
| "loss": 3.534, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0091727852417806e-05, |
| "loss": 3.5363, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0083358283711022e-05, |
| "loss": 3.5281, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0074972336200495e-05, |
| "loss": 3.5272, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0066586388689978e-05, |
| "loss": 3.5329, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0058200441179458e-05, |
| "loss": 3.5313, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0049814493668938e-05, |
| "loss": 3.5274, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0041444924962147e-05, |
| "loss": 3.54, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0033058977451627e-05, |
| "loss": 3.525, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0024673029941107e-05, |
| "loss": 3.5425, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0016287082430587e-05, |
| "loss": 3.5288, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.840222120285034, |
| "eval_runtime": 301.7358, |
| "eval_samples_per_second": 1264.653, |
| "eval_steps_per_second": 39.521, |
| "step": 1221109 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.00079175137238e-05, |
| "loss": 3.5263, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.999953156621328e-05, |
| "loss": 3.5236, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.999114561870276e-05, |
| "loss": 3.519, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.998275967119224e-05, |
| "loss": 3.5386, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.997439010248545e-05, |
| "loss": 3.5352, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9966004154974932e-05, |
| "loss": 3.5311, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9957618207464412e-05, |
| "loss": 3.5263, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9949232259953892e-05, |
| "loss": 3.5265, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.99408626912471e-05, |
| "loss": 3.5148, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.993247674373658e-05, |
| "loss": 3.5282, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.992409079622606e-05, |
| "loss": 3.5243, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.991570484871554e-05, |
| "loss": 3.5416, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.990735165881249e-05, |
| "loss": 3.537, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.989896571130197e-05, |
| "loss": 3.5088, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9890579763791442e-05, |
| "loss": 3.5207, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9882193816280922e-05, |
| "loss": 3.5192, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9873807868770402e-05, |
| "loss": 3.5233, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9865421921259886e-05, |
| "loss": 3.5237, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9857035973749366e-05, |
| "loss": 3.5248, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9848650026238845e-05, |
| "loss": 3.521, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9840280457532055e-05, |
| "loss": 3.5422, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9831894510021535e-05, |
| "loss": 3.5276, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9823508562511014e-05, |
| "loss": 3.522, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9815122615000494e-05, |
| "loss": 3.519, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9806753046293707e-05, |
| "loss": 3.5418, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9798367098783187e-05, |
| "loss": 3.5211, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9789981151272667e-05, |
| "loss": 3.5181, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9781595203762147e-05, |
| "loss": 3.5266, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9773209256251627e-05, |
| "loss": 3.5184, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.976483968754484e-05, |
| "loss": 3.5217, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.975645374003432e-05, |
| "loss": 3.5233, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.97480677925238e-05, |
| "loss": 3.5211, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.973968184501328e-05, |
| "loss": 3.5256, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9731312276306488e-05, |
| "loss": 3.5268, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9722926328795968e-05, |
| "loss": 3.5323, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9714540381285448e-05, |
| "loss": 3.5248, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.970615443377493e-05, |
| "loss": 3.5303, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.969778486506814e-05, |
| "loss": 3.5163, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.968939891755762e-05, |
| "loss": 3.5215, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.96810129700471e-05, |
| "loss": 3.5151, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.967262702253658e-05, |
| "loss": 3.5166, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9664257453829793e-05, |
| "loss": 3.5168, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9655871506319273e-05, |
| "loss": 3.5268, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9647485558808753e-05, |
| "loss": 3.5183, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9639099611298233e-05, |
| "loss": 3.5271, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9630730042591442e-05, |
| "loss": 3.5272, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9622344095080922e-05, |
| "loss": 3.5274, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9613958147570402e-05, |
| "loss": 3.5224, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9605572200059882e-05, |
| "loss": 3.5072, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9597186252549365e-05, |
| "loss": 3.5204, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9588816683842574e-05, |
| "loss": 3.5288, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9580430736332054e-05, |
| "loss": 3.5223, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9572044788821534e-05, |
| "loss": 3.516, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9563658841311014e-05, |
| "loss": 3.5176, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9555289272604227e-05, |
| "loss": 3.5118, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9546903325093706e-05, |
| "loss": 3.5058, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9538517377583186e-05, |
| "loss": 3.5291, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9530131430072666e-05, |
| "loss": 3.5105, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9521761861365875e-05, |
| "loss": 3.5249, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9513375913855355e-05, |
| "loss": 3.5213, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9504989966344835e-05, |
| "loss": 3.5068, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.949660401883432e-05, |
| "loss": 3.5315, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9488234450127528e-05, |
| "loss": 3.5134, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9479848502617008e-05, |
| "loss": 3.5158, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9471462555106488e-05, |
| "loss": 3.5059, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9463076607595968e-05, |
| "loss": 3.5246, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.945470703888918e-05, |
| "loss": 3.5097, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.944632109137866e-05, |
| "loss": 3.5147, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.943793514386814e-05, |
| "loss": 3.5026, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.942954919635762e-05, |
| "loss": 3.5091, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.942117962765083e-05, |
| "loss": 3.5231, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.941279368014031e-05, |
| "loss": 3.5266, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.940440773262979e-05, |
| "loss": 3.5144, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9396021785119272e-05, |
| "loss": 3.5216, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.938765221641248e-05, |
| "loss": 3.5329, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.937926626890196e-05, |
| "loss": 3.5159, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.937088032139144e-05, |
| "loss": 3.5346, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.936249437388092e-05, |
| "loss": 3.5113, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9354124805174134e-05, |
| "loss": 3.5153, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9345738857663614e-05, |
| "loss": 3.52, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9337352910153094e-05, |
| "loss": 3.526, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9328966962642574e-05, |
| "loss": 3.5204, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9320597393935783e-05, |
| "loss": 3.5303, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9312211446425263e-05, |
| "loss": 3.5106, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9303825498914743e-05, |
| "loss": 3.4992, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.929543955140422e-05, |
| "loss": 3.5168, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9287069982697435e-05, |
| "loss": 3.5173, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9278684035186915e-05, |
| "loss": 3.5133, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9270298087676395e-05, |
| "loss": 3.5123, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.926191214016587e-05, |
| "loss": 3.5229, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9253542571459088e-05, |
| "loss": 3.5156, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9245156623948567e-05, |
| "loss": 3.5132, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9236770676438047e-05, |
| "loss": 3.5129, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.922838472892752e-05, |
| "loss": 3.5248, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9220015160220736e-05, |
| "loss": 3.532, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9211629212710216e-05, |
| "loss": 3.5277, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9203243265199693e-05, |
| "loss": 3.5061, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9194857317689173e-05, |
| "loss": 3.5164, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.918648774898239e-05, |
| "loss": 3.5188, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.917810180147187e-05, |
| "loss": 3.5212, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9169715853961342e-05, |
| "loss": 3.5234, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9161329906450825e-05, |
| "loss": 3.5126, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.915296033774404e-05, |
| "loss": 3.5124, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.914457439023352e-05, |
| "loss": 3.522, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9136188442722994e-05, |
| "loss": 3.5247, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9127802495212474e-05, |
| "loss": 3.5113, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9119416547701954e-05, |
| "loss": 3.5186, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9111046978995167e-05, |
| "loss": 3.5092, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9102661031484647e-05, |
| "loss": 3.5137, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9094275083974127e-05, |
| "loss": 3.5121, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9085889136463607e-05, |
| "loss": 3.5096, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9077519567756816e-05, |
| "loss": 3.506, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9069133620246296e-05, |
| "loss": 3.5212, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.906074767273578e-05, |
| "loss": 3.5152, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.905236172522526e-05, |
| "loss": 3.5137, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9043992156518468e-05, |
| "loss": 3.5284, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9035606209007948e-05, |
| "loss": 3.5258, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9027220261497428e-05, |
| "loss": 3.5243, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9018834313986908e-05, |
| "loss": 3.519, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.901046474528012e-05, |
| "loss": 3.5158, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.90020787977696e-05, |
| "loss": 3.5099, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.899369285025908e-05, |
| "loss": 3.5089, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.898530690274856e-05, |
| "loss": 3.5136, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.897693733404177e-05, |
| "loss": 3.5164, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.896855138653125e-05, |
| "loss": 3.5208, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8960165439020733e-05, |
| "loss": 3.5141, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8951779491510213e-05, |
| "loss": 3.52, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8943409922803422e-05, |
| "loss": 3.5182, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.89350239752929e-05, |
| "loss": 3.5124, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.892663802778238e-05, |
| "loss": 3.5115, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.891825208027186e-05, |
| "loss": 3.5241, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8909882511565074e-05, |
| "loss": 3.5232, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8901496564054554e-05, |
| "loss": 3.5183, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8893110616544034e-05, |
| "loss": 3.5169, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8884724669033514e-05, |
| "loss": 3.5194, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8876355100326723e-05, |
| "loss": 3.5188, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8867969152816203e-05, |
| "loss": 3.5209, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8859583205305686e-05, |
| "loss": 3.5135, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8851197257795166e-05, |
| "loss": 3.5203, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8842827689088375e-05, |
| "loss": 3.523, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8834441741577855e-05, |
| "loss": 3.5145, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8826055794067335e-05, |
| "loss": 3.5124, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8817669846556815e-05, |
| "loss": 3.5186, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8809300277850028e-05, |
| "loss": 3.5195, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8800914330339508e-05, |
| "loss": 3.5075, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8792528382828988e-05, |
| "loss": 3.5277, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8784142435318468e-05, |
| "loss": 3.5132, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8775772866611677e-05, |
| "loss": 3.5248, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8767386919101157e-05, |
| "loss": 3.5189, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.875900097159064e-05, |
| "loss": 3.5067, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8409504890441895, |
| "eval_runtime": 303.2407, |
| "eval_samples_per_second": 1258.377, |
| "eval_steps_per_second": 39.325, |
| "step": 1297429 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.875061502408012e-05, |
| "loss": 3.5091, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.87422290765696e-05, |
| "loss": 3.5062, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.873384312905908e-05, |
| "loss": 3.5253, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.872547356035229e-05, |
| "loss": 3.5168, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.871708761284177e-05, |
| "loss": 3.5194, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.870870166533125e-05, |
| "loss": 3.5175, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.870031571782073e-05, |
| "loss": 3.5078, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8691929770310212e-05, |
| "loss": 3.5043, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.868356020160342e-05, |
| "loss": 3.516, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.86751742540929e-05, |
| "loss": 3.5064, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.866678830658238e-05, |
| "loss": 3.5299, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.865840235907186e-05, |
| "loss": 3.5187, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8650032790365074e-05, |
| "loss": 3.5031, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8641646842854554e-05, |
| "loss": 3.502, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8633260895344034e-05, |
| "loss": 3.5079, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8624874947833513e-05, |
| "loss": 3.5045, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8616489000322993e-05, |
| "loss": 3.5114, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8608103052812473e-05, |
| "loss": 3.5139, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.859971710530195e-05, |
| "loss": 3.5038, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.859133115779143e-05, |
| "loss": 3.5325, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8582961589084646e-05, |
| "loss": 3.5096, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8574575641574126e-05, |
| "loss": 3.5092, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.85661896940636e-05, |
| "loss": 3.5061, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8557820125356815e-05, |
| "loss": 3.5263, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8549434177846295e-05, |
| "loss": 3.5052, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.854104823033577e-05, |
| "loss": 3.5095, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.853266228282525e-05, |
| "loss": 3.5072, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.852427633531473e-05, |
| "loss": 3.5082, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8515906766607947e-05, |
| "loss": 3.5044, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.850752081909742e-05, |
| "loss": 3.5108, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8499134871586904e-05, |
| "loss": 3.5101, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8490748924076384e-05, |
| "loss": 3.508, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.84823793553696e-05, |
| "loss": 3.5147, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8473993407859073e-05, |
| "loss": 3.5188, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8465607460348553e-05, |
| "loss": 3.5106, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8457221512838033e-05, |
| "loss": 3.5151, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8448851944131245e-05, |
| "loss": 3.5031, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8440465996620725e-05, |
| "loss": 3.5112, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8432080049110205e-05, |
| "loss": 3.4995, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8423694101599685e-05, |
| "loss": 3.5089, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8415324532892894e-05, |
| "loss": 3.5042, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8406938585382374e-05, |
| "loss": 3.5116, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8398552637871857e-05, |
| "loss": 3.5036, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8390166690361337e-05, |
| "loss": 3.5149, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8381797121654546e-05, |
| "loss": 3.5123, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8373411174144026e-05, |
| "loss": 3.517, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8365025226633506e-05, |
| "loss": 3.5089, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8356639279122986e-05, |
| "loss": 3.4961, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8348253331612466e-05, |
| "loss": 3.5007, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.833988376290568e-05, |
| "loss": 3.5179, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.833149781539516e-05, |
| "loss": 3.5047, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.832311186788464e-05, |
| "loss": 3.5053, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.831472592037412e-05, |
| "loss": 3.4992, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8306356351667328e-05, |
| "loss": 3.5045, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.829797040415681e-05, |
| "loss": 3.4887, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.828958445664629e-05, |
| "loss": 3.5185, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.828119850913577e-05, |
| "loss": 3.4934, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.827282894042898e-05, |
| "loss": 3.5129, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.826444299291846e-05, |
| "loss": 3.5107, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.825605704540794e-05, |
| "loss": 3.4947, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.824767109789742e-05, |
| "loss": 3.5123, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8239301529190632e-05, |
| "loss": 3.5019, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8230915581680112e-05, |
| "loss": 3.5049, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8222529634169592e-05, |
| "loss": 3.4885, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8214143686659072e-05, |
| "loss": 3.5117, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.820577411795228e-05, |
| "loss": 3.496, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8197388170441765e-05, |
| "loss": 3.5028, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8189002222931245e-05, |
| "loss": 3.4863, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8180616275420725e-05, |
| "loss": 3.4985, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8172246706713934e-05, |
| "loss": 3.5081, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8163860759203414e-05, |
| "loss": 3.5111, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8155474811692894e-05, |
| "loss": 3.5079, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8147088864182373e-05, |
| "loss": 3.5071, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8138719295475586e-05, |
| "loss": 3.5143, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8130333347965066e-05, |
| "loss": 3.5048, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8121947400454546e-05, |
| "loss": 3.5168, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8113561452944026e-05, |
| "loss": 3.5029, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8105191884237235e-05, |
| "loss": 3.5046, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8096805936726718e-05, |
| "loss": 3.5018, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8088419989216198e-05, |
| "loss": 3.5118, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8080034041705678e-05, |
| "loss": 3.5091, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8071664472998887e-05, |
| "loss": 3.5152, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8063278525488367e-05, |
| "loss": 3.5037, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8054892577977847e-05, |
| "loss": 3.4861, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8046506630467327e-05, |
| "loss": 3.4985, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.803813706176054e-05, |
| "loss": 3.5042, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.802975111425002e-05, |
| "loss": 3.5016, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.80213651667395e-05, |
| "loss": 3.4964, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.801297921922898e-05, |
| "loss": 3.5109, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.800460965052219e-05, |
| "loss": 3.502, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7996223703011672e-05, |
| "loss": 3.4978, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7987837755501152e-05, |
| "loss": 3.5026, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7979451807990632e-05, |
| "loss": 3.5111, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.797108223928384e-05, |
| "loss": 3.5149, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.796269629177332e-05, |
| "loss": 3.5195, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.79543103442628e-05, |
| "loss": 3.4902, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.794592439675228e-05, |
| "loss": 3.5041, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7937554828045493e-05, |
| "loss": 3.5058, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7929168880534973e-05, |
| "loss": 3.5057, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7920782933024453e-05, |
| "loss": 3.5153, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7912396985513933e-05, |
| "loss": 3.4952, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7904027416807142e-05, |
| "loss": 3.499, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7895641469296626e-05, |
| "loss": 3.5112, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7887255521786106e-05, |
| "loss": 3.5129, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7878869574275586e-05, |
| "loss": 3.4955, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7870500005568795e-05, |
| "loss": 3.504, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7862114058058275e-05, |
| "loss": 3.4981, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7853728110547755e-05, |
| "loss": 3.4996, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7845342163037234e-05, |
| "loss": 3.4958, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7836956215526714e-05, |
| "loss": 3.4975, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7828586646819927e-05, |
| "loss": 3.4956, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7820200699309407e-05, |
| "loss": 3.5052, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7811814751798887e-05, |
| "loss": 3.5002, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7803428804288367e-05, |
| "loss": 3.5002, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7795059235581576e-05, |
| "loss": 3.5158, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.778667328807106e-05, |
| "loss": 3.5133, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.777828734056054e-05, |
| "loss": 3.5071, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.776990139305002e-05, |
| "loss": 3.5074, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7761531824343228e-05, |
| "loss": 3.5027, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7753145876832708e-05, |
| "loss": 3.4986, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7744759929322188e-05, |
| "loss": 3.4916, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7736373981811668e-05, |
| "loss": 3.5026, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.772800441310488e-05, |
| "loss": 3.5052, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.771961846559436e-05, |
| "loss": 3.5051, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.771123251808384e-05, |
| "loss": 3.5031, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.770284657057332e-05, |
| "loss": 3.5052, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.769447700186653e-05, |
| "loss": 3.5026, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7686091054356013e-05, |
| "loss": 3.4994, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7677705106845493e-05, |
| "loss": 3.4983, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7669319159334973e-05, |
| "loss": 3.5113, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7660949590628182e-05, |
| "loss": 3.5094, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7652563643117662e-05, |
| "loss": 3.5064, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7644177695607142e-05, |
| "loss": 3.5006, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7635791748096622e-05, |
| "loss": 3.5043, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.76274058005861e-05, |
| "loss": 3.5042, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7619036231879314e-05, |
| "loss": 3.511, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7610650284368794e-05, |
| "loss": 3.5009, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7602264336858267e-05, |
| "loss": 3.5073, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.759387838934775e-05, |
| "loss": 3.5033, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7585508820640967e-05, |
| "loss": 3.5037, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7577122873130447e-05, |
| "loss": 3.5021, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.756873692561992e-05, |
| "loss": 3.5045, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.75603509781094e-05, |
| "loss": 3.5067, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7551981409402616e-05, |
| "loss": 3.4948, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7543595461892092e-05, |
| "loss": 3.5169, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7535209514381572e-05, |
| "loss": 3.5038, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7526823566871052e-05, |
| "loss": 3.5094, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7518453998164268e-05, |
| "loss": 3.5064, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.751006805065374e-05, |
| "loss": 3.4946, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8430843353271484, |
| "eval_runtime": 306.7261, |
| "eval_samples_per_second": 1244.078, |
| "eval_steps_per_second": 38.878, |
| "step": 1373749 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.750168210314322e-05, |
| "loss": 3.4961, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7493296155632704e-05, |
| "loss": 3.4919, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7484910208122184e-05, |
| "loss": 3.5129, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7476524260611664e-05, |
| "loss": 3.5, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7468154691904873e-05, |
| "loss": 3.5123, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7459768744394353e-05, |
| "loss": 3.499, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7451382796883833e-05, |
| "loss": 3.5001, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7442996849373313e-05, |
| "loss": 3.4882, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7434627280666526e-05, |
| "loss": 3.4992, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7426241333156006e-05, |
| "loss": 3.4973, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7417855385645486e-05, |
| "loss": 3.5138, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7409469438134966e-05, |
| "loss": 3.5047, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.740111624823191e-05, |
| "loss": 3.4898, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.739273030072139e-05, |
| "loss": 3.4906, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7384344353210867e-05, |
| "loss": 3.5023, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7375958405700347e-05, |
| "loss": 3.4827, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7367572458189827e-05, |
| "loss": 3.5003, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7359186510679307e-05, |
| "loss": 3.4994, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7350800563168787e-05, |
| "loss": 3.492, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7342414615658267e-05, |
| "loss": 3.5173, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.733404504695148e-05, |
| "loss": 3.4971, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.732565909944096e-05, |
| "loss": 3.4991, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.731727315193044e-05, |
| "loss": 3.493, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.730888720441992e-05, |
| "loss": 3.5142, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.730051763571313e-05, |
| "loss": 3.4853, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7292131688202612e-05, |
| "loss": 3.5018, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.728374574069209e-05, |
| "loss": 3.4902, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.727535979318157e-05, |
| "loss": 3.495, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.726699022447478e-05, |
| "loss": 3.4973, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.725860427696426e-05, |
| "loss": 3.4931, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.725021832945374e-05, |
| "loss": 3.4982, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.724183238194322e-05, |
| "loss": 3.4968, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7233462813236433e-05, |
| "loss": 3.4984, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7225076865725913e-05, |
| "loss": 3.5084, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7216690918215393e-05, |
| "loss": 3.4953, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7208304970704873e-05, |
| "loss": 3.5009, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7199935401998082e-05, |
| "loss": 3.4915, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7191549454487565e-05, |
| "loss": 3.4949, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7183163506977045e-05, |
| "loss": 3.4874, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7174777559466525e-05, |
| "loss": 3.4933, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7166391611956005e-05, |
| "loss": 3.4909, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7158022043249214e-05, |
| "loss": 3.5029, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7149636095738694e-05, |
| "loss": 3.4902, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7141250148228174e-05, |
| "loss": 3.5011, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7132864200717658e-05, |
| "loss": 3.5029, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7124494632010867e-05, |
| "loss": 3.4988, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7116108684500347e-05, |
| "loss": 3.4958, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7107722736989827e-05, |
| "loss": 3.4838, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7099336789479307e-05, |
| "loss": 3.4906, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.709096722077252e-05, |
| "loss": 3.503, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7082581273262e-05, |
| "loss": 3.4924, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.707419532575148e-05, |
| "loss": 3.4956, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.706580937824096e-05, |
| "loss": 3.4822, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7057439809534168e-05, |
| "loss": 3.4939, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7049053862023648e-05, |
| "loss": 3.4751, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7040667914513128e-05, |
| "loss": 3.5069, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7032281967002608e-05, |
| "loss": 3.4792, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.702389601949209e-05, |
| "loss": 3.5019, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.70155264507853e-05, |
| "loss": 3.4936, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.700714050327478e-05, |
| "loss": 3.4814, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.699875455576426e-05, |
| "loss": 3.5014, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.699036860825374e-05, |
| "loss": 3.4913, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6981999039546953e-05, |
| "loss": 3.4884, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6973613092036433e-05, |
| "loss": 3.4786, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6965227144525913e-05, |
| "loss": 3.5021, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6956841197015393e-05, |
| "loss": 3.4828, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.69484716283086e-05, |
| "loss": 3.4885, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.694008568079808e-05, |
| "loss": 3.4753, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.693169973328756e-05, |
| "loss": 3.4856, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6923313785777045e-05, |
| "loss": 3.4953, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6914944217070254e-05, |
| "loss": 3.5003, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6906558269559734e-05, |
| "loss": 3.4897, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6898172322049214e-05, |
| "loss": 3.4985, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6889786374538694e-05, |
| "loss": 3.4996, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6881416805831906e-05, |
| "loss": 3.4942, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6873030858321386e-05, |
| "loss": 3.4998, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6864644910810866e-05, |
| "loss": 3.4941, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6856258963300346e-05, |
| "loss": 3.4897, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6847889394593555e-05, |
| "loss": 3.4893, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6839503447083035e-05, |
| "loss": 3.4978, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6831117499572515e-05, |
| "loss": 3.4994, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6822731552062e-05, |
| "loss": 3.5028, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6814361983355208e-05, |
| "loss": 3.4885, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6805976035844688e-05, |
| "loss": 3.4684, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6797590088334168e-05, |
| "loss": 3.4864, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6789204140823644e-05, |
| "loss": 3.494, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.678083457211686e-05, |
| "loss": 3.4896, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.677244862460634e-05, |
| "loss": 3.4828, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.676406267709582e-05, |
| "loss": 3.497, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6755676729585293e-05, |
| "loss": 3.4899, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.674730716087851e-05, |
| "loss": 3.4805, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.673892121336799e-05, |
| "loss": 3.4876, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.673053526585747e-05, |
| "loss": 3.5014, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6722149318346945e-05, |
| "loss": 3.4991, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.671377974964016e-05, |
| "loss": 3.5031, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.670539380212964e-05, |
| "loss": 3.484, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6697007854619114e-05, |
| "loss": 3.491, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6688621907108598e-05, |
| "loss": 3.4912, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6680252338401814e-05, |
| "loss": 3.4939, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6671866390891294e-05, |
| "loss": 3.4991, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6663480443380767e-05, |
| "loss": 3.4844, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6655094495870247e-05, |
| "loss": 3.4878, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6646724927163463e-05, |
| "loss": 3.4969, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6638338979652943e-05, |
| "loss": 3.5013, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.662995303214242e-05, |
| "loss": 3.4834, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.66215670846319e-05, |
| "loss": 3.4894, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6613197515925115e-05, |
| "loss": 3.4894, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6604811568414588e-05, |
| "loss": 3.4876, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6596425620904068e-05, |
| "loss": 3.479, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.658803967339355e-05, |
| "loss": 3.4829, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6579670104686767e-05, |
| "loss": 3.4857, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.657128415717624e-05, |
| "loss": 3.492, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.656289820966572e-05, |
| "loss": 3.4847, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.65545122621552e-05, |
| "loss": 3.4881, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6546142693448416e-05, |
| "loss": 3.503, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6537756745937893e-05, |
| "loss": 3.5014, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6529370798427373e-05, |
| "loss": 3.4936, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6520984850916853e-05, |
| "loss": 3.4966, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6512615282210062e-05, |
| "loss": 3.4887, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6504229334699542e-05, |
| "loss": 3.4835, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6495843387189022e-05, |
| "loss": 3.481, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6487457439678505e-05, |
| "loss": 3.4856, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6479087870971714e-05, |
| "loss": 3.492, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6470701923461194e-05, |
| "loss": 3.4965, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6462315975950674e-05, |
| "loss": 3.4878, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6453930028440154e-05, |
| "loss": 3.4925, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6445560459733367e-05, |
| "loss": 3.4913, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6437174512222847e-05, |
| "loss": 3.485, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6428788564712326e-05, |
| "loss": 3.4854, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6420402617201806e-05, |
| "loss": 3.4989, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6412033048495016e-05, |
| "loss": 3.4982, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6403647100984495e-05, |
| "loss": 3.495, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6395261153473975e-05, |
| "loss": 3.4873, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.638687520596346e-05, |
| "loss": 3.4897, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6378505637256668e-05, |
| "loss": 3.4924, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6370119689746148e-05, |
| "loss": 3.5052, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6361733742235628e-05, |
| "loss": 3.4881, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6353347794725108e-05, |
| "loss": 3.4917, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.634497822601832e-05, |
| "loss": 3.4885, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.63365922785078e-05, |
| "loss": 3.4932, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.632820633099728e-05, |
| "loss": 3.4872, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.631982038348676e-05, |
| "loss": 3.4914, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.631145081477997e-05, |
| "loss": 3.4957, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.630306486726945e-05, |
| "loss": 3.4805, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.629467891975893e-05, |
| "loss": 3.5021, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6286292972248412e-05, |
| "loss": 3.4953, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.627792340354162e-05, |
| "loss": 3.4923, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.62695374560311e-05, |
| "loss": 3.4928, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.626115150852058e-05, |
| "loss": 3.4805, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.843136787414551, |
| "eval_runtime": 310.7009, |
| "eval_samples_per_second": 1228.162, |
| "eval_steps_per_second": 38.381, |
| "step": 1450069 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 1.0085987995368929e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |