| { |
| "best_metric": 3.8492636680603027, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/rel-cl2/transformer/1/checkpoints/checkpoint-915840", |
| "epoch": 1.0250006060157382, |
| "eval_steps": 10, |
| "global_step": 1908000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.9931, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8206, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.2067, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 6.0045, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8356, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.7277, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.6144, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.5429, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4633, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.4028, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.364, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.316, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 5.269, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989098268236324e-05, |
| "loss": 5.2209, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988259673485272e-05, |
| "loss": 5.1951, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.98742107873422e-05, |
| "loss": 5.1482, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.1226, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.0968, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.064, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0333, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.0177, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 4.986, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9694, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9416, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.9363, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 4.901, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.8872, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8597, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.8517, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756873037409844e-05, |
| "loss": 4.8274, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.8213, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.8086, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.7926, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 4.7812, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 4.7671, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970659010995419e-05, |
| "loss": 4.7526, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969820416244367e-05, |
| "loss": 4.7505, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.7196, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.7092, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 4.6879, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.6908, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965627442489107e-05, |
| "loss": 4.6757, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964788847738054e-05, |
| "loss": 4.6789, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963951890867376e-05, |
| "loss": 4.6558, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 4.6507, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 4.6451, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96143610661422e-05, |
| "loss": 4.6392, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960597511863168e-05, |
| "loss": 4.6264, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959758917112116e-05, |
| "loss": 4.6105, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9589219602414374e-05, |
| "loss": 4.6, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958085003370758e-05, |
| "loss": 4.6196, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957246408619706e-05, |
| "loss": 4.6088, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956407813868654e-05, |
| "loss": 4.5851, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9555692191176016e-05, |
| "loss": 4.5728, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9547306243665496e-05, |
| "loss": 4.5805, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9538920296154976e-05, |
| "loss": 4.5585, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530534348644456e-05, |
| "loss": 4.5683, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522148401133936e-05, |
| "loss": 4.5391, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951376245362342e-05, |
| "loss": 4.5604, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.5444, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949700693740611e-05, |
| "loss": 4.5224, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948862098989559e-05, |
| "loss": 4.5168, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948023504238507e-05, |
| "loss": 4.53, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947184909487455e-05, |
| "loss": 4.4904, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 4.5044, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945509357865724e-05, |
| "loss": 4.5108, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944670763114672e-05, |
| "loss": 4.495, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94383216836362e-05, |
| "loss": 4.4886, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942995211492941e-05, |
| "loss": 4.471, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421582546222625e-05, |
| "loss": 4.4701, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9413196598712105e-05, |
| "loss": 4.4657, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404810651201585e-05, |
| "loss": 4.483, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396424703691065e-05, |
| "loss": 4.458, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388038756180545e-05, |
| "loss": 4.468, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379652808670025e-05, |
| "loss": 4.4616, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371266861159505e-05, |
| "loss": 4.4608, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362897292452714e-05, |
| "loss": 4.4439, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354511344942194e-05, |
| "loss": 4.4426, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346125397431674e-05, |
| "loss": 4.4391, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337739449921154e-05, |
| "loss": 4.435, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329353502410634e-05, |
| "loss": 4.4266, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9320967554900114e-05, |
| "loss": 4.4196, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 4.4218, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9304195659879074e-05, |
| "loss": 4.4218, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 4.4004, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 4.4067, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 4.4066, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 4.4098, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926229867993394e-05, |
| "loss": 4.3933, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925392911122715e-05, |
| "loss": 4.3914, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924554316371663e-05, |
| "loss": 4.3841, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923715721620611e-05, |
| "loss": 4.3783, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922877126869559e-05, |
| "loss": 4.3791, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922038532118507e-05, |
| "loss": 4.3816, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921201575247828e-05, |
| "loss": 4.3752, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920362980496776e-05, |
| "loss": 4.3825, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919524385745724e-05, |
| "loss": 4.3724, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918685790994672e-05, |
| "loss": 4.3567, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91784719624362e-05, |
| "loss": 4.3554, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9170086014925676e-05, |
| "loss": 4.3648, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9161700067415156e-05, |
| "loss": 4.3604, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9153314119904636e-05, |
| "loss": 4.3596, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914494455119785e-05, |
| "loss": 4.3484, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136558603687325e-05, |
| "loss": 4.3395, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128172656176805e-05, |
| "loss": 4.3523, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119786708666285e-05, |
| "loss": 4.3292, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911143351876323e-05, |
| "loss": 4.3351, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910304757125272e-05, |
| "loss": 4.3313, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90946616237422e-05, |
| "loss": 4.3313, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908627567623168e-05, |
| "loss": 4.321, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907788972872115e-05, |
| "loss": 4.3209, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906950378121063e-05, |
| "loss": 4.315, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906111783370011e-05, |
| "loss": 4.322, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9052748264993326e-05, |
| "loss": 4.3163, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90443623174828e-05, |
| "loss": 4.3067, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903597636997228e-05, |
| "loss": 4.3196, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902759042246176e-05, |
| "loss": 4.323, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9019220853754975e-05, |
| "loss": 4.3104, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9010834906244455e-05, |
| "loss": 4.2994, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9002448958733935e-05, |
| "loss": 4.2952, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8994063011223415e-05, |
| "loss": 4.2948, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985677063712895e-05, |
| "loss": 4.3004, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977291116202375e-05, |
| "loss": 4.2969, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968905168691855e-05, |
| "loss": 4.2846, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960535599985064e-05, |
| "loss": 4.2939, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952149652474544e-05, |
| "loss": 4.2869, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943763704964024e-05, |
| "loss": 4.2863, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935377757453504e-05, |
| "loss": 4.2799, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8926991809942984e-05, |
| "loss": 4.2802, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918605862432463e-05, |
| "loss": 4.2728, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8910219914921943e-05, |
| "loss": 4.2827, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8901833967411423e-05, |
| "loss": 4.2813, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889346439870464e-05, |
| "loss": 4.2799, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888507845119412e-05, |
| "loss": 4.2573, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887670888248733e-05, |
| "loss": 4.2724, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886832293497681e-05, |
| "loss": 4.2666, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885993698746629e-05, |
| "loss": 4.2693, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885155103995577e-05, |
| "loss": 4.2594, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884316509244525e-05, |
| "loss": 4.2672, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883479552373846e-05, |
| "loss": 4.2572, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882640957622794e-05, |
| "loss": 4.2737, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881802362871742e-05, |
| "loss": 4.2439, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88096376812069e-05, |
| "loss": 4.2614, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880125173369638e-05, |
| "loss": 4.2468, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8792882164989586e-05, |
| "loss": 4.2432, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878449621747907e-05, |
| "loss": 4.2471, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877611026996855e-05, |
| "loss": 4.2341, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876772432245803e-05, |
| "loss": 4.2379, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875933837494751e-05, |
| "loss": 4.2444, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8750952427436986e-05, |
| "loss": 4.2329, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.200936317443848, |
| "eval_runtime": 303.3774, |
| "eval_samples_per_second": 1257.81, |
| "eval_steps_per_second": 39.307, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8742566479926466e-05, |
| "loss": 4.2284, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8734180532415946e-05, |
| "loss": 4.2245, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.872581096370916e-05, |
| "loss": 4.2383, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8717425016198635e-05, |
| "loss": 4.2284, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709039068688115e-05, |
| "loss": 4.2377, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700653121177595e-05, |
| "loss": 4.2149, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692267173667075e-05, |
| "loss": 4.2172, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683881226156555e-05, |
| "loss": 4.2077, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.867551165744977e-05, |
| "loss": 4.2168, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.866712570993925e-05, |
| "loss": 4.2138, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865873976242873e-05, |
| "loss": 4.2097, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865035381491821e-05, |
| "loss": 4.2144, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864198424621142e-05, |
| "loss": 4.1946, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.86335982987009e-05, |
| "loss": 4.2068, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862521235119038e-05, |
| "loss": 4.1924, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.861682640367986e-05, |
| "loss": 4.193, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860844045616934e-05, |
| "loss": 4.1975, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860005450865882e-05, |
| "loss": 4.1908, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.85916685611483e-05, |
| "loss": 4.1948, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858328261363778e-05, |
| "loss": 4.2094, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.857489666612726e-05, |
| "loss": 4.1843, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8566527097420475e-05, |
| "loss": 4.1883, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8558141149909955e-05, |
| "loss": 4.1875, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8549755202399435e-05, |
| "loss": 4.1935, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541369254888915e-05, |
| "loss": 4.1843, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532983307378395e-05, |
| "loss": 4.1848, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524613738671604e-05, |
| "loss": 4.1704, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8516227791161084e-05, |
| "loss": 4.1762, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8507841843650564e-05, |
| "loss": 4.1656, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8499455896140044e-05, |
| "loss": 4.175, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8491069948629524e-05, |
| "loss": 4.1702, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.848270037992273e-05, |
| "loss": 4.1736, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847431443241221e-05, |
| "loss": 4.1779, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.846592848490169e-05, |
| "loss": 4.1655, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845755891619491e-05, |
| "loss": 4.1719, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844917296868439e-05, |
| "loss": 4.1682, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844078702117387e-05, |
| "loss": 4.1574, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843240107366335e-05, |
| "loss": 4.1509, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842401512615282e-05, |
| "loss": 4.1432, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84156291786423e-05, |
| "loss": 4.1506, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840724323113178e-05, |
| "loss": 4.1483, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839885728362126e-05, |
| "loss": 4.1594, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839047133611074e-05, |
| "loss": 4.1446, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838208538860022e-05, |
| "loss": 4.1449, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.837371581989343e-05, |
| "loss": 4.1496, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836532987238292e-05, |
| "loss": 4.1418, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83569439248724e-05, |
| "loss": 4.1443, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834855797736188e-05, |
| "loss": 4.1306, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8340188408655086e-05, |
| "loss": 4.1282, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8331802461144566e-05, |
| "loss": 4.152, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8323416513634046e-05, |
| "loss": 4.1481, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8315030566123526e-05, |
| "loss": 4.131, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8306644618613006e-05, |
| "loss": 4.1331, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298275049906215e-05, |
| "loss": 4.1391, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289889102395695e-05, |
| "loss": 4.1196, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281503154885175e-05, |
| "loss": 4.1384, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273117207374655e-05, |
| "loss": 4.1138, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8264731259864135e-05, |
| "loss": 4.1401, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8256345312353615e-05, |
| "loss": 4.1346, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8247959364843095e-05, |
| "loss": 4.1165, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823958979613631e-05, |
| "loss": 4.1145, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823120384862579e-05, |
| "loss": 4.1289, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.822281790111527e-05, |
| "loss": 4.0973, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821443195360475e-05, |
| "loss": 4.1129, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820606238489796e-05, |
| "loss": 4.1245, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819767643738744e-05, |
| "loss": 4.1161, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818929048987692e-05, |
| "loss": 4.1088, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.81809045423664e-05, |
| "loss": 4.0975, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817255135246334e-05, |
| "loss": 4.1041, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816416540495282e-05, |
| "loss": 4.1054, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8155779457442305e-05, |
| "loss": 4.1117, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8147393509931785e-05, |
| "loss": 4.1015, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8139007562421265e-05, |
| "loss": 4.1116, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8130621614910745e-05, |
| "loss": 4.1126, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8122235667400224e-05, |
| "loss": 4.1107, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8113849719889704e-05, |
| "loss": 4.0946, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8105480151182914e-05, |
| "loss": 4.1032, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8097094203672393e-05, |
| "loss": 4.1025, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8088708256161873e-05, |
| "loss": 4.0981, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808032230865135e-05, |
| "loss": 4.0955, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8071936361140827e-05, |
| "loss": 4.0918, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806356679243404e-05, |
| "loss": 4.1002, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.805518084492352e-05, |
| "loss": 4.095, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8046794897413e-05, |
| "loss": 4.0801, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803840894990248e-05, |
| "loss": 4.0878, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80300393811957e-05, |
| "loss": 4.0892, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802165343368518e-05, |
| "loss": 4.0959, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801326748617466e-05, |
| "loss": 4.0873, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.800488153866413e-05, |
| "loss": 4.0824, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.799649559115361e-05, |
| "loss": 4.0816, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.798812602244683e-05, |
| "loss": 4.0728, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79797400749363e-05, |
| "loss": 4.0769, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797135412742578e-05, |
| "loss": 4.0861, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.796296817991526e-05, |
| "loss": 4.0831, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7954598611208476e-05, |
| "loss": 4.0915, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7946212663697956e-05, |
| "loss": 4.081, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7937826716187436e-05, |
| "loss": 4.0724, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7929440768676916e-05, |
| "loss": 4.071, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7921054821166396e-05, |
| "loss": 4.0783, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7912685252459605e-05, |
| "loss": 4.0827, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7904299304949085e-05, |
| "loss": 4.082, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7895913357438565e-05, |
| "loss": 4.0721, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7887527409928045e-05, |
| "loss": 4.0617, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7879157841221254e-05, |
| "loss": 4.0815, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7870771893710734e-05, |
| "loss": 4.0601, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.786240232500395e-05, |
| "loss": 4.0673, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.785401637749343e-05, |
| "loss": 4.0671, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.784563042998291e-05, |
| "loss": 4.0657, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.783724448247239e-05, |
| "loss": 4.0545, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782885853496187e-05, |
| "loss": 4.0638, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782047258745135e-05, |
| "loss": 4.0539, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.781208663994083e-05, |
| "loss": 4.0633, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.780370069243031e-05, |
| "loss": 4.0609, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.779531474491979e-05, |
| "loss": 4.0529, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7786945176213e-05, |
| "loss": 4.0737, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777855922870248e-05, |
| "loss": 4.072, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777017328119196e-05, |
| "loss": 4.064, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.776178733368144e-05, |
| "loss": 4.0552, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.775340138617092e-05, |
| "loss": 4.0483, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7745031817464134e-05, |
| "loss": 4.0548, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7736645869953614e-05, |
| "loss": 4.0513, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7728259922443094e-05, |
| "loss": 4.061, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7719873974932574e-05, |
| "loss": 4.043, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771150440622578e-05, |
| "loss": 4.0527, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770311845871526e-05, |
| "loss": 4.0543, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769473251120474e-05, |
| "loss": 4.0552, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768634656369422e-05, |
| "loss": 4.0429, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767797699498743e-05, |
| "loss": 4.0489, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766959104747691e-05, |
| "loss": 4.0479, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766120509996639e-05, |
| "loss": 4.0496, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765281915245587e-05, |
| "loss": 4.0541, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764444958374909e-05, |
| "loss": 4.0545, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.763606363623857e-05, |
| "loss": 4.0322, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762767768872805e-05, |
| "loss": 4.0452, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761930812002126e-05, |
| "loss": 4.0486, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761092217251074e-05, |
| "loss": 4.0459, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.760253622500022e-05, |
| "loss": 4.0417, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75941502774897e-05, |
| "loss": 4.0482, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758576432997918e-05, |
| "loss": 4.0414, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757737838246866e-05, |
| "loss": 4.0585, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7568992434958137e-05, |
| "loss": 4.0316, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7560622866251346e-05, |
| "loss": 4.0462, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7552236918740826e-05, |
| "loss": 4.0367, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7543850971230306e-05, |
| "loss": 4.0306, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753548140252352e-05, |
| "loss": 4.0378, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7527095455013e-05, |
| "loss": 4.0289, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751870950750248e-05, |
| "loss": 4.0302, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751032355999196e-05, |
| "loss": 4.0317, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7501937612481435e-05, |
| "loss": 4.0289, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.030747413635254, |
| "eval_runtime": 317.6174, |
| "eval_samples_per_second": 1201.417, |
| "eval_steps_per_second": 37.545, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493551664970914e-05, |
| "loss": 4.0236, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7485165717460394e-05, |
| "loss": 4.0207, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7476779769949874e-05, |
| "loss": 4.0383, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7468393822439354e-05, |
| "loss": 4.0271, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7460007874928834e-05, |
| "loss": 4.0439, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451621927418314e-05, |
| "loss": 4.0167, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443235979907794e-05, |
| "loss": 4.0239, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434850032397274e-05, |
| "loss": 4.0108, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426464084886754e-05, |
| "loss": 4.0255, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7418078137376234e-05, |
| "loss": 4.0209, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740969218986572e-05, |
| "loss": 4.0158, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740132262115893e-05, |
| "loss": 4.0285, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739295305245214e-05, |
| "loss": 4.0052, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738456710494162e-05, |
| "loss": 4.022, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73761811574311e-05, |
| "loss": 4.0038, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736779520992058e-05, |
| "loss": 4.0067, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735940926241006e-05, |
| "loss": 4.005, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735102331489954e-05, |
| "loss": 4.0143, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734263736738902e-05, |
| "loss": 4.0088, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73342514198785e-05, |
| "loss": 4.0286, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732588185117171e-05, |
| "loss": 4.0062, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731749590366119e-05, |
| "loss": 4.0108, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7309109956150674e-05, |
| "loss": 4.0087, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7300724008640154e-05, |
| "loss": 4.0156, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7292354439933364e-05, |
| "loss": 4.0045, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7283968492422844e-05, |
| "loss": 4.0103, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7275582544912323e-05, |
| "loss": 3.9977, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.72671965974018e-05, |
| "loss": 4.0036, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725882702869501e-05, |
| "loss": 3.991, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725044108118449e-05, |
| "loss": 4.0048, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724205513367397e-05, |
| "loss": 4.0034, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723368556496718e-05, |
| "loss": 4.0037, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722529961745666e-05, |
| "loss": 4.0112, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721691366994614e-05, |
| "loss": 3.9987, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720854410123936e-05, |
| "loss": 3.9976, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720015815372884e-05, |
| "loss": 4.0052, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719177220621832e-05, |
| "loss": 3.9932, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71833862587078e-05, |
| "loss": 3.9868, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717500031119727e-05, |
| "loss": 3.9785, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716661436368675e-05, |
| "loss": 3.9864, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715822841617623e-05, |
| "loss": 3.987, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714984246866571e-05, |
| "loss": 3.9975, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714147289995892e-05, |
| "loss": 3.9872, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71330869524484e-05, |
| "loss": 3.9797, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712470100493788e-05, |
| "loss": 3.9943, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7116315057427366e-05, |
| "loss": 3.9865, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7107945488720575e-05, |
| "loss": 3.9843, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099559541210055e-05, |
| "loss": 3.9756, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091173593699535e-05, |
| "loss": 3.9697, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7082787646189015e-05, |
| "loss": 3.996, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7074418077482224e-05, |
| "loss": 3.9941, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7066032129971704e-05, |
| "loss": 3.9787, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7057646182461184e-05, |
| "loss": 3.9796, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704927661375439e-05, |
| "loss": 3.9874, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704089066624387e-05, |
| "loss": 3.9647, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.703250471873335e-05, |
| "loss": 3.9848, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702411877122283e-05, |
| "loss": 3.9652, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701573282371232e-05, |
| "loss": 3.9849, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70073468762018e-05, |
| "loss": 3.9901, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699896092869128e-05, |
| "loss": 3.9705, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699059135998449e-05, |
| "loss": 3.9634, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698220541247397e-05, |
| "loss": 3.9835, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697381946496345e-05, |
| "loss": 3.9542, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696543351745293e-05, |
| "loss": 3.9682, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695706394874614e-05, |
| "loss": 3.9779, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694867800123562e-05, |
| "loss": 3.9753, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69402920537251e-05, |
| "loss": 3.9671, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693190610621458e-05, |
| "loss": 3.9555, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692355291631152e-05, |
| "loss": 3.9593, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6915166968801e-05, |
| "loss": 3.9619, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690678102129048e-05, |
| "loss": 3.9722, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689839507377996e-05, |
| "loss": 3.9602, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689000912626944e-05, |
| "loss": 3.9732, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688162317875892e-05, |
| "loss": 3.9726, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68732372312484e-05, |
| "loss": 3.9722, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686485128373788e-05, |
| "loss": 3.9563, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.685648171503109e-05, |
| "loss": 3.9632, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684809576752057e-05, |
| "loss": 3.9677, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683970982001005e-05, |
| "loss": 3.9595, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683132387249953e-05, |
| "loss": 3.9614, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682295430379274e-05, |
| "loss": 3.9611, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681456835628222e-05, |
| "loss": 3.9591, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680618240877171e-05, |
| "loss": 3.9637, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679779646126119e-05, |
| "loss": 3.9452, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6789426892554396e-05, |
| "loss": 3.9573, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6781040945043876e-05, |
| "loss": 3.9553, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6772654997533356e-05, |
| "loss": 3.9647, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6764269050022836e-05, |
| "loss": 3.9546, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6755899481316045e-05, |
| "loss": 3.9523, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6747513533805525e-05, |
| "loss": 3.9498, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6739127586295005e-05, |
| "loss": 3.9457, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6730741638784485e-05, |
| "loss": 3.9438, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722372070077694e-05, |
| "loss": 3.9537, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713986122567174e-05, |
| "loss": 3.963, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.670560017505666e-05, |
| "loss": 3.9611, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.669723060634987e-05, |
| "loss": 3.953, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668884465883935e-05, |
| "loss": 3.9447, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668045871132883e-05, |
| "loss": 3.9478, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667207276381831e-05, |
| "loss": 3.9482, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.666368681630779e-05, |
| "loss": 3.9573, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665530086879727e-05, |
| "loss": 3.9552, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664691492128675e-05, |
| "loss": 3.9473, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663852897377623e-05, |
| "loss": 3.9376, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663015940506944e-05, |
| "loss": 3.9587, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662177345755892e-05, |
| "loss": 3.9363, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66133875100484e-05, |
| "loss": 3.9467, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660500156253788e-05, |
| "loss": 3.9397, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6596631993831094e-05, |
| "loss": 3.9446, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65882624251243e-05, |
| "loss": 3.9359, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657987647761378e-05, |
| "loss": 3.9447, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657149053010326e-05, |
| "loss": 3.936, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656310458259274e-05, |
| "loss": 3.9367, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655471863508222e-05, |
| "loss": 3.9474, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65463326875717e-05, |
| "loss": 3.9315, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6537946740061176e-05, |
| "loss": 3.9549, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529560792550656e-05, |
| "loss": 3.9524, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652119122384387e-05, |
| "loss": 3.9445, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651280527633335e-05, |
| "loss": 3.9369, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650441932882283e-05, |
| "loss": 3.9336, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649603338131231e-05, |
| "loss": 3.9352, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648766381260553e-05, |
| "loss": 3.9354, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479277865095e-05, |
| "loss": 3.9426, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647089191758448e-05, |
| "loss": 3.9261, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.646250597007396e-05, |
| "loss": 3.9374, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645413640136718e-05, |
| "loss": 3.9411, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644575045385665e-05, |
| "loss": 3.9379, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643736450634613e-05, |
| "loss": 3.9298, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642897855883561e-05, |
| "loss": 3.9321, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642060899012882e-05, |
| "loss": 3.9357, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6412223042618306e-05, |
| "loss": 3.9365, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6403837095107786e-05, |
| "loss": 3.938, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6395451147597266e-05, |
| "loss": 3.9427, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6387081578890475e-05, |
| "loss": 3.9188, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6378695631379955e-05, |
| "loss": 3.9358, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6370309683869435e-05, |
| "loss": 3.9372, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6361923736358915e-05, |
| "loss": 3.9319, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6353554167652124e-05, |
| "loss": 3.9297, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6345168220141604e-05, |
| "loss": 3.9416, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633679865143482e-05, |
| "loss": 3.9316, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632841270392429e-05, |
| "loss": 3.9469, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632002675641377e-05, |
| "loss": 3.9236, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631164080890326e-05, |
| "loss": 3.9321, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630325486139274e-05, |
| "loss": 3.9288, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629486891388222e-05, |
| "loss": 3.9222, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628649934517543e-05, |
| "loss": 3.9267, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.627811339766491e-05, |
| "loss": 3.922, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626972745015439e-05, |
| "loss": 3.9201, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626134150264387e-05, |
| "loss": 3.9239, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625295555513335e-05, |
| "loss": 3.9231, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9575917720794678, |
| "eval_runtime": 314.063, |
| "eval_samples_per_second": 1215.014, |
| "eval_steps_per_second": 37.97, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.624456960762283e-05, |
| "loss": 3.9228, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.623618366011231e-05, |
| "loss": 3.9151, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622781409140552e-05, |
| "loss": 3.9298, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6219428143895e-05, |
| "loss": 3.9221, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621104219638448e-05, |
| "loss": 3.9399, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620265624887396e-05, |
| "loss": 3.9147, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619428668016717e-05, |
| "loss": 3.9202, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.618590073265665e-05, |
| "loss": 3.9068, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617751478514613e-05, |
| "loss": 3.9216, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616912883763561e-05, |
| "loss": 3.9181, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616075926892882e-05, |
| "loss": 3.9119, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61523733214183e-05, |
| "loss": 3.9192, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614400375271151e-05, |
| "loss": 3.9072, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.613561780520099e-05, |
| "loss": 3.921, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.612723185769047e-05, |
| "loss": 3.9053, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611884591017995e-05, |
| "loss": 3.9044, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611045996266943e-05, |
| "loss": 3.905, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.610207401515891e-05, |
| "loss": 3.9112, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.609368806764839e-05, |
| "loss": 3.9075, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.608530212013788e-05, |
| "loss": 3.9276, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6076932551431087e-05, |
| "loss": 3.9086, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6068546603920566e-05, |
| "loss": 3.9177, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6060177035213776e-05, |
| "loss": 3.9075, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6051791087703256e-05, |
| "loss": 3.914, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6043405140192735e-05, |
| "loss": 3.9064, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6035019192682215e-05, |
| "loss": 3.9112, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6026633245171695e-05, |
| "loss": 3.9041, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6018247297661175e-05, |
| "loss": 3.9008, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6009861350150655e-05, |
| "loss": 3.8976, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.6001491781443864e-05, |
| "loss": 3.9059, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5993105833933344e-05, |
| "loss": 3.9092, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598471988642283e-05, |
| "loss": 3.9063, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597633393891231e-05, |
| "loss": 3.9164, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.596796437020552e-05, |
| "loss": 3.9058, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595959480149873e-05, |
| "loss": 3.9005, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595120885398821e-05, |
| "loss": 3.9064, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.594282290647769e-05, |
| "loss": 3.9055, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.593443695896717e-05, |
| "loss": 3.8898, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.592605101145665e-05, |
| "loss": 3.8859, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.591768144274986e-05, |
| "loss": 3.8902, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590929549523934e-05, |
| "loss": 3.898, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590090954772882e-05, |
| "loss": 3.9028, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.58925236002183e-05, |
| "loss": 3.8989, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5884137652707785e-05, |
| "loss": 3.8852, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.587575170519726e-05, |
| "loss": 3.9008, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.586736575768674e-05, |
| "loss": 3.8954, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585897981017622e-05, |
| "loss": 3.8932, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585061024146943e-05, |
| "loss": 3.8831, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.584222429395891e-05, |
| "loss": 3.8722, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.583383834644839e-05, |
| "loss": 3.9127, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.582545239893787e-05, |
| "loss": 3.8991, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5817082830231076e-05, |
| "loss": 3.8909, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5808696882720556e-05, |
| "loss": 3.8924, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580032731401377e-05, |
| "loss": 3.8925, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.579194136650325e-05, |
| "loss": 3.8765, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.578355541899273e-05, |
| "loss": 3.8941, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577516947148221e-05, |
| "loss": 3.8809, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.576678352397169e-05, |
| "loss": 3.8902, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575839757646117e-05, |
| "loss": 3.9042, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575001162895065e-05, |
| "loss": 3.8822, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574162568144013e-05, |
| "loss": 3.8724, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.573325611273334e-05, |
| "loss": 3.9005, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.572487016522282e-05, |
| "loss": 3.8671, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.57164842177123e-05, |
| "loss": 3.8764, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.570811464900551e-05, |
| "loss": 3.8939, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569972870149499e-05, |
| "loss": 3.8871, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5691342753984476e-05, |
| "loss": 3.8798, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5682973185277685e-05, |
| "loss": 3.8737, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5674587237767165e-05, |
| "loss": 3.8682, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5666201290256645e-05, |
| "loss": 3.8745, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5657815342746125e-05, |
| "loss": 3.8885, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5649429395235605e-05, |
| "loss": 3.8708, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5641043447725085e-05, |
| "loss": 3.8905, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5632673879018294e-05, |
| "loss": 3.8834, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5624287931507774e-05, |
| "loss": 3.8909, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5615901983997254e-05, |
| "loss": 3.8741, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5607516036486734e-05, |
| "loss": 3.8738, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5599130088976214e-05, |
| "loss": 3.8831, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5590744141465694e-05, |
| "loss": 3.8721, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.558237457275891e-05, |
| "loss": 3.8838, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.557398862524839e-05, |
| "loss": 3.8749, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556560267773787e-05, |
| "loss": 3.8748, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.555721673022735e-05, |
| "loss": 3.8784, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554883078271683e-05, |
| "loss": 3.8641, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554044483520631e-05, |
| "loss": 3.8723, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.553205888769579e-05, |
| "loss": 3.8738, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.552367294018526e-05, |
| "loss": 3.8815, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.551530337147848e-05, |
| "loss": 3.8698, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.550691742396796e-05, |
| "loss": 3.8726, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549853147645744e-05, |
| "loss": 3.8682, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549014552894691e-05, |
| "loss": 3.8626, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548175958143639e-05, |
| "loss": 3.8632, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.547337363392588e-05, |
| "loss": 3.8706, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.546498768641536e-05, |
| "loss": 3.8803, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.545660173890484e-05, |
| "loss": 3.8838, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.544823217019805e-05, |
| "loss": 3.8735, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543984622268753e-05, |
| "loss": 3.861, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543146027517701e-05, |
| "loss": 3.8659, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5423090706470216e-05, |
| "loss": 3.8706, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5414704758959696e-05, |
| "loss": 3.8765, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5406318811449176e-05, |
| "loss": 3.8757, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5397932863938656e-05, |
| "loss": 3.8704, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5389546916428136e-05, |
| "loss": 3.8585, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5381177347721345e-05, |
| "loss": 3.8828, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.537279140021083e-05, |
| "loss": 3.8539, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.536440545270031e-05, |
| "loss": 3.871, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535601950518979e-05, |
| "loss": 3.8599, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.534763355767927e-05, |
| "loss": 3.8679, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533926398897248e-05, |
| "loss": 3.8579, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533087804146196e-05, |
| "loss": 3.8651, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.532249209395144e-05, |
| "loss": 3.8592, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.531410614644092e-05, |
| "loss": 3.8593, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.53057201989304e-05, |
| "loss": 3.8671, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.529733425141988e-05, |
| "loss": 3.8571, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528896468271309e-05, |
| "loss": 3.8744, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528057873520257e-05, |
| "loss": 3.8761, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.527219278769205e-05, |
| "loss": 3.8703, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526380684018153e-05, |
| "loss": 3.8587, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5255420892671017e-05, |
| "loss": 3.8597, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5247034945160496e-05, |
| "loss": 3.8575, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5238648997649976e-05, |
| "loss": 3.8583, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.523026305013945e-05, |
| "loss": 3.8683, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221893481432665e-05, |
| "loss": 3.845, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5213507533922145e-05, |
| "loss": 3.8636, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5205121586411625e-05, |
| "loss": 3.8611, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.51967356389011e-05, |
| "loss": 3.8667, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5188366070194314e-05, |
| "loss": 3.8536, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5179996501487524e-05, |
| "loss": 3.8582, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5171610553977003e-05, |
| "loss": 3.8549, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5163224606466483e-05, |
| "loss": 3.8685, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515483865895597e-05, |
| "loss": 3.859, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514645271144545e-05, |
| "loss": 3.8704, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513808314273866e-05, |
| "loss": 3.8451, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512969719522814e-05, |
| "loss": 3.8616, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512131124771762e-05, |
| "loss": 3.8634, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.51129253002071e-05, |
| "loss": 3.855, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.510455573150031e-05, |
| "loss": 3.8563, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.509616978398979e-05, |
| "loss": 3.8665, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.508778383647927e-05, |
| "loss": 3.8614, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507939788896874e-05, |
| "loss": 3.8753, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507102832026196e-05, |
| "loss": 3.8487, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.506264237275144e-05, |
| "loss": 3.8618, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5054256425240924e-05, |
| "loss": 3.852, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.50458704777304e-05, |
| "loss": 3.8532, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.503748453021988e-05, |
| "loss": 3.8576, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502911496151309e-05, |
| "loss": 3.8503, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502072901400257e-05, |
| "loss": 3.8487, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5012343066492046e-05, |
| "loss": 3.8482, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5003957118981526e-05, |
| "loss": 3.8535, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.9170939922332764, |
| "eval_runtime": 317.3907, |
| "eval_samples_per_second": 1202.275, |
| "eval_steps_per_second": 37.572, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4995571171471006e-05, |
| "loss": 3.8477, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4987185223960486e-05, |
| "loss": 3.8429, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4978799276449966e-05, |
| "loss": 3.8618, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4970413328939446e-05, |
| "loss": 3.851, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4962027381428926e-05, |
| "loss": 3.8663, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4953641433918406e-05, |
| "loss": 3.8479, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4945255486407886e-05, |
| "loss": 3.8482, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.493686953889737e-05, |
| "loss": 3.8356, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492849997019058e-05, |
| "loss": 3.8504, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492011402268006e-05, |
| "loss": 3.8477, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.491172807516954e-05, |
| "loss": 3.8462, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.490334212765902e-05, |
| "loss": 3.8461, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.489497255895223e-05, |
| "loss": 3.8373, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.488658661144171e-05, |
| "loss": 3.8526, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.487820066393119e-05, |
| "loss": 3.8354, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486981471642067e-05, |
| "loss": 3.8349, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486142876891015e-05, |
| "loss": 3.8358, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.485304282139963e-05, |
| "loss": 3.8464, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.484465687388911e-05, |
| "loss": 3.8378, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.483627092637859e-05, |
| "loss": 3.8572, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.482788497886807e-05, |
| "loss": 3.8415, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481949903135755e-05, |
| "loss": 3.8499, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481112946265076e-05, |
| "loss": 3.8395, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480274351514024e-05, |
| "loss": 3.8417, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479435756762972e-05, |
| "loss": 3.8392, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47859716201192e-05, |
| "loss": 3.8444, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477758567260868e-05, |
| "loss": 3.8362, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476919972509816e-05, |
| "loss": 3.8379, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476081377758764e-05, |
| "loss": 3.8296, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475244420888085e-05, |
| "loss": 3.8379, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474405826137033e-05, |
| "loss": 3.8439, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473567231385981e-05, |
| "loss": 3.8419, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4727286366349295e-05, |
| "loss": 3.8463, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4718900418838775e-05, |
| "loss": 3.836, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4710514471328255e-05, |
| "loss": 3.8366, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4702144902621464e-05, |
| "loss": 3.8454, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4693758955110944e-05, |
| "loss": 3.8418, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4685373007600424e-05, |
| "loss": 3.8195, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4676987060089903e-05, |
| "loss": 3.8259, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466861749138311e-05, |
| "loss": 3.8237, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466023154387259e-05, |
| "loss": 3.8283, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465184559636207e-05, |
| "loss": 3.8407, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464345964885155e-05, |
| "loss": 3.8305, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463507370134103e-05, |
| "loss": 3.8262, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.462670413263425e-05, |
| "loss": 3.8335, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461831818512373e-05, |
| "loss": 3.8268, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460993223761321e-05, |
| "loss": 3.8316, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460154629010269e-05, |
| "loss": 3.8215, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459316034259217e-05, |
| "loss": 3.8017, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.458477439508165e-05, |
| "loss": 3.8499, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457638844757112e-05, |
| "loss": 3.8353, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.45680025000606e-05, |
| "loss": 3.8325, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455963293135382e-05, |
| "loss": 3.827, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551263362647026e-05, |
| "loss": 3.8276, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542877415136506e-05, |
| "loss": 3.8155, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534491467625986e-05, |
| "loss": 3.8281, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4526105520115466e-05, |
| "loss": 3.8218, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.451773595140868e-05, |
| "loss": 3.8242, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450935000389816e-05, |
| "loss": 3.8401, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450096405638764e-05, |
| "loss": 3.8177, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.449257810887712e-05, |
| "loss": 3.8133, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448420854017033e-05, |
| "loss": 3.8366, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.447582259265981e-05, |
| "loss": 3.806, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.446743664514929e-05, |
| "loss": 3.8164, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4459050697638764e-05, |
| "loss": 3.8284, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445068112893198e-05, |
| "loss": 3.8251, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444229518142146e-05, |
| "loss": 3.8167, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443390923391094e-05, |
| "loss": 3.8144, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442552328640042e-05, |
| "loss": 3.8041, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44171373388899e-05, |
| "loss": 3.8131, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440875139137938e-05, |
| "loss": 3.8246, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4400381822672595e-05, |
| "loss": 3.8135, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.439199587516207e-05, |
| "loss": 3.8237, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438360992765155e-05, |
| "loss": 3.8243, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437522398014103e-05, |
| "loss": 3.8312, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436683803263051e-05, |
| "loss": 3.8134, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435846846392372e-05, |
| "loss": 3.8104, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.43500825164132e-05, |
| "loss": 3.8221, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434169656890268e-05, |
| "loss": 3.8094, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.433331062139216e-05, |
| "loss": 3.8279, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.432494105268537e-05, |
| "loss": 3.8159, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431655510517485e-05, |
| "loss": 3.8144, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430816915766433e-05, |
| "loss": 3.8212, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429978321015381e-05, |
| "loss": 3.8054, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429139726264329e-05, |
| "loss": 3.8109, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.42830276939365e-05, |
| "loss": 3.8129, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.427464174642598e-05, |
| "loss": 3.8226, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.426625579891546e-05, |
| "loss": 3.8072, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.425786985140494e-05, |
| "loss": 3.8136, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424948390389442e-05, |
| "loss": 3.8088, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424111433518763e-05, |
| "loss": 3.8094, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.423272838767711e-05, |
| "loss": 3.8003, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.422434244016659e-05, |
| "loss": 3.8132, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.421595649265607e-05, |
| "loss": 3.8213, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.420757054514556e-05, |
| "loss": 3.8213, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419920097643877e-05, |
| "loss": 3.8186, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419081502892825e-05, |
| "loss": 3.8015, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.418242908141773e-05, |
| "loss": 3.8061, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.417404313390721e-05, |
| "loss": 3.812, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4165673565200416e-05, |
| "loss": 3.8184, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4157287617689896e-05, |
| "loss": 3.8164, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4148901670179376e-05, |
| "loss": 3.8111, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4140515722668856e-05, |
| "loss": 3.8041, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4132129775158336e-05, |
| "loss": 3.8238, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4123760206451545e-05, |
| "loss": 3.7982, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4115374258941025e-05, |
| "loss": 3.8117, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.410698831143051e-05, |
| "loss": 3.8019, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409860236391999e-05, |
| "loss": 3.8112, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409021641640947e-05, |
| "loss": 3.7997, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.408183046889895e-05, |
| "loss": 3.8099, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407346090019216e-05, |
| "loss": 3.7995, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.406507495268164e-05, |
| "loss": 3.8061, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405668900517112e-05, |
| "loss": 3.8089, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40483030576606e-05, |
| "loss": 3.8022, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403993348895381e-05, |
| "loss": 3.816, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403154754144329e-05, |
| "loss": 3.8183, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402316159393277e-05, |
| "loss": 3.8102, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.401477564642225e-05, |
| "loss": 3.8078, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4006406077715465e-05, |
| "loss": 3.7998, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3998020130204945e-05, |
| "loss": 3.8045, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3989634182694425e-05, |
| "loss": 3.801, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3981248235183905e-05, |
| "loss": 3.8137, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397286228767338e-05, |
| "loss": 3.7871, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396447634016286e-05, |
| "loss": 3.8074, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3956106771456074e-05, |
| "loss": 3.8037, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.394772082394555e-05, |
| "loss": 3.8101, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393933487643503e-05, |
| "loss": 3.801, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393094892892451e-05, |
| "loss": 3.8036, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392257936021772e-05, |
| "loss": 3.7984, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39141934127072e-05, |
| "loss": 3.8126, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.390580746519668e-05, |
| "loss": 3.8067, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.389742151768616e-05, |
| "loss": 3.8112, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388905194897937e-05, |
| "loss": 3.792, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388066600146885e-05, |
| "loss": 3.8077, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.387228005395833e-05, |
| "loss": 3.8083, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.386389410644781e-05, |
| "loss": 3.8002, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.385552453774102e-05, |
| "loss": 3.8022, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.38471385902305e-05, |
| "loss": 3.8104, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383875264271998e-05, |
| "loss": 3.8038, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383036669520946e-05, |
| "loss": 3.8236, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.382199712650267e-05, |
| "loss": 3.7942, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3813611178992157e-05, |
| "loss": 3.8058, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3805225231481637e-05, |
| "loss": 3.7971, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3796839283971116e-05, |
| "loss": 3.7989, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3788453336460596e-05, |
| "loss": 3.8038, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3780083767753806e-05, |
| "loss": 3.7975, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3771697820243285e-05, |
| "loss": 3.7996, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3763311872732765e-05, |
| "loss": 3.7862, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3754925925222245e-05, |
| "loss": 3.804, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8917667865753174, |
| "eval_runtime": 304.2835, |
| "eval_samples_per_second": 1254.064, |
| "eval_steps_per_second": 39.19, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3746539977711725e-05, |
| "loss": 3.8007, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3738170409004934e-05, |
| "loss": 3.7878, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3729784461494414e-05, |
| "loss": 3.8045, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3721398513983894e-05, |
| "loss": 3.8023, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3713012566473374e-05, |
| "loss": 3.8089, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.370464299776659e-05, |
| "loss": 3.7938, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.369625705025607e-05, |
| "loss": 3.7965, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.368787110274555e-05, |
| "loss": 3.7848, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367948515523503e-05, |
| "loss": 3.7985, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367111558652824e-05, |
| "loss": 3.7956, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.366272963901772e-05, |
| "loss": 3.7965, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36543436915072e-05, |
| "loss": 3.7901, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.364597412280041e-05, |
| "loss": 3.7896, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.363758817528989e-05, |
| "loss": 3.7923, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362920222777937e-05, |
| "loss": 3.7851, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362081628026885e-05, |
| "loss": 3.7796, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.361243033275833e-05, |
| "loss": 3.785, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.360404438524781e-05, |
| "loss": 3.7925, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.359565843773729e-05, |
| "loss": 3.7855, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3587272490226775e-05, |
| "loss": 3.8048, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3578902921519984e-05, |
| "loss": 3.7923, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3570516974009464e-05, |
| "loss": 3.7994, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3562131026498944e-05, |
| "loss": 3.7866, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3553745078988424e-05, |
| "loss": 3.7892, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.354537551028163e-05, |
| "loss": 3.786, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.353698956277111e-05, |
| "loss": 3.7967, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352860361526059e-05, |
| "loss": 3.7864, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352021766775007e-05, |
| "loss": 3.7844, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351184809904328e-05, |
| "loss": 3.7797, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.350346215153276e-05, |
| "loss": 3.7886, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.349507620402224e-05, |
| "loss": 3.7898, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.348669025651173e-05, |
| "loss": 3.791, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347832068780494e-05, |
| "loss": 3.7941, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346993474029442e-05, |
| "loss": 3.7902, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.34615487927839e-05, |
| "loss": 3.786, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.345316284527338e-05, |
| "loss": 3.7947, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3444793276566586e-05, |
| "loss": 3.7896, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3436407329056066e-05, |
| "loss": 3.7747, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3428021381545546e-05, |
| "loss": 3.7753, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3419635434035026e-05, |
| "loss": 3.7711, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3411249486524506e-05, |
| "loss": 3.7762, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3402879917817715e-05, |
| "loss": 3.7909, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3394493970307195e-05, |
| "loss": 3.7841, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.338610802279668e-05, |
| "loss": 3.7742, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3377722075286155e-05, |
| "loss": 3.7848, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3369336127775635e-05, |
| "loss": 3.7773, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3360950180265115e-05, |
| "loss": 3.7841, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.335258061155833e-05, |
| "loss": 3.7711, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3344194664047804e-05, |
| "loss": 3.7528, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3335808716537284e-05, |
| "loss": 3.8019, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3327422769026764e-05, |
| "loss": 3.7832, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3319036821516244e-05, |
| "loss": 3.7872, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331066725280945e-05, |
| "loss": 3.774, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.330229768410267e-05, |
| "loss": 3.7829, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.329391173659215e-05, |
| "loss": 3.7655, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.328552578908163e-05, |
| "loss": 3.7765, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.327713984157111e-05, |
| "loss": 3.7768, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326875389406059e-05, |
| "loss": 3.775, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326036794655007e-05, |
| "loss": 3.7957, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.325198199903955e-05, |
| "loss": 3.7686, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.324359605152903e-05, |
| "loss": 3.7628, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.323522648282224e-05, |
| "loss": 3.79, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.322684053531172e-05, |
| "loss": 3.7569, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32184545878012e-05, |
| "loss": 3.766, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321006864029068e-05, |
| "loss": 3.782, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320169907158389e-05, |
| "loss": 3.7817, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3193313124073373e-05, |
| "loss": 3.763, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.318492717656285e-05, |
| "loss": 3.7723, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.317654122905233e-05, |
| "loss": 3.7545, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.316817166034554e-05, |
| "loss": 3.7693, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.315978571283502e-05, |
| "loss": 3.7754, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.31513997653245e-05, |
| "loss": 3.764, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.314301381781398e-05, |
| "loss": 3.7775, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.313464424910719e-05, |
| "loss": 3.7752, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.312625830159667e-05, |
| "loss": 3.7866, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311787235408615e-05, |
| "loss": 3.764, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310948640657563e-05, |
| "loss": 3.77, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310110045906511e-05, |
| "loss": 3.7747, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.309273089035833e-05, |
| "loss": 3.763, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.308434494284781e-05, |
| "loss": 3.7763, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.307595899533729e-05, |
| "loss": 3.7706, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.306757304782677e-05, |
| "loss": 3.7679, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3059203479119976e-05, |
| "loss": 3.7758, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3050817531609456e-05, |
| "loss": 3.7595, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3042431584098936e-05, |
| "loss": 3.7606, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3034045636588416e-05, |
| "loss": 3.7691, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3025676067881625e-05, |
| "loss": 3.7738, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3017290120371105e-05, |
| "loss": 3.764, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3008904172860585e-05, |
| "loss": 3.766, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3000518225350065e-05, |
| "loss": 3.7566, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.299214865664328e-05, |
| "loss": 3.7653, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.298376270913276e-05, |
| "loss": 3.7567, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.297537676162224e-05, |
| "loss": 3.7645, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.296699081411172e-05, |
| "loss": 3.7789, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295862124540493e-05, |
| "loss": 3.7696, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295023529789441e-05, |
| "loss": 3.7727, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.294184935038389e-05, |
| "loss": 3.7575, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.293346340287337e-05, |
| "loss": 3.7608, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.292509383416658e-05, |
| "loss": 3.763, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291670788665606e-05, |
| "loss": 3.7759, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.290832193914554e-05, |
| "loss": 3.7683, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289993599163502e-05, |
| "loss": 3.7725, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289156642292823e-05, |
| "loss": 3.7595, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2883180475417714e-05, |
| "loss": 3.7752, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2874794527907194e-05, |
| "loss": 3.7557, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2866408580396674e-05, |
| "loss": 3.7674, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2858039011689883e-05, |
| "loss": 3.7524, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284965306417936e-05, |
| "loss": 3.7702, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284126711666884e-05, |
| "loss": 3.7555, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.283288116915832e-05, |
| "loss": 3.7618, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.28244952216478e-05, |
| "loss": 3.7578, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.281612565294101e-05, |
| "loss": 3.7596, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.280773970543049e-05, |
| "loss": 3.761, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279935375791997e-05, |
| "loss": 3.7595, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279096781040945e-05, |
| "loss": 3.7692, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.278259824170267e-05, |
| "loss": 3.7714, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.277421229419215e-05, |
| "loss": 3.7676, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.276582634668163e-05, |
| "loss": 3.7675, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.275744039917111e-05, |
| "loss": 3.753, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274907083046432e-05, |
| "loss": 3.7584, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.27406848829538e-05, |
| "loss": 3.7603, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.273229893544328e-05, |
| "loss": 3.7671, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.272391298793276e-05, |
| "loss": 3.7441, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2715543419225966e-05, |
| "loss": 3.7621, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2707157471715446e-05, |
| "loss": 3.7629, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2698771524204926e-05, |
| "loss": 3.7613, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2690385576694406e-05, |
| "loss": 3.7577, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.268201600798762e-05, |
| "loss": 3.7602, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26736300604771e-05, |
| "loss": 3.7579, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.266524411296658e-05, |
| "loss": 3.7688, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2656858165456055e-05, |
| "loss": 3.7655, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264848859674927e-05, |
| "loss": 3.7671, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264010264923875e-05, |
| "loss": 3.7493, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.263171670172823e-05, |
| "loss": 3.7632, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2623330754217704e-05, |
| "loss": 3.7653, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.261496118551092e-05, |
| "loss": 3.756, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26065752380004e-05, |
| "loss": 3.7602, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.259818929048988e-05, |
| "loss": 3.7663, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258980334297936e-05, |
| "loss": 3.7632, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2581433774272575e-05, |
| "loss": 3.7751, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2573047826762055e-05, |
| "loss": 3.7555, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.256466187925153e-05, |
| "loss": 3.7617, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.255627593174101e-05, |
| "loss": 3.7563, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2547906363034224e-05, |
| "loss": 3.7582, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2539520415523704e-05, |
| "loss": 3.7557, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253113446801318e-05, |
| "loss": 3.7529, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252274852050266e-05, |
| "loss": 3.7591, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251437895179587e-05, |
| "loss": 3.7419, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.250599300428535e-05, |
| "loss": 3.7641, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8763163089752197, |
| "eval_runtime": 303.2875, |
| "eval_samples_per_second": 1258.182, |
| "eval_steps_per_second": 39.319, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2497607056774826e-05, |
| "loss": 3.7587, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248922110926431e-05, |
| "loss": 3.7461, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248083516175379e-05, |
| "loss": 3.7597, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247244921424327e-05, |
| "loss": 3.7584, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246406326673275e-05, |
| "loss": 3.7676, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245567731922223e-05, |
| "loss": 3.7518, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244729137171171e-05, |
| "loss": 3.7552, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243890542420119e-05, |
| "loss": 3.7416, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24305358554944e-05, |
| "loss": 3.7554, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242214990798388e-05, |
| "loss": 3.7548, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.241376396047336e-05, |
| "loss": 3.7532, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.240537801296284e-05, |
| "loss": 3.7472, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239702482305978e-05, |
| "loss": 3.7468, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238863887554927e-05, |
| "loss": 3.7493, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238025292803875e-05, |
| "loss": 3.7473, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237186698052823e-05, |
| "loss": 3.7373, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236348103301771e-05, |
| "loss": 3.7427, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235509508550719e-05, |
| "loss": 3.7497, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234670913799667e-05, |
| "loss": 3.7461, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2338323190486147e-05, |
| "loss": 3.7605, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2329953621779356e-05, |
| "loss": 3.7535, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2321567674268836e-05, |
| "loss": 3.7593, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2313181726758316e-05, |
| "loss": 3.7408, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2304795779247796e-05, |
| "loss": 3.7478, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2296426210541005e-05, |
| "loss": 3.7458, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2288040263030485e-05, |
| "loss": 3.7551, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2279654315519965e-05, |
| "loss": 3.744, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227126836800945e-05, |
| "loss": 3.7474, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226289879930266e-05, |
| "loss": 3.7402, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.225451285179214e-05, |
| "loss": 3.7435, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.224612690428162e-05, |
| "loss": 3.7451, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.22377409567711e-05, |
| "loss": 3.7547, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222937138806431e-05, |
| "loss": 3.7538, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222098544055379e-05, |
| "loss": 3.7509, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221259949304327e-05, |
| "loss": 3.7445, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.220422992433648e-05, |
| "loss": 3.7485, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219584397682596e-05, |
| "loss": 3.7505, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.218745802931544e-05, |
| "loss": 3.7372, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217907208180492e-05, |
| "loss": 3.7323, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21706861342944e-05, |
| "loss": 3.7307, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2162300186783885e-05, |
| "loss": 3.7396, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2153914239273365e-05, |
| "loss": 3.7456, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214552829176284e-05, |
| "loss": 3.7437, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2137158723056054e-05, |
| "loss": 3.7363, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2128772775545534e-05, |
| "loss": 3.739, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2120386828035014e-05, |
| "loss": 3.7391, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211200088052449e-05, |
| "loss": 3.7437, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21036313118177e-05, |
| "loss": 3.7313, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.209524536430718e-05, |
| "loss": 3.7129, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208685941679666e-05, |
| "loss": 3.7568, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2078473469286136e-05, |
| "loss": 3.7417, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207010390057935e-05, |
| "loss": 3.7504, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206171795306884e-05, |
| "loss": 3.7376, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.205334838436205e-05, |
| "loss": 3.7413, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204496243685153e-05, |
| "loss": 3.7275, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.203657648934101e-05, |
| "loss": 3.7359, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.202819054183049e-05, |
| "loss": 3.7401, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201980459431996e-05, |
| "loss": 3.7337, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201141864680944e-05, |
| "loss": 3.7555, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200303269929892e-05, |
| "loss": 3.7329, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19946467517884e-05, |
| "loss": 3.7243, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.198627718308161e-05, |
| "loss": 3.7503, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197789123557109e-05, |
| "loss": 3.7189, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1969505288060576e-05, |
| "loss": 3.7219, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1961119340550056e-05, |
| "loss": 3.7467, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1952749771843265e-05, |
| "loss": 3.7431, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1944363824332745e-05, |
| "loss": 3.7229, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1935977876822225e-05, |
| "loss": 3.7347, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1927591929311705e-05, |
| "loss": 3.7157, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1919222360604914e-05, |
| "loss": 3.7319, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1910836413094394e-05, |
| "loss": 3.7342, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1902450465583874e-05, |
| "loss": 3.7291, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1894064518073354e-05, |
| "loss": 3.7373, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188569494936656e-05, |
| "loss": 3.7362, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.187730900185604e-05, |
| "loss": 3.7461, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186892305434553e-05, |
| "loss": 3.7297, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186053710683501e-05, |
| "loss": 3.7325, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.185216753812822e-05, |
| "loss": 3.7347, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18437815906177e-05, |
| "loss": 3.7246, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.183539564310718e-05, |
| "loss": 3.7369, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182700969559666e-05, |
| "loss": 3.7326, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181864012688987e-05, |
| "loss": 3.7269, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181025417937935e-05, |
| "loss": 3.7434, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180186823186883e-05, |
| "loss": 3.7167, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.179349866316204e-05, |
| "loss": 3.7215, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178511271565152e-05, |
| "loss": 3.7325, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1776726768141e-05, |
| "loss": 3.7369, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1768340820630484e-05, |
| "loss": 3.7273, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.175997125192369e-05, |
| "loss": 3.7314, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175158530441317e-05, |
| "loss": 3.7169, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174319935690265e-05, |
| "loss": 3.7299, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173481340939213e-05, |
| "loss": 3.7189, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.172644384068534e-05, |
| "loss": 3.7276, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171805789317482e-05, |
| "loss": 3.7399, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.17096719456643e-05, |
| "loss": 3.7324, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170128599815378e-05, |
| "loss": 3.7349, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.169291642944699e-05, |
| "loss": 3.7216, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.168453048193647e-05, |
| "loss": 3.7245, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167614453442595e-05, |
| "loss": 3.7242, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166775858691544e-05, |
| "loss": 3.7401, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1659389018208646e-05, |
| "loss": 3.7318, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1651003070698126e-05, |
| "loss": 3.7348, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1642617123187606e-05, |
| "loss": 3.7199, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1634231175677086e-05, |
| "loss": 3.7353, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1625861606970295e-05, |
| "loss": 3.7219, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1617475659459775e-05, |
| "loss": 3.7295, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1609089711949255e-05, |
| "loss": 3.7156, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1600703764438735e-05, |
| "loss": 3.7306, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1592350574535674e-05, |
| "loss": 3.7182, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158396462702516e-05, |
| "loss": 3.728, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.157557867951464e-05, |
| "loss": 3.7167, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156719273200412e-05, |
| "loss": 3.7295, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15588067844936e-05, |
| "loss": 3.7216, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155042083698308e-05, |
| "loss": 3.7204, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154203488947256e-05, |
| "loss": 3.7346, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153364894196204e-05, |
| "loss": 3.7371, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152527937325525e-05, |
| "loss": 3.7313, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151689342574473e-05, |
| "loss": 3.726, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150850747823421e-05, |
| "loss": 3.722, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150012153072369e-05, |
| "loss": 3.7216, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14917519620169e-05, |
| "loss": 3.7241, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148336601450638e-05, |
| "loss": 3.7276, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147498006699586e-05, |
| "loss": 3.71, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1466594119485345e-05, |
| "loss": 3.7263, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1458224550778554e-05, |
| "loss": 3.728, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1449838603268034e-05, |
| "loss": 3.7257, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1441452655757514e-05, |
| "loss": 3.7224, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1433066708246994e-05, |
| "loss": 3.7221, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14246971395402e-05, |
| "loss": 3.7192, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.141631119202968e-05, |
| "loss": 3.7329, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.140792524451916e-05, |
| "loss": 3.7309, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139953929700864e-05, |
| "loss": 3.7298, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139116972830185e-05, |
| "loss": 3.7145, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138278378079133e-05, |
| "loss": 3.7279, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137439783328081e-05, |
| "loss": 3.7281, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136601188577029e-05, |
| "loss": 3.7203, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.135764231706351e-05, |
| "loss": 3.7257, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134925636955299e-05, |
| "loss": 3.7264, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134087042204247e-05, |
| "loss": 3.7256, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.133248447453195e-05, |
| "loss": 3.741, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1324114905825156e-05, |
| "loss": 3.7174, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1315728958314636e-05, |
| "loss": 3.7266, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1307343010804116e-05, |
| "loss": 3.7238, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1298957063293596e-05, |
| "loss": 3.7231, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290587494586805e-05, |
| "loss": 3.7173, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1282201547076285e-05, |
| "loss": 3.7172, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1273815599565765e-05, |
| "loss": 3.722, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1265429652055245e-05, |
| "loss": 3.7107, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125704370454473e-05, |
| "loss": 3.7275, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8657584190368652, |
| "eval_runtime": 303.1251, |
| "eval_samples_per_second": 1258.857, |
| "eval_steps_per_second": 39.34, |
| "step": 534240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.124865775703421e-05, |
| "loss": 3.7247, |
| "step": 534528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1240271809523685e-05, |
| "loss": 3.7104, |
| "step": 535040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1231885862013165e-05, |
| "loss": 3.7234, |
| "step": 535552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1223499914502645e-05, |
| "loss": 3.7231, |
| "step": 536064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1215113966992125e-05, |
| "loss": 3.733, |
| "step": 536576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1206728019481605e-05, |
| "loss": 3.7165, |
| "step": 537088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1198342071971085e-05, |
| "loss": 3.7244, |
| "step": 537600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1189956124460565e-05, |
| "loss": 3.7109, |
| "step": 538112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1181586555753774e-05, |
| "loss": 3.7139, |
| "step": 538624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1173200608243254e-05, |
| "loss": 3.7251, |
| "step": 539136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1164814660732734e-05, |
| "loss": 3.7178, |
| "step": 539648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1156428713222214e-05, |
| "loss": 3.7107, |
| "step": 540160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.114807552331916e-05, |
| "loss": 3.7214, |
| "step": 540672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.113968957580864e-05, |
| "loss": 3.71, |
| "step": 541184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.113130362829812e-05, |
| "loss": 3.7068, |
| "step": 541696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.11229176807876e-05, |
| "loss": 3.7041, |
| "step": 542208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.111453173327708e-05, |
| "loss": 3.7072, |
| "step": 542720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.110614578576656e-05, |
| "loss": 3.7184, |
| "step": 543232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.109775983825604e-05, |
| "loss": 3.7044, |
| "step": 543744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108937389074552e-05, |
| "loss": 3.7281, |
| "step": 544256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108100432203873e-05, |
| "loss": 3.7192, |
| "step": 544768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.107261837452821e-05, |
| "loss": 3.7243, |
| "step": 545280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.106423242701769e-05, |
| "loss": 3.7106, |
| "step": 545792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.105584647950717e-05, |
| "loss": 3.7124, |
| "step": 546304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.104747691080038e-05, |
| "loss": 3.7172, |
| "step": 546816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.103909096328986e-05, |
| "loss": 3.7186, |
| "step": 547328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.103070501577934e-05, |
| "loss": 3.7111, |
| "step": 547840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.102231906826882e-05, |
| "loss": 3.7133, |
| "step": 548352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.101394949956203e-05, |
| "loss": 3.7026, |
| "step": 548864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.100556355205151e-05, |
| "loss": 3.7101, |
| "step": 549376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.099717760454099e-05, |
| "loss": 3.7115, |
| "step": 549888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098879165703047e-05, |
| "loss": 3.72, |
| "step": 550400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098042208832368e-05, |
| "loss": 3.7191, |
| "step": 550912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.097203614081316e-05, |
| "loss": 3.7171, |
| "step": 551424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.096365019330264e-05, |
| "loss": 3.7126, |
| "step": 551936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.095526424579212e-05, |
| "loss": 3.7156, |
| "step": 552448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.094689467708534e-05, |
| "loss": 3.7158, |
| "step": 552960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.093850872957482e-05, |
| "loss": 3.7042, |
| "step": 553472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0930139160868026e-05, |
| "loss": 3.7014, |
| "step": 553984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0921753213357506e-05, |
| "loss": 3.6969, |
| "step": 554496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0913367265846986e-05, |
| "loss": 3.7043, |
| "step": 555008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0904981318336466e-05, |
| "loss": 3.7061, |
| "step": 555520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0896595370825946e-05, |
| "loss": 3.7137, |
| "step": 556032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0888209423315426e-05, |
| "loss": 3.7074, |
| "step": 556544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0879823475804906e-05, |
| "loss": 3.7045, |
| "step": 557056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0871453907098115e-05, |
| "loss": 3.7079, |
| "step": 557568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0863067959587595e-05, |
| "loss": 3.7125, |
| "step": 558080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0854682012077075e-05, |
| "loss": 3.6967, |
| "step": 558592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.084629606456656e-05, |
| "loss": 3.6817, |
| "step": 559104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.083792649585977e-05, |
| "loss": 3.7181, |
| "step": 559616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082954054834925e-05, |
| "loss": 3.7105, |
| "step": 560128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082115460083873e-05, |
| "loss": 3.7181, |
| "step": 560640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.081276865332821e-05, |
| "loss": 3.7043, |
| "step": 561152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.080439908462142e-05, |
| "loss": 3.7066, |
| "step": 561664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.07960131371109e-05, |
| "loss": 3.6956, |
| "step": 562176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.078762718960038e-05, |
| "loss": 3.7025, |
| "step": 562688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077924124208986e-05, |
| "loss": 3.7109, |
| "step": 563200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077085529457934e-05, |
| "loss": 3.6959, |
| "step": 563712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.076246934706881e-05, |
| "loss": 3.7246, |
| "step": 564224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.07540833995583e-05, |
| "loss": 3.7043, |
| "step": 564736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.074569745204778e-05, |
| "loss": 3.686, |
| "step": 565248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0737327883340995e-05, |
| "loss": 3.7185, |
| "step": 565760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072894193583047e-05, |
| "loss": 3.689, |
| "step": 566272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072055598831995e-05, |
| "loss": 3.6895, |
| "step": 566784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.071217004080943e-05, |
| "loss": 3.7133, |
| "step": 567296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0703800472102644e-05, |
| "loss": 3.7092, |
| "step": 567808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.069541452459212e-05, |
| "loss": 3.6922, |
| "step": 568320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.06870285770816e-05, |
| "loss": 3.7002, |
| "step": 568832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.067864262957108e-05, |
| "loss": 3.687, |
| "step": 569344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0670273060864286e-05, |
| "loss": 3.6957, |
| "step": 569856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0661887113353766e-05, |
| "loss": 3.7028, |
| "step": 570368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.065351754464698e-05, |
| "loss": 3.6959, |
| "step": 570880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.064513159713646e-05, |
| "loss": 3.7059, |
| "step": 571392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.063674564962594e-05, |
| "loss": 3.7005, |
| "step": 571904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.062835970211542e-05, |
| "loss": 3.7142, |
| "step": 572416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.06199737546049e-05, |
| "loss": 3.6965, |
| "step": 572928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.061160418589812e-05, |
| "loss": 3.7031, |
| "step": 573440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.060321823838759e-05, |
| "loss": 3.7005, |
| "step": 573952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.059483229087707e-05, |
| "loss": 3.6935, |
| "step": 574464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.058644634336655e-05, |
| "loss": 3.7064, |
| "step": 574976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.057806039585603e-05, |
| "loss": 3.7022, |
| "step": 575488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056967444834551e-05, |
| "loss": 3.6912, |
| "step": 576000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056128850083499e-05, |
| "loss": 3.7107, |
| "step": 576512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.055290255332447e-05, |
| "loss": 3.6852, |
| "step": 577024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.054453298461769e-05, |
| "loss": 3.6882, |
| "step": 577536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0536147037107167e-05, |
| "loss": 3.7028, |
| "step": 578048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0527761089596647e-05, |
| "loss": 3.7016, |
| "step": 578560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0519375142086127e-05, |
| "loss": 3.6955, |
| "step": 579072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0511005573379336e-05, |
| "loss": 3.7013, |
| "step": 579584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0502619625868816e-05, |
| "loss": 3.6854, |
| "step": 580096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0494233678358296e-05, |
| "loss": 3.6951, |
| "step": 580608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0485847730847775e-05, |
| "loss": 3.69, |
| "step": 581120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0477478162140985e-05, |
| "loss": 3.6941, |
| "step": 581632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0469092214630465e-05, |
| "loss": 3.7081, |
| "step": 582144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0460706267119944e-05, |
| "loss": 3.7007, |
| "step": 582656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0452320319609424e-05, |
| "loss": 3.7053, |
| "step": 583168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.044395075090264e-05, |
| "loss": 3.6895, |
| "step": 583680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.043556480339212e-05, |
| "loss": 3.6931, |
| "step": 584192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.04271788558816e-05, |
| "loss": 3.6927, |
| "step": 584704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.041879290837108e-05, |
| "loss": 3.7033, |
| "step": 585216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.041042333966429e-05, |
| "loss": 3.7001, |
| "step": 585728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.040203739215377e-05, |
| "loss": 3.7044, |
| "step": 586240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.039365144464325e-05, |
| "loss": 3.6915, |
| "step": 586752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.038526549713273e-05, |
| "loss": 3.6986, |
| "step": 587264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.037689592842594e-05, |
| "loss": 3.694, |
| "step": 587776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.036850998091542e-05, |
| "loss": 3.6966, |
| "step": 588288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.03601240334049e-05, |
| "loss": 3.6847, |
| "step": 588800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.035173808589438e-05, |
| "loss": 3.7037, |
| "step": 589312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.034338489599132e-05, |
| "loss": 3.6866, |
| "step": 589824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.03349989484808e-05, |
| "loss": 3.6944, |
| "step": 590336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.032661300097028e-05, |
| "loss": 3.6863, |
| "step": 590848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.031822705345976e-05, |
| "loss": 3.6956, |
| "step": 591360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030984110594924e-05, |
| "loss": 3.6934, |
| "step": 591872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030145515843872e-05, |
| "loss": 3.6903, |
| "step": 592384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.02930692109282e-05, |
| "loss": 3.7008, |
| "step": 592896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.028468326341768e-05, |
| "loss": 3.7038, |
| "step": 593408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.027631369471089e-05, |
| "loss": 3.7064, |
| "step": 593920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.026792774720037e-05, |
| "loss": 3.6933, |
| "step": 594432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.025954179968985e-05, |
| "loss": 3.6909, |
| "step": 594944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.025115585217933e-05, |
| "loss": 3.6888, |
| "step": 595456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.024278628347255e-05, |
| "loss": 3.6923, |
| "step": 595968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.023440033596203e-05, |
| "loss": 3.6943, |
| "step": 596480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.022601438845151e-05, |
| "loss": 3.6815, |
| "step": 596992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.021762844094099e-05, |
| "loss": 3.6945, |
| "step": 597504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0209258872234197e-05, |
| "loss": 3.6992, |
| "step": 598016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0200872924723677e-05, |
| "loss": 3.6905, |
| "step": 598528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0192486977213157e-05, |
| "loss": 3.6923, |
| "step": 599040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0184101029702636e-05, |
| "loss": 3.6938, |
| "step": 599552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0175731460995846e-05, |
| "loss": 3.6881, |
| "step": 600064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0167345513485326e-05, |
| "loss": 3.7002, |
| "step": 600576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0158959565974805e-05, |
| "loss": 3.6979, |
| "step": 601088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0150573618464285e-05, |
| "loss": 3.7016, |
| "step": 601600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.01422040497575e-05, |
| "loss": 3.6826, |
| "step": 602112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.013381810224698e-05, |
| "loss": 3.6967, |
| "step": 602624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.012543215473646e-05, |
| "loss": 3.6965, |
| "step": 603136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.011704620722594e-05, |
| "loss": 3.6892, |
| "step": 603648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010867663851915e-05, |
| "loss": 3.6953, |
| "step": 604160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010030706981236e-05, |
| "loss": 3.6945, |
| "step": 604672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.009192112230184e-05, |
| "loss": 3.6959, |
| "step": 605184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.008353517479132e-05, |
| "loss": 3.7144, |
| "step": 605696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.00751492272808e-05, |
| "loss": 3.6826, |
| "step": 606208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.006676327977028e-05, |
| "loss": 3.6995, |
| "step": 606720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.005837733225976e-05, |
| "loss": 3.6908, |
| "step": 607232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.004999138474924e-05, |
| "loss": 3.6926, |
| "step": 607744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0041621816042455e-05, |
| "loss": 3.688, |
| "step": 608256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0033235868531935e-05, |
| "loss": 3.6849, |
| "step": 608768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0024849921021415e-05, |
| "loss": 3.6933, |
| "step": 609280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0016463973510895e-05, |
| "loss": 3.6787, |
| "step": 609792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.000807802600037e-05, |
| "loss": 3.696, |
| "step": 610304 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.859010934829712, |
| "eval_runtime": 304.7053, |
| "eval_samples_per_second": 1252.328, |
| "eval_steps_per_second": 39.136, |
| "step": 610560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.999969207848985e-05, |
| "loss": 3.6903, |
| "step": 610816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.999130613097933e-05, |
| "loss": 3.683, |
| "step": 611328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.998292018346881e-05, |
| "loss": 3.6923, |
| "step": 611840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.997455061476202e-05, |
| "loss": 3.6941, |
| "step": 612352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.99661646672515e-05, |
| "loss": 3.7006, |
| "step": 612864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.995777871974098e-05, |
| "loss": 3.6879, |
| "step": 613376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.994939277223046e-05, |
| "loss": 3.6963, |
| "step": 613888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.994102320352367e-05, |
| "loss": 3.6793, |
| "step": 614400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.993263725601315e-05, |
| "loss": 3.6845, |
| "step": 614912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.992425130850263e-05, |
| "loss": 3.6964, |
| "step": 615424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.991586536099211e-05, |
| "loss": 3.6877, |
| "step": 615936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.990749579228532e-05, |
| "loss": 3.6788, |
| "step": 616448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.989912622357854e-05, |
| "loss": 3.6928, |
| "step": 616960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.989074027606802e-05, |
| "loss": 3.6769, |
| "step": 617472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.988235432855749e-05, |
| "loss": 3.6795, |
| "step": 617984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.987396838104697e-05, |
| "loss": 3.6697, |
| "step": 618496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.986558243353645e-05, |
| "loss": 3.6802, |
| "step": 619008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.985719648602593e-05, |
| "loss": 3.6901, |
| "step": 619520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.984881053851541e-05, |
| "loss": 3.6755, |
| "step": 620032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.984042459100489e-05, |
| "loss": 3.6963, |
| "step": 620544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9832055022298106e-05, |
| "loss": 3.6926, |
| "step": 621056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9823669074787586e-05, |
| "loss": 3.6921, |
| "step": 621568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9815283127277066e-05, |
| "loss": 3.6842, |
| "step": 622080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9806897179766546e-05, |
| "loss": 3.6831, |
| "step": 622592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9798527611059755e-05, |
| "loss": 3.6867, |
| "step": 623104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9790141663549235e-05, |
| "loss": 3.688, |
| "step": 623616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9781755716038715e-05, |
| "loss": 3.6807, |
| "step": 624128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9773369768528195e-05, |
| "loss": 3.6866, |
| "step": 624640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9765000199821404e-05, |
| "loss": 3.6733, |
| "step": 625152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9756614252310884e-05, |
| "loss": 3.6812, |
| "step": 625664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9748228304800364e-05, |
| "loss": 3.6817, |
| "step": 626176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9739842357289844e-05, |
| "loss": 3.6862, |
| "step": 626688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.973147278858306e-05, |
| "loss": 3.6902, |
| "step": 627200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.972308684107254e-05, |
| "loss": 3.6915, |
| "step": 627712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.971470089356202e-05, |
| "loss": 3.6775, |
| "step": 628224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.97063149460515e-05, |
| "loss": 3.6857, |
| "step": 628736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.969794537734471e-05, |
| "loss": 3.6917, |
| "step": 629248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.968955942983419e-05, |
| "loss": 3.6718, |
| "step": 629760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.96811898611274e-05, |
| "loss": 3.6749, |
| "step": 630272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.967280391361688e-05, |
| "loss": 3.6702, |
| "step": 630784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.966441796610636e-05, |
| "loss": 3.6722, |
| "step": 631296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.965603201859584e-05, |
| "loss": 3.6785, |
| "step": 631808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.964764607108532e-05, |
| "loss": 3.6866, |
| "step": 632320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.96392601235748e-05, |
| "loss": 3.6779, |
| "step": 632832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.963087417606428e-05, |
| "loss": 3.6748, |
| "step": 633344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9622488228553764e-05, |
| "loss": 3.6748, |
| "step": 633856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9614118659846974e-05, |
| "loss": 3.6874, |
| "step": 634368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9605732712336454e-05, |
| "loss": 3.6669, |
| "step": 634880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9597346764825933e-05, |
| "loss": 3.6557, |
| "step": 635392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.958897719611914e-05, |
| "loss": 3.6881, |
| "step": 635904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.958059124860862e-05, |
| "loss": 3.6813, |
| "step": 636416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.95722053010981e-05, |
| "loss": 3.6883, |
| "step": 636928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.956381935358758e-05, |
| "loss": 3.6753, |
| "step": 637440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.955544978488079e-05, |
| "loss": 3.6772, |
| "step": 637952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.954706383737027e-05, |
| "loss": 3.6697, |
| "step": 638464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.953867788985975e-05, |
| "loss": 3.6754, |
| "step": 638976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.953029194234923e-05, |
| "loss": 3.6839, |
| "step": 639488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.952190599483872e-05, |
| "loss": 3.6654, |
| "step": 640000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.95135200473282e-05, |
| "loss": 3.6992, |
| "step": 640512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.950513409981768e-05, |
| "loss": 3.6727, |
| "step": 641024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.949674815230715e-05, |
| "loss": 3.6572, |
| "step": 641536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.948837858360037e-05, |
| "loss": 3.6902, |
| "step": 642048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.947999263608985e-05, |
| "loss": 3.6673, |
| "step": 642560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.947160668857933e-05, |
| "loss": 3.6583, |
| "step": 643072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.94632207410688e-05, |
| "loss": 3.6828, |
| "step": 643584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9454851172362016e-05, |
| "loss": 3.6822, |
| "step": 644096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9446481603655225e-05, |
| "loss": 3.666, |
| "step": 644608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9438095656144705e-05, |
| "loss": 3.6706, |
| "step": 645120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9429709708634185e-05, |
| "loss": 3.6555, |
| "step": 645632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.942132376112367e-05, |
| "loss": 3.6678, |
| "step": 646144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.941293781361315e-05, |
| "loss": 3.6774, |
| "step": 646656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.940456824490636e-05, |
| "loss": 3.6682, |
| "step": 647168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.939618229739584e-05, |
| "loss": 3.6753, |
| "step": 647680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.938779634988532e-05, |
| "loss": 3.6706, |
| "step": 648192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.93794104023748e-05, |
| "loss": 3.6862, |
| "step": 648704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.937104083366801e-05, |
| "loss": 3.6684, |
| "step": 649216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.936265488615749e-05, |
| "loss": 3.6759, |
| "step": 649728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.935426893864697e-05, |
| "loss": 3.6721, |
| "step": 650240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.934588299113644e-05, |
| "loss": 3.6672, |
| "step": 650752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.933749704362592e-05, |
| "loss": 3.6797, |
| "step": 651264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.932912747491914e-05, |
| "loss": 3.6725, |
| "step": 651776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9320741527408625e-05, |
| "loss": 3.6682, |
| "step": 652288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.93123555798981e-05, |
| "loss": 3.6817, |
| "step": 652800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.930396963238758e-05, |
| "loss": 3.6583, |
| "step": 653312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9295600063680794e-05, |
| "loss": 3.6627, |
| "step": 653824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9287214116170274e-05, |
| "loss": 3.6713, |
| "step": 654336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.927882816865975e-05, |
| "loss": 3.676, |
| "step": 654848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.927044222114923e-05, |
| "loss": 3.6625, |
| "step": 655360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.926205627363871e-05, |
| "loss": 3.6764, |
| "step": 655872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.925367032612819e-05, |
| "loss": 3.6628, |
| "step": 656384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.924528437861767e-05, |
| "loss": 3.6653, |
| "step": 656896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.923689843110715e-05, |
| "loss": 3.6596, |
| "step": 657408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.922852886240036e-05, |
| "loss": 3.6668, |
| "step": 657920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.922014291488984e-05, |
| "loss": 3.681, |
| "step": 658432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.921175696737932e-05, |
| "loss": 3.671, |
| "step": 658944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.92033710198688e-05, |
| "loss": 3.6767, |
| "step": 659456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.919500145116201e-05, |
| "loss": 3.6641, |
| "step": 659968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.918661550365149e-05, |
| "loss": 3.6636, |
| "step": 660480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.91782459349447e-05, |
| "loss": 3.665, |
| "step": 660992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.916985998743418e-05, |
| "loss": 3.6758, |
| "step": 661504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.916147403992366e-05, |
| "loss": 3.6723, |
| "step": 662016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.915308809241314e-05, |
| "loss": 3.6744, |
| "step": 662528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.914470214490262e-05, |
| "loss": 3.6706, |
| "step": 663040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.91363161973921e-05, |
| "loss": 3.6684, |
| "step": 663552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.912793024988158e-05, |
| "loss": 3.6678, |
| "step": 664064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.911954430237106e-05, |
| "loss": 3.6673, |
| "step": 664576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.911117473366428e-05, |
| "loss": 3.6588, |
| "step": 665088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.910278878615376e-05, |
| "loss": 3.6734, |
| "step": 665600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.909440283864324e-05, |
| "loss": 3.6606, |
| "step": 666112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.908601689113272e-05, |
| "loss": 3.666, |
| "step": 666624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.90776309436222e-05, |
| "loss": 3.6629, |
| "step": 667136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.906924499611168e-05, |
| "loss": 3.6634, |
| "step": 667648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9060859048601157e-05, |
| "loss": 3.6674, |
| "step": 668160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9052473101090637e-05, |
| "loss": 3.6625, |
| "step": 668672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9044103532383846e-05, |
| "loss": 3.6767, |
| "step": 669184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9035717584873326e-05, |
| "loss": 3.6778, |
| "step": 669696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9027331637362806e-05, |
| "loss": 3.6753, |
| "step": 670208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.901894568985228e-05, |
| "loss": 3.664, |
| "step": 670720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9010576121145495e-05, |
| "loss": 3.6681, |
| "step": 671232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.900219017363498e-05, |
| "loss": 3.6614, |
| "step": 671744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.899380422612446e-05, |
| "loss": 3.6677, |
| "step": 672256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.898543465741767e-05, |
| "loss": 3.6691, |
| "step": 672768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.897704870990715e-05, |
| "loss": 3.6543, |
| "step": 673280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.896866276239663e-05, |
| "loss": 3.666, |
| "step": 673792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.896027681488611e-05, |
| "loss": 3.6703, |
| "step": 674304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8951890867375583e-05, |
| "loss": 3.6683, |
| "step": 674816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8943504919865063e-05, |
| "loss": 3.6622, |
| "step": 675328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.893511897235454e-05, |
| "loss": 3.6641, |
| "step": 675840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.892673302484402e-05, |
| "loss": 3.6608, |
| "step": 676352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.891836345613723e-05, |
| "loss": 3.6754, |
| "step": 676864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.890997750862672e-05, |
| "loss": 3.6671, |
| "step": 677376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.89015915611162e-05, |
| "loss": 3.6786, |
| "step": 677888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.889320561360568e-05, |
| "loss": 3.6571, |
| "step": 678400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.888483604489889e-05, |
| "loss": 3.6661, |
| "step": 678912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.887645009738837e-05, |
| "loss": 3.6722, |
| "step": 679424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.886806414987785e-05, |
| "loss": 3.6651, |
| "step": 679936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.885967820236733e-05, |
| "loss": 3.666, |
| "step": 680448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.885130863366054e-05, |
| "loss": 3.6693, |
| "step": 680960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.884292268615002e-05, |
| "loss": 3.6683, |
| "step": 681472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.88345367386395e-05, |
| "loss": 3.6852, |
| "step": 681984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.882615079112898e-05, |
| "loss": 3.6624, |
| "step": 682496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.881776484361846e-05, |
| "loss": 3.6708, |
| "step": 683008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.880939527491167e-05, |
| "loss": 3.6681, |
| "step": 683520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.880100932740115e-05, |
| "loss": 3.6625, |
| "step": 684032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.879262337989063e-05, |
| "loss": 3.664, |
| "step": 684544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.878423743238011e-05, |
| "loss": 3.6587, |
| "step": 685056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.877585148486959e-05, |
| "loss": 3.6659, |
| "step": 685568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.876746553735907e-05, |
| "loss": 3.6577, |
| "step": 686080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.875907958984855e-05, |
| "loss": 3.6663, |
| "step": 686592 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8544273376464844, |
| "eval_runtime": 304.6113, |
| "eval_samples_per_second": 1252.715, |
| "eval_steps_per_second": 39.148, |
| "step": 686880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.875069364233803e-05, |
| "loss": 3.6651, |
| "step": 687104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.874230769482751e-05, |
| "loss": 3.6559, |
| "step": 687616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.873392174731699e-05, |
| "loss": 3.6619, |
| "step": 688128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8725535799806466e-05, |
| "loss": 3.6707, |
| "step": 688640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8717149852295946e-05, |
| "loss": 3.6785, |
| "step": 689152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8708763904785425e-05, |
| "loss": 3.6602, |
| "step": 689664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8700377957274905e-05, |
| "loss": 3.6649, |
| "step": 690176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8691992009764385e-05, |
| "loss": 3.6564, |
| "step": 690688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.86836224410576e-05, |
| "loss": 3.6584, |
| "step": 691200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.867523649354708e-05, |
| "loss": 3.6665, |
| "step": 691712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.866685054603656e-05, |
| "loss": 3.6644, |
| "step": 692224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.865846459852604e-05, |
| "loss": 3.6552, |
| "step": 692736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8650111408622986e-05, |
| "loss": 3.6685, |
| "step": 693248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8641725461112466e-05, |
| "loss": 3.65, |
| "step": 693760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.863333951360194e-05, |
| "loss": 3.6533, |
| "step": 694272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.862495356609142e-05, |
| "loss": 3.6421, |
| "step": 694784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.86165676185809e-05, |
| "loss": 3.6586, |
| "step": 695296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.860818167107038e-05, |
| "loss": 3.6643, |
| "step": 695808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.859979572355986e-05, |
| "loss": 3.6493, |
| "step": 696320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.859140977604934e-05, |
| "loss": 3.6676, |
| "step": 696832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8583040207342555e-05, |
| "loss": 3.6652, |
| "step": 697344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8574654259832035e-05, |
| "loss": 3.6683, |
| "step": 697856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8566268312321515e-05, |
| "loss": 3.6569, |
| "step": 698368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8557882364810995e-05, |
| "loss": 3.6575, |
| "step": 698880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8549512796104204e-05, |
| "loss": 3.6594, |
| "step": 699392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.854114322739741e-05, |
| "loss": 3.6664, |
| "step": 699904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.853275727988689e-05, |
| "loss": 3.6497, |
| "step": 700416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.852437133237637e-05, |
| "loss": 3.6619, |
| "step": 700928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.851598538486585e-05, |
| "loss": 3.647, |
| "step": 701440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.850759943735533e-05, |
| "loss": 3.6541, |
| "step": 701952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.849921348984481e-05, |
| "loss": 3.6596, |
| "step": 702464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.849084392113803e-05, |
| "loss": 3.6645, |
| "step": 702976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.848245797362751e-05, |
| "loss": 3.6577, |
| "step": 703488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.847407202611699e-05, |
| "loss": 3.6695, |
| "step": 704000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.846568607860647e-05, |
| "loss": 3.6539, |
| "step": 704512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.845730013109595e-05, |
| "loss": 3.6598, |
| "step": 705024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.844891418358543e-05, |
| "loss": 3.6622, |
| "step": 705536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.844052823607491e-05, |
| "loss": 3.6439, |
| "step": 706048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.843215866736812e-05, |
| "loss": 3.6489, |
| "step": 706560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.84237727198576e-05, |
| "loss": 3.6494, |
| "step": 707072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.841538677234708e-05, |
| "loss": 3.6455, |
| "step": 707584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.840700082483656e-05, |
| "loss": 3.6509, |
| "step": 708096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.839861487732604e-05, |
| "loss": 3.6633, |
| "step": 708608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.839022892981552e-05, |
| "loss": 3.6507, |
| "step": 709120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8381842982305e-05, |
| "loss": 3.6503, |
| "step": 709632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.837345703479448e-05, |
| "loss": 3.6511, |
| "step": 710144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.836508746608769e-05, |
| "loss": 3.6585, |
| "step": 710656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.835670151857717e-05, |
| "loss": 3.6421, |
| "step": 711168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.834833194987038e-05, |
| "loss": 3.6317, |
| "step": 711680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.833994600235986e-05, |
| "loss": 3.664, |
| "step": 712192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.833156005484934e-05, |
| "loss": 3.656, |
| "step": 712704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.832317410733882e-05, |
| "loss": 3.6663, |
| "step": 713216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.83147881598283e-05, |
| "loss": 3.6472, |
| "step": 713728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.830641859112151e-05, |
| "loss": 3.652, |
| "step": 714240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.829803264361099e-05, |
| "loss": 3.6436, |
| "step": 714752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.828964669610047e-05, |
| "loss": 3.6469, |
| "step": 715264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.828126074858995e-05, |
| "loss": 3.664, |
| "step": 715776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.827289117988317e-05, |
| "loss": 3.6374, |
| "step": 716288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.826450523237265e-05, |
| "loss": 3.6746, |
| "step": 716800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.825611928486213e-05, |
| "loss": 3.6525, |
| "step": 717312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.82477333373516e-05, |
| "loss": 3.6312, |
| "step": 717824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.823934738984108e-05, |
| "loss": 3.6602, |
| "step": 718336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8230977821134296e-05, |
| "loss": 3.6454, |
| "step": 718848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8222591873623776e-05, |
| "loss": 3.6333, |
| "step": 719360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.821420592611325e-05, |
| "loss": 3.6556, |
| "step": 719872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.820581997860273e-05, |
| "loss": 3.66, |
| "step": 720384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.819743403109221e-05, |
| "loss": 3.6394, |
| "step": 720896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.818904808358169e-05, |
| "loss": 3.6476, |
| "step": 721408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.818066213607117e-05, |
| "loss": 3.6298, |
| "step": 721920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.817227618856065e-05, |
| "loss": 3.6437, |
| "step": 722432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8163906619853864e-05, |
| "loss": 3.6505, |
| "step": 722944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8155537051147074e-05, |
| "loss": 3.6412, |
| "step": 723456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8147151103636554e-05, |
| "loss": 3.6549, |
| "step": 723968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8138765156126033e-05, |
| "loss": 3.6417, |
| "step": 724480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8130379208615513e-05, |
| "loss": 3.6666, |
| "step": 724992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.812200963990872e-05, |
| "loss": 3.6438, |
| "step": 725504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.81136236923982e-05, |
| "loss": 3.6539, |
| "step": 726016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.810523774488768e-05, |
| "loss": 3.6449, |
| "step": 726528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.809685179737716e-05, |
| "loss": 3.642, |
| "step": 727040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.808846584986664e-05, |
| "loss": 3.6539, |
| "step": 727552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.808009628115986e-05, |
| "loss": 3.6467, |
| "step": 728064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.807171033364934e-05, |
| "loss": 3.6494, |
| "step": 728576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.806332438613882e-05, |
| "loss": 3.6527, |
| "step": 729088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.80549384386283e-05, |
| "loss": 3.6416, |
| "step": 729600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.804655249111778e-05, |
| "loss": 3.6326, |
| "step": 730112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.803818292241099e-05, |
| "loss": 3.6442, |
| "step": 730624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.802979697490047e-05, |
| "loss": 3.6535, |
| "step": 731136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.802141102738995e-05, |
| "loss": 3.6401, |
| "step": 731648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.801302507987943e-05, |
| "loss": 3.6484, |
| "step": 732160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.800463913236891e-05, |
| "loss": 3.637, |
| "step": 732672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.799625318485839e-05, |
| "loss": 3.6415, |
| "step": 733184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.798786723734787e-05, |
| "loss": 3.6367, |
| "step": 733696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7979497668641076e-05, |
| "loss": 3.6438, |
| "step": 734208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7971111721130556e-05, |
| "loss": 3.6539, |
| "step": 734720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.796274215242377e-05, |
| "loss": 3.6475, |
| "step": 735232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.795435620491325e-05, |
| "loss": 3.6537, |
| "step": 735744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.794597025740273e-05, |
| "loss": 3.6388, |
| "step": 736256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.793758430989221e-05, |
| "loss": 3.6405, |
| "step": 736768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.792919836238169e-05, |
| "loss": 3.6413, |
| "step": 737280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.79208287936749e-05, |
| "loss": 3.6514, |
| "step": 737792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.791244284616438e-05, |
| "loss": 3.6498, |
| "step": 738304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.790405689865386e-05, |
| "loss": 3.6518, |
| "step": 738816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.789567095114334e-05, |
| "loss": 3.6445, |
| "step": 739328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.788728500363282e-05, |
| "loss": 3.6411, |
| "step": 739840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.787891543492603e-05, |
| "loss": 3.6498, |
| "step": 740352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.787052948741551e-05, |
| "loss": 3.642, |
| "step": 740864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.786214353990499e-05, |
| "loss": 3.6373, |
| "step": 741376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7853757592394476e-05, |
| "loss": 3.6481, |
| "step": 741888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7845371644883956e-05, |
| "loss": 3.6375, |
| "step": 742400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7837002076177165e-05, |
| "loss": 3.6398, |
| "step": 742912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7828616128666645e-05, |
| "loss": 3.638, |
| "step": 743424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7820230181156125e-05, |
| "loss": 3.6375, |
| "step": 743936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7811844233645605e-05, |
| "loss": 3.6464, |
| "step": 744448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7803458286135085e-05, |
| "loss": 3.6383, |
| "step": 744960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.779507233862456e-05, |
| "loss": 3.649, |
| "step": 745472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.778668639111404e-05, |
| "loss": 3.6527, |
| "step": 745984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7778316822407254e-05, |
| "loss": 3.6538, |
| "step": 746496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7769930874896734e-05, |
| "loss": 3.6415, |
| "step": 747008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7761544927386214e-05, |
| "loss": 3.6449, |
| "step": 747520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7753158979875694e-05, |
| "loss": 3.6345, |
| "step": 748032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7744773032365174e-05, |
| "loss": 3.6458, |
| "step": 748544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7736387084854654e-05, |
| "loss": 3.6449, |
| "step": 749056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.772801751614786e-05, |
| "loss": 3.6302, |
| "step": 749568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.771963156863734e-05, |
| "loss": 3.6415, |
| "step": 750080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.771124562112682e-05, |
| "loss": 3.6441, |
| "step": 750592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.77028596736163e-05, |
| "loss": 3.6474, |
| "step": 751104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.769447372610578e-05, |
| "loss": 3.6396, |
| "step": 751616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.768608777859526e-05, |
| "loss": 3.6391, |
| "step": 752128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.767770183108474e-05, |
| "loss": 3.6363, |
| "step": 752640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.766933226237795e-05, |
| "loss": 3.6542, |
| "step": 753152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.766094631486743e-05, |
| "loss": 3.6454, |
| "step": 753664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.765256036735691e-05, |
| "loss": 3.6524, |
| "step": 754176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.76441744198464e-05, |
| "loss": 3.6357, |
| "step": 754688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.763578847233588e-05, |
| "loss": 3.6436, |
| "step": 755200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.762740252482536e-05, |
| "loss": 3.6483, |
| "step": 755712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.761901657731484e-05, |
| "loss": 3.6397, |
| "step": 756224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.761063062980432e-05, |
| "loss": 3.6442, |
| "step": 756736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7602277439901257e-05, |
| "loss": 3.641, |
| "step": 757248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7593891492390737e-05, |
| "loss": 3.6495, |
| "step": 757760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7585505544880216e-05, |
| "loss": 3.6544, |
| "step": 758272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7577119597369696e-05, |
| "loss": 3.6421, |
| "step": 758784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7568733649859176e-05, |
| "loss": 3.6442, |
| "step": 759296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7560364081152385e-05, |
| "loss": 3.6444, |
| "step": 759808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7551978133641865e-05, |
| "loss": 3.6401, |
| "step": 760320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.754359218613135e-05, |
| "loss": 3.6404, |
| "step": 760832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.753520623862083e-05, |
| "loss": 3.6343, |
| "step": 761344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.752682029111031e-05, |
| "loss": 3.641, |
| "step": 761856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.751843434359979e-05, |
| "loss": 3.6347, |
| "step": 762368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.751004839608927e-05, |
| "loss": 3.6421, |
| "step": 762880 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8514771461486816, |
| "eval_runtime": 302.4908, |
| "eval_samples_per_second": 1261.496, |
| "eval_steps_per_second": 39.423, |
| "step": 763200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.750167882738248e-05, |
| "loss": 3.6422, |
| "step": 763392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.749329287987196e-05, |
| "loss": 3.6318, |
| "step": 763904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.748490693236144e-05, |
| "loss": 3.6378, |
| "step": 764416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.747652098485092e-05, |
| "loss": 3.6473, |
| "step": 764928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7468135037340394e-05, |
| "loss": 3.6552, |
| "step": 765440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.745976546863361e-05, |
| "loss": 3.637, |
| "step": 765952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.745137952112309e-05, |
| "loss": 3.646, |
| "step": 766464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.744299357361257e-05, |
| "loss": 3.6348, |
| "step": 766976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.743460762610205e-05, |
| "loss": 3.634, |
| "step": 767488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.742622167859153e-05, |
| "loss": 3.6389, |
| "step": 768000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7417852109884746e-05, |
| "loss": 3.6433, |
| "step": 768512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.740946616237422e-05, |
| "loss": 3.6326, |
| "step": 769024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7401096593667435e-05, |
| "loss": 3.6436, |
| "step": 769536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7392710646156915e-05, |
| "loss": 3.6273, |
| "step": 770048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7384324698646395e-05, |
| "loss": 3.6288, |
| "step": 770560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.737593875113587e-05, |
| "loss": 3.6179, |
| "step": 771072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.736755280362535e-05, |
| "loss": 3.6353, |
| "step": 771584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.735916685611483e-05, |
| "loss": 3.6388, |
| "step": 772096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.735078090860431e-05, |
| "loss": 3.626, |
| "step": 772608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.734239496109379e-05, |
| "loss": 3.6426, |
| "step": 773120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.733400901358327e-05, |
| "loss": 3.6461, |
| "step": 773632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7325623066072754e-05, |
| "loss": 3.6456, |
| "step": 774144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7317237118562234e-05, |
| "loss": 3.6351, |
| "step": 774656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7308851171051714e-05, |
| "loss": 3.6316, |
| "step": 775168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.730048160234492e-05, |
| "loss": 3.6391, |
| "step": 775680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.729211203363813e-05, |
| "loss": 3.6422, |
| "step": 776192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.728372608612761e-05, |
| "loss": 3.6272, |
| "step": 776704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.727534013861709e-05, |
| "loss": 3.6379, |
| "step": 777216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.726695419110657e-05, |
| "loss": 3.6281, |
| "step": 777728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.725856824359605e-05, |
| "loss": 3.6269, |
| "step": 778240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.725018229608553e-05, |
| "loss": 3.6376, |
| "step": 778752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.724181272737874e-05, |
| "loss": 3.64, |
| "step": 779264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.723342677986822e-05, |
| "loss": 3.6376, |
| "step": 779776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.722504083235771e-05, |
| "loss": 3.6456, |
| "step": 780288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.721665488484719e-05, |
| "loss": 3.6331, |
| "step": 780800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.720826893733667e-05, |
| "loss": 3.639, |
| "step": 781312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.719988298982615e-05, |
| "loss": 3.6436, |
| "step": 781824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.719149704231563e-05, |
| "loss": 3.6226, |
| "step": 782336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.718312747360884e-05, |
| "loss": 3.6246, |
| "step": 782848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.717474152609832e-05, |
| "loss": 3.6273, |
| "step": 783360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.71663555785878e-05, |
| "loss": 3.6178, |
| "step": 783872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.715796963107728e-05, |
| "loss": 3.6294, |
| "step": 784384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.714958368356676e-05, |
| "loss": 3.6427, |
| "step": 784896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.714119773605623e-05, |
| "loss": 3.6277, |
| "step": 785408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.713281178854571e-05, |
| "loss": 3.628, |
| "step": 785920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7124442219838926e-05, |
| "loss": 3.6318, |
| "step": 786432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7116056272328406e-05, |
| "loss": 3.634, |
| "step": 786944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7107670324817886e-05, |
| "loss": 3.6174, |
| "step": 787456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7099284377307366e-05, |
| "loss": 3.6102, |
| "step": 787968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7090898429796846e-05, |
| "loss": 3.6365, |
| "step": 788480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7082512482286326e-05, |
| "loss": 3.6375, |
| "step": 788992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7074142913579535e-05, |
| "loss": 3.6443, |
| "step": 789504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7065756966069015e-05, |
| "loss": 3.6268, |
| "step": 790016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7057371018558495e-05, |
| "loss": 3.6269, |
| "step": 790528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7048985071047975e-05, |
| "loss": 3.6233, |
| "step": 791040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7040599123537454e-05, |
| "loss": 3.6244, |
| "step": 791552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7032213176026934e-05, |
| "loss": 3.6398, |
| "step": 792064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7023843607320144e-05, |
| "loss": 3.6126, |
| "step": 792576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.701545765980963e-05, |
| "loss": 3.6545, |
| "step": 793088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.700707171229911e-05, |
| "loss": 3.6316, |
| "step": 793600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.699868576478859e-05, |
| "loss": 3.611, |
| "step": 794112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.699029981727807e-05, |
| "loss": 3.6362, |
| "step": 794624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.698193024857128e-05, |
| "loss": 3.6241, |
| "step": 795136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.697354430106076e-05, |
| "loss": 3.6124, |
| "step": 795648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.696515835355024e-05, |
| "loss": 3.6328, |
| "step": 796160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.695677240603972e-05, |
| "loss": 3.637, |
| "step": 796672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.69483864585292e-05, |
| "loss": 3.6151, |
| "step": 797184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.694001688982241e-05, |
| "loss": 3.6324, |
| "step": 797696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.693163094231189e-05, |
| "loss": 3.6078, |
| "step": 798208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.692324499480137e-05, |
| "loss": 3.6219, |
| "step": 798720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.691485904729085e-05, |
| "loss": 3.6231, |
| "step": 799232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6906489478584064e-05, |
| "loss": 3.6213, |
| "step": 799744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6898103531073544e-05, |
| "loss": 3.6283, |
| "step": 800256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6889717583563024e-05, |
| "loss": 3.6217, |
| "step": 800768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6881331636052504e-05, |
| "loss": 3.6427, |
| "step": 801280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6872945688541984e-05, |
| "loss": 3.6237, |
| "step": 801792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.686457611983519e-05, |
| "loss": 3.6337, |
| "step": 802304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.685619017232467e-05, |
| "loss": 3.6208, |
| "step": 802816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.684780422481415e-05, |
| "loss": 3.6201, |
| "step": 803328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.683941827730363e-05, |
| "loss": 3.6326, |
| "step": 803840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.683104870859684e-05, |
| "loss": 3.6227, |
| "step": 804352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.682266276108632e-05, |
| "loss": 3.6263, |
| "step": 804864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.68142768135758e-05, |
| "loss": 3.633, |
| "step": 805376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.680589086606528e-05, |
| "loss": 3.6196, |
| "step": 805888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.679750491855476e-05, |
| "loss": 3.6109, |
| "step": 806400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.678913534984798e-05, |
| "loss": 3.6221, |
| "step": 806912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.678074940233746e-05, |
| "loss": 3.6321, |
| "step": 807424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.677236345482694e-05, |
| "loss": 3.6149, |
| "step": 807936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.676397750731642e-05, |
| "loss": 3.6301, |
| "step": 808448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.675559155980589e-05, |
| "loss": 3.6199, |
| "step": 808960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.674720561229537e-05, |
| "loss": 3.6146, |
| "step": 809472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.673881966478485e-05, |
| "loss": 3.6169, |
| "step": 809984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.673045009607806e-05, |
| "loss": 3.6223, |
| "step": 810496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6722080527371275e-05, |
| "loss": 3.6359, |
| "step": 811008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6713694579860755e-05, |
| "loss": 3.627, |
| "step": 811520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6705308632350235e-05, |
| "loss": 3.6304, |
| "step": 812032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6696922684839715e-05, |
| "loss": 3.6198, |
| "step": 812544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6688536737329195e-05, |
| "loss": 3.6166, |
| "step": 813056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6680150789818675e-05, |
| "loss": 3.6192, |
| "step": 813568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.667178122111189e-05, |
| "loss": 3.6304, |
| "step": 814080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6663395273601364e-05, |
| "loss": 3.6284, |
| "step": 814592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6655009326090844e-05, |
| "loss": 3.6296, |
| "step": 815104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6646623378580324e-05, |
| "loss": 3.6257, |
| "step": 815616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6638237431069804e-05, |
| "loss": 3.6158, |
| "step": 816128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6629851483559284e-05, |
| "loss": 3.6295, |
| "step": 816640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.662148191485249e-05, |
| "loss": 3.621, |
| "step": 817152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.661309596734197e-05, |
| "loss": 3.614, |
| "step": 817664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.660471001983145e-05, |
| "loss": 3.621, |
| "step": 818176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.659632407232094e-05, |
| "loss": 3.6201, |
| "step": 818688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.658795450361415e-05, |
| "loss": 3.6183, |
| "step": 819200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.657956855610363e-05, |
| "loss": 3.6162, |
| "step": 819712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.657118260859311e-05, |
| "loss": 3.6144, |
| "step": 820224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.656279666108259e-05, |
| "loss": 3.625, |
| "step": 820736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.655441071357207e-05, |
| "loss": 3.6239, |
| "step": 821248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.654602476606155e-05, |
| "loss": 3.622, |
| "step": 821760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.653763881855103e-05, |
| "loss": 3.6299, |
| "step": 822272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.652925287104051e-05, |
| "loss": 3.6362, |
| "step": 822784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.652089968113745e-05, |
| "loss": 3.6188, |
| "step": 823296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.651251373362693e-05, |
| "loss": 3.626, |
| "step": 823808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.650412778611641e-05, |
| "loss": 3.6168, |
| "step": 824320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6495741838605893e-05, |
| "loss": 3.6248, |
| "step": 824832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6487355891095373e-05, |
| "loss": 3.621, |
| "step": 825344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.647898632238858e-05, |
| "loss": 3.6126, |
| "step": 825856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.647060037487806e-05, |
| "loss": 3.6155, |
| "step": 826368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.646221442736754e-05, |
| "loss": 3.6247, |
| "step": 826880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.645382847985702e-05, |
| "loss": 3.6265, |
| "step": 827392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.64454425323465e-05, |
| "loss": 3.6144, |
| "step": 827904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.643705658483598e-05, |
| "loss": 3.6169, |
| "step": 828416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.642867063732546e-05, |
| "loss": 3.6199, |
| "step": 828928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.642030106861867e-05, |
| "loss": 3.6269, |
| "step": 829440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.641191512110815e-05, |
| "loss": 3.6272, |
| "step": 829952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.640352917359763e-05, |
| "loss": 3.6327, |
| "step": 830464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.639514322608711e-05, |
| "loss": 3.6124, |
| "step": 830976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.638675727857659e-05, |
| "loss": 3.6219, |
| "step": 831488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.637837133106608e-05, |
| "loss": 3.6257, |
| "step": 832000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.637000176235929e-05, |
| "loss": 3.6157, |
| "step": 832512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.636161581484877e-05, |
| "loss": 3.622, |
| "step": 833024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.635322986733825e-05, |
| "loss": 3.6245, |
| "step": 833536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.634484391982772e-05, |
| "loss": 3.6311, |
| "step": 834048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6336474351120936e-05, |
| "loss": 3.6336, |
| "step": 834560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6328088403610416e-05, |
| "loss": 3.6211, |
| "step": 835072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6319702456099896e-05, |
| "loss": 3.6225, |
| "step": 835584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.631131650858937e-05, |
| "loss": 3.6239, |
| "step": 836096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.630293056107885e-05, |
| "loss": 3.6177, |
| "step": 836608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6294560992372065e-05, |
| "loss": 3.619, |
| "step": 837120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6286175044861545e-05, |
| "loss": 3.6189, |
| "step": 837632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6277789097351025e-05, |
| "loss": 3.6129, |
| "step": 838144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6269403149840505e-05, |
| "loss": 3.6181, |
| "step": 838656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6261017202329985e-05, |
| "loss": 3.6169, |
| "step": 839168 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8503243923187256, |
| "eval_runtime": 308.5125, |
| "eval_samples_per_second": 1236.874, |
| "eval_steps_per_second": 38.653, |
| "step": 839520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6252647633623194e-05, |
| "loss": 3.6184, |
| "step": 839680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6244261686112674e-05, |
| "loss": 3.6133, |
| "step": 840192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6235875738602154e-05, |
| "loss": 3.6216, |
| "step": 840704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6227489791091634e-05, |
| "loss": 3.6246, |
| "step": 841216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6219103843581114e-05, |
| "loss": 3.6334, |
| "step": 841728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.621073427487432e-05, |
| "loss": 3.6197, |
| "step": 842240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.62023483273638e-05, |
| "loss": 3.6261, |
| "step": 842752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.619396237985328e-05, |
| "loss": 3.6154, |
| "step": 843264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.618557643234277e-05, |
| "loss": 3.6102, |
| "step": 843776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.617719048483225e-05, |
| "loss": 3.6167, |
| "step": 844288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.616882091612546e-05, |
| "loss": 3.6276, |
| "step": 844800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.616043496861494e-05, |
| "loss": 3.6089, |
| "step": 845312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.615206539990815e-05, |
| "loss": 3.6252, |
| "step": 845824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.614367945239763e-05, |
| "loss": 3.6054, |
| "step": 846336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.613529350488711e-05, |
| "loss": 3.608, |
| "step": 846848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.612690755737659e-05, |
| "loss": 3.599, |
| "step": 847360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.611852160986607e-05, |
| "loss": 3.6158, |
| "step": 847872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.611013566235555e-05, |
| "loss": 3.6146, |
| "step": 848384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.610174971484503e-05, |
| "loss": 3.6118, |
| "step": 848896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.609336376733451e-05, |
| "loss": 3.6142, |
| "step": 849408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.608497781982399e-05, |
| "loss": 3.6279, |
| "step": 849920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.60766082511172e-05, |
| "loss": 3.6209, |
| "step": 850432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.606822230360668e-05, |
| "loss": 3.6153, |
| "step": 850944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.605983635609616e-05, |
| "loss": 3.6086, |
| "step": 851456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.605145040858564e-05, |
| "loss": 3.6243, |
| "step": 851968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.604308083987885e-05, |
| "loss": 3.6174, |
| "step": 852480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.603469489236833e-05, |
| "loss": 3.6108, |
| "step": 852992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.602630894485781e-05, |
| "loss": 3.6174, |
| "step": 853504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.601792299734729e-05, |
| "loss": 3.6069, |
| "step": 854016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.600953704983677e-05, |
| "loss": 3.6043, |
| "step": 854528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.600115110232625e-05, |
| "loss": 3.6175, |
| "step": 855040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.599278153361946e-05, |
| "loss": 3.6133, |
| "step": 855552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.598439558610894e-05, |
| "loss": 3.6219, |
| "step": 856064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.597600963859842e-05, |
| "loss": 3.6236, |
| "step": 856576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.59676236910879e-05, |
| "loss": 3.6103, |
| "step": 857088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.595923774357739e-05, |
| "loss": 3.6202, |
| "step": 857600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5950868174870596e-05, |
| "loss": 3.6214, |
| "step": 858112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5942482227360076e-05, |
| "loss": 3.6045, |
| "step": 858624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5934096279849556e-05, |
| "loss": 3.6082, |
| "step": 859136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.592571033233903e-05, |
| "loss": 3.5995, |
| "step": 859648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5917340763632245e-05, |
| "loss": 3.6007, |
| "step": 860160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5908954816121725e-05, |
| "loss": 3.6092, |
| "step": 860672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5900568868611205e-05, |
| "loss": 3.6215, |
| "step": 861184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.589218292110068e-05, |
| "loss": 3.6082, |
| "step": 861696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.588379697359016e-05, |
| "loss": 3.6046, |
| "step": 862208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5875427404883374e-05, |
| "loss": 3.6132, |
| "step": 862720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5867041457372854e-05, |
| "loss": 3.6123, |
| "step": 863232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5858655509862334e-05, |
| "loss": 3.6001, |
| "step": 863744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5850269562351814e-05, |
| "loss": 3.5905, |
| "step": 864256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.584189999364503e-05, |
| "loss": 3.6151, |
| "step": 864768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.58335140461345e-05, |
| "loss": 3.6209, |
| "step": 865280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.582512809862398e-05, |
| "loss": 3.6209, |
| "step": 865792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.581674215111346e-05, |
| "loss": 3.6073, |
| "step": 866304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.580837258240668e-05, |
| "loss": 3.6065, |
| "step": 866816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.579998663489615e-05, |
| "loss": 3.6026, |
| "step": 867328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.579160068738563e-05, |
| "loss": 3.6042, |
| "step": 867840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.578321473987511e-05, |
| "loss": 3.6209, |
| "step": 868352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.577484517116833e-05, |
| "loss": 3.5956, |
| "step": 868864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.576645922365781e-05, |
| "loss": 3.6303, |
| "step": 869376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.575807327614729e-05, |
| "loss": 3.6099, |
| "step": 869888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.574968732863677e-05, |
| "loss": 3.5941, |
| "step": 870400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.574130138112625e-05, |
| "loss": 3.6134, |
| "step": 870912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.573293181241946e-05, |
| "loss": 3.6043, |
| "step": 871424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.572454586490894e-05, |
| "loss": 3.5942, |
| "step": 871936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.571615991739842e-05, |
| "loss": 3.6099, |
| "step": 872448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.57077739698879e-05, |
| "loss": 3.6168, |
| "step": 872960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569938802237738e-05, |
| "loss": 3.5965, |
| "step": 873472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569100207486686e-05, |
| "loss": 3.6092, |
| "step": 873984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.568261612735634e-05, |
| "loss": 3.5876, |
| "step": 874496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.567423017984582e-05, |
| "loss": 3.6054, |
| "step": 875008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.566586061113903e-05, |
| "loss": 3.6032, |
| "step": 875520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.565747466362851e-05, |
| "loss": 3.6064, |
| "step": 876032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.564908871611799e-05, |
| "loss": 3.6051, |
| "step": 876544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.564070276860747e-05, |
| "loss": 3.6037, |
| "step": 877056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.563233319990068e-05, |
| "loss": 3.6213, |
| "step": 877568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.562394725239016e-05, |
| "loss": 3.6055, |
| "step": 878080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.561556130487964e-05, |
| "loss": 3.6117, |
| "step": 878592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.560717535736912e-05, |
| "loss": 3.6025, |
| "step": 879104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.559880578866233e-05, |
| "loss": 3.6002, |
| "step": 879616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.559041984115181e-05, |
| "loss": 3.6135, |
| "step": 880128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.558203389364129e-05, |
| "loss": 3.6038, |
| "step": 880640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.557364794613077e-05, |
| "loss": 3.6035, |
| "step": 881152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.556526199862025e-05, |
| "loss": 3.6137, |
| "step": 881664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5556892429913466e-05, |
| "loss": 3.6032, |
| "step": 882176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5548506482402946e-05, |
| "loss": 3.5865, |
| "step": 882688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5540120534892426e-05, |
| "loss": 3.6041, |
| "step": 883200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5531734587381906e-05, |
| "loss": 3.6093, |
| "step": 883712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5523348639871386e-05, |
| "loss": 3.5997, |
| "step": 884224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5514979071164595e-05, |
| "loss": 3.605, |
| "step": 884736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5506593123654075e-05, |
| "loss": 3.6022, |
| "step": 885248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5498207176143555e-05, |
| "loss": 3.5952, |
| "step": 885760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5489821228633035e-05, |
| "loss": 3.5981, |
| "step": 886272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5481435281122515e-05, |
| "loss": 3.6002, |
| "step": 886784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5473065712415724e-05, |
| "loss": 3.612, |
| "step": 887296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5464679764905204e-05, |
| "loss": 3.6083, |
| "step": 887808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5456293817394684e-05, |
| "loss": 3.6131, |
| "step": 888320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5447907869884164e-05, |
| "loss": 3.5953, |
| "step": 888832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.543953830117738e-05, |
| "loss": 3.5987, |
| "step": 889344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.543115235366686e-05, |
| "loss": 3.6029, |
| "step": 889856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.542276640615634e-05, |
| "loss": 3.6095, |
| "step": 890368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.541438045864581e-05, |
| "loss": 3.6094, |
| "step": 890880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.540599451113529e-05, |
| "loss": 3.6094, |
| "step": 891392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.539760856362477e-05, |
| "loss": 3.6018, |
| "step": 891904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.538923899491799e-05, |
| "loss": 3.5969, |
| "step": 892416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.538085304740746e-05, |
| "loss": 3.6098, |
| "step": 892928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.537246709989694e-05, |
| "loss": 3.6004, |
| "step": 893440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.536408115238642e-05, |
| "loss": 3.6012, |
| "step": 893952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.535571158367964e-05, |
| "loss": 3.6004, |
| "step": 894464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.534732563616912e-05, |
| "loss": 3.5994, |
| "step": 894976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.53389396886586e-05, |
| "loss": 3.5994, |
| "step": 895488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.533055374114808e-05, |
| "loss": 3.5953, |
| "step": 896000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.532216779363756e-05, |
| "loss": 3.5935, |
| "step": 896512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5313798224930766e-05, |
| "loss": 3.6059, |
| "step": 897024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5305412277420246e-05, |
| "loss": 3.6031, |
| "step": 897536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5297026329909726e-05, |
| "loss": 3.5996, |
| "step": 898048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5288640382399206e-05, |
| "loss": 3.61, |
| "step": 898560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5280254434888686e-05, |
| "loss": 3.6199, |
| "step": 899072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5271868487378166e-05, |
| "loss": 3.5991, |
| "step": 899584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5263498918671375e-05, |
| "loss": 3.6015, |
| "step": 900096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5255112971160855e-05, |
| "loss": 3.5997, |
| "step": 900608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.524672702365034e-05, |
| "loss": 3.603, |
| "step": 901120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.523834107613982e-05, |
| "loss": 3.6015, |
| "step": 901632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.522997150743303e-05, |
| "loss": 3.5937, |
| "step": 902144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.522158555992251e-05, |
| "loss": 3.5974, |
| "step": 902656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.521319961241199e-05, |
| "loss": 3.6053, |
| "step": 903168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.520481366490147e-05, |
| "loss": 3.6008, |
| "step": 903680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.519642771739095e-05, |
| "loss": 3.5992, |
| "step": 904192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.518804176988043e-05, |
| "loss": 3.6012, |
| "step": 904704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.517965582236991e-05, |
| "loss": 3.601, |
| "step": 905216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.517128625366312e-05, |
| "loss": 3.6043, |
| "step": 905728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.51629003061526e-05, |
| "loss": 3.6081, |
| "step": 906240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.515451435864208e-05, |
| "loss": 3.61, |
| "step": 906752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.514612841113156e-05, |
| "loss": 3.5973, |
| "step": 907264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5137758842424776e-05, |
| "loss": 3.6014, |
| "step": 907776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5129372894914256e-05, |
| "loss": 3.6074, |
| "step": 908288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5120986947403736e-05, |
| "loss": 3.5954, |
| "step": 908800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5112600999893216e-05, |
| "loss": 3.5988, |
| "step": 909312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5104231431186425e-05, |
| "loss": 3.605, |
| "step": 909824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5095845483675905e-05, |
| "loss": 3.6135, |
| "step": 910336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5087459536165385e-05, |
| "loss": 3.6133, |
| "step": 910848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5079073588654864e-05, |
| "loss": 3.6023, |
| "step": 911360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5070687641144344e-05, |
| "loss": 3.603, |
| "step": 911872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5062318072437554e-05, |
| "loss": 3.603, |
| "step": 912384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5053932124927034e-05, |
| "loss": 3.6018, |
| "step": 912896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5045546177416513e-05, |
| "loss": 3.5957, |
| "step": 913408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5037160229905993e-05, |
| "loss": 3.6037, |
| "step": 913920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.502879066119921e-05, |
| "loss": 3.5915, |
| "step": 914432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.502040471368869e-05, |
| "loss": 3.6013, |
| "step": 914944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.501201876617817e-05, |
| "loss": 3.5968, |
| "step": 915456 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8492636680603027, |
| "eval_runtime": 303.1723, |
| "eval_samples_per_second": 1258.66, |
| "eval_steps_per_second": 39.334, |
| "step": 915840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.500363281866765e-05, |
| "loss": 3.6059, |
| "step": 915968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.499524687115712e-05, |
| "loss": 3.588, |
| "step": 916480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.498687730245034e-05, |
| "loss": 3.5973, |
| "step": 916992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.497849135493982e-05, |
| "loss": 3.6096, |
| "step": 917504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.497010540742929e-05, |
| "loss": 3.6076, |
| "step": 918016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.496171945991877e-05, |
| "loss": 3.6057, |
| "step": 918528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.495334989121199e-05, |
| "loss": 3.6053, |
| "step": 919040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.494496394370147e-05, |
| "loss": 3.5951, |
| "step": 919552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.493657799619095e-05, |
| "loss": 3.5899, |
| "step": 920064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.492819204868043e-05, |
| "loss": 3.6026, |
| "step": 920576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.491980610116991e-05, |
| "loss": 3.6003, |
| "step": 921088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.491143653246312e-05, |
| "loss": 3.5972, |
| "step": 921600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.490306696375633e-05, |
| "loss": 3.6049, |
| "step": 922112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.489468101624581e-05, |
| "loss": 3.5848, |
| "step": 922624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.488629506873529e-05, |
| "loss": 3.5908, |
| "step": 923136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4877909121224765e-05, |
| "loss": 3.5791, |
| "step": 923648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4869523173714245e-05, |
| "loss": 3.6009, |
| "step": 924160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4861137226203725e-05, |
| "loss": 3.5915, |
| "step": 924672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4852751278693205e-05, |
| "loss": 3.5955, |
| "step": 925184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4844365331182685e-05, |
| "loss": 3.5968, |
| "step": 925696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4835979383672165e-05, |
| "loss": 3.6083, |
| "step": 926208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.482760981496538e-05, |
| "loss": 3.6027, |
| "step": 926720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.481922386745486e-05, |
| "loss": 3.5966, |
| "step": 927232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.481083791994434e-05, |
| "loss": 3.5921, |
| "step": 927744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.480245197243382e-05, |
| "loss": 3.6064, |
| "step": 928256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.479408240372703e-05, |
| "loss": 3.5971, |
| "step": 928768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.478569645621651e-05, |
| "loss": 3.5905, |
| "step": 929280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.477731050870599e-05, |
| "loss": 3.6005, |
| "step": 929792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.476892456119547e-05, |
| "loss": 3.5833, |
| "step": 930304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.476053861368495e-05, |
| "loss": 3.5923, |
| "step": 930816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.475215266617443e-05, |
| "loss": 3.5997, |
| "step": 931328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.474378309746764e-05, |
| "loss": 3.5957, |
| "step": 931840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.473539714995712e-05, |
| "loss": 3.5994, |
| "step": 932352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4727011202446605e-05, |
| "loss": 3.6056, |
| "step": 932864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4718625254936085e-05, |
| "loss": 3.5918, |
| "step": 933376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4710239307425565e-05, |
| "loss": 3.6005, |
| "step": 933888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4701853359915045e-05, |
| "loss": 3.6024, |
| "step": 934400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4693483791208254e-05, |
| "loss": 3.5846, |
| "step": 934912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4685097843697734e-05, |
| "loss": 3.5897, |
| "step": 935424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4676711896187214e-05, |
| "loss": 3.583, |
| "step": 935936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4668325948676694e-05, |
| "loss": 3.5834, |
| "step": 936448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4659940001166174e-05, |
| "loss": 3.5915, |
| "step": 936960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4651554053655654e-05, |
| "loss": 3.6026, |
| "step": 937472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.464318448494886e-05, |
| "loss": 3.589, |
| "step": 937984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.463479853743834e-05, |
| "loss": 3.5881, |
| "step": 938496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.462641258992782e-05, |
| "loss": 3.5937, |
| "step": 939008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.46180266424173e-05, |
| "loss": 3.5947, |
| "step": 939520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.460965707371052e-05, |
| "loss": 3.582, |
| "step": 940032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.46012711262e-05, |
| "loss": 3.5708, |
| "step": 940544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.459288517868948e-05, |
| "loss": 3.5937, |
| "step": 941056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.458449923117895e-05, |
| "loss": 3.6059, |
| "step": 941568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.457612966247217e-05, |
| "loss": 3.6, |
| "step": 942080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.456774371496165e-05, |
| "loss": 3.5853, |
| "step": 942592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.455935776745113e-05, |
| "loss": 3.589, |
| "step": 943104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.45509718199406e-05, |
| "loss": 3.5867, |
| "step": 943616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.454260225123382e-05, |
| "loss": 3.5788, |
| "step": 944128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.45342163037233e-05, |
| "loss": 3.6097, |
| "step": 944640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.452583035621278e-05, |
| "loss": 3.5736, |
| "step": 945152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4517444408702257e-05, |
| "loss": 3.609, |
| "step": 945664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4509058461191737e-05, |
| "loss": 3.5915, |
| "step": 946176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.450068889248495e-05, |
| "loss": 3.576, |
| "step": 946688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4492302944974426e-05, |
| "loss": 3.5945, |
| "step": 947200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4483916997463906e-05, |
| "loss": 3.5867, |
| "step": 947712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4475531049953386e-05, |
| "loss": 3.5747, |
| "step": 948224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.44671614812466e-05, |
| "loss": 3.5926, |
| "step": 948736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4458775533736075e-05, |
| "loss": 3.5979, |
| "step": 949248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4450389586225555e-05, |
| "loss": 3.5792, |
| "step": 949760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4442003638715034e-05, |
| "loss": 3.5899, |
| "step": 950272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4433617691204514e-05, |
| "loss": 3.57, |
| "step": 950784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4425231743693994e-05, |
| "loss": 3.5865, |
| "step": 951296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.441684579618348e-05, |
| "loss": 3.5844, |
| "step": 951808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.440847622747669e-05, |
| "loss": 3.5881, |
| "step": 952320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.440009027996617e-05, |
| "loss": 3.5839, |
| "step": 952832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.439170433245565e-05, |
| "loss": 3.5866, |
| "step": 953344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.438331838494513e-05, |
| "loss": 3.6027, |
| "step": 953856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.437494881623834e-05, |
| "loss": 3.587, |
| "step": 954368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.436656286872782e-05, |
| "loss": 3.5908, |
| "step": 954880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.43581769212173e-05, |
| "loss": 3.5852, |
| "step": 955392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434979097370678e-05, |
| "loss": 3.5818, |
| "step": 955904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434140502619626e-05, |
| "loss": 3.5944, |
| "step": 956416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.433303545748947e-05, |
| "loss": 3.5845, |
| "step": 956928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.432464950997895e-05, |
| "loss": 3.5923, |
| "step": 957440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4316263562468435e-05, |
| "loss": 3.5908, |
| "step": 957952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4307877614957915e-05, |
| "loss": 3.5898, |
| "step": 958464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4299491667447395e-05, |
| "loss": 3.5655, |
| "step": 958976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4291122098740604e-05, |
| "loss": 3.588, |
| "step": 959488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4282736151230084e-05, |
| "loss": 3.5879, |
| "step": 960000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4274350203719564e-05, |
| "loss": 3.5814, |
| "step": 960512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4265964256209044e-05, |
| "loss": 3.5873, |
| "step": 961024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.425759468750225e-05, |
| "loss": 3.5822, |
| "step": 961536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.424920873999173e-05, |
| "loss": 3.5783, |
| "step": 962048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.424082279248121e-05, |
| "loss": 3.5784, |
| "step": 962560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.423243684497069e-05, |
| "loss": 3.584, |
| "step": 963072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.42240672762639e-05, |
| "loss": 3.5905, |
| "step": 963584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.421568132875338e-05, |
| "loss": 3.5913, |
| "step": 964096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.420729538124287e-05, |
| "loss": 3.5938, |
| "step": 964608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.419890943373235e-05, |
| "loss": 3.5829, |
| "step": 965120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.419053986502556e-05, |
| "loss": 3.5793, |
| "step": 965632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.418215391751504e-05, |
| "loss": 3.5827, |
| "step": 966144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.417376797000452e-05, |
| "loss": 3.5918, |
| "step": 966656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4165382022494e-05, |
| "loss": 3.5904, |
| "step": 967168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.415699607498348e-05, |
| "loss": 3.5904, |
| "step": 967680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4148626506276686e-05, |
| "loss": 3.5849, |
| "step": 968192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4140240558766166e-05, |
| "loss": 3.5796, |
| "step": 968704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4131854611255646e-05, |
| "loss": 3.5913, |
| "step": 969216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4123468663745126e-05, |
| "loss": 3.5822, |
| "step": 969728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4115099095038335e-05, |
| "loss": 3.581, |
| "step": 970240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.410671314752782e-05, |
| "loss": 3.5844, |
| "step": 970752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.40983272000173e-05, |
| "loss": 3.581, |
| "step": 971264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.408994125250678e-05, |
| "loss": 3.5797, |
| "step": 971776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.408157168379999e-05, |
| "loss": 3.5806, |
| "step": 972288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.407318573628947e-05, |
| "loss": 3.5729, |
| "step": 972800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.406479978877895e-05, |
| "loss": 3.5862, |
| "step": 973312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.405641384126843e-05, |
| "loss": 3.5906, |
| "step": 973824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.404804427256164e-05, |
| "loss": 3.578, |
| "step": 974336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.403965832505112e-05, |
| "loss": 3.5908, |
| "step": 974848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.40312723775406e-05, |
| "loss": 3.5987, |
| "step": 975360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.402288643003008e-05, |
| "loss": 3.5816, |
| "step": 975872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.401451686132329e-05, |
| "loss": 3.5845, |
| "step": 976384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4006130913812776e-05, |
| "loss": 3.5834, |
| "step": 976896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3997744966302256e-05, |
| "loss": 3.5823, |
| "step": 977408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3989359018791736e-05, |
| "loss": 3.5824, |
| "step": 977920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398097307128121e-05, |
| "loss": 3.5779, |
| "step": 978432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.397258712377069e-05, |
| "loss": 3.5816, |
| "step": 978944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3964217555063905e-05, |
| "loss": 3.5864, |
| "step": 979456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3955831607553385e-05, |
| "loss": 3.5815, |
| "step": 979968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.394744566004286e-05, |
| "loss": 3.5852, |
| "step": 980480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.393905971253234e-05, |
| "loss": 3.5831, |
| "step": 980992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.393067376502182e-05, |
| "loss": 3.5832, |
| "step": 981504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.392230419631503e-05, |
| "loss": 3.5819, |
| "step": 982016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3913918248804514e-05, |
| "loss": 3.5962, |
| "step": 982528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3905532301293993e-05, |
| "loss": 3.5909, |
| "step": 983040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3897146353783473e-05, |
| "loss": 3.5811, |
| "step": 983552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388877678507668e-05, |
| "loss": 3.5789, |
| "step": 984064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388039083756616e-05, |
| "loss": 3.5925, |
| "step": 984576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.387200489005564e-05, |
| "loss": 3.575, |
| "step": 985088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.386361894254512e-05, |
| "loss": 3.586, |
| "step": 985600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.38552329950346e-05, |
| "loss": 3.5867, |
| "step": 986112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.384686342632781e-05, |
| "loss": 3.5951, |
| "step": 986624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383847747881729e-05, |
| "loss": 3.5912, |
| "step": 987136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383009153130677e-05, |
| "loss": 3.59, |
| "step": 987648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.382170558379625e-05, |
| "loss": 3.5819, |
| "step": 988160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.381333601508947e-05, |
| "loss": 3.5908, |
| "step": 988672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.380495006757895e-05, |
| "loss": 3.5799, |
| "step": 989184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.379656412006843e-05, |
| "loss": 3.578, |
| "step": 989696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.378817817255791e-05, |
| "loss": 3.5843, |
| "step": 990208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3779808603851116e-05, |
| "loss": 3.5724, |
| "step": 990720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3771422656340596e-05, |
| "loss": 3.5858, |
| "step": 991232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3763036708830076e-05, |
| "loss": 3.5769, |
| "step": 991744 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.849591016769409, |
| "eval_runtime": 303.1829, |
| "eval_samples_per_second": 1258.616, |
| "eval_steps_per_second": 39.333, |
| "step": 992160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3754650761319556e-05, |
| "loss": 3.5821, |
| "step": 992256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3746264813809036e-05, |
| "loss": 3.5756, |
| "step": 992768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3737878866298516e-05, |
| "loss": 3.5809, |
| "step": 993280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3729492918787996e-05, |
| "loss": 3.595, |
| "step": 993792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3721106971277476e-05, |
| "loss": 3.5903, |
| "step": 994304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3712737402570685e-05, |
| "loss": 3.5935, |
| "step": 994816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3704351455060165e-05, |
| "loss": 3.5828, |
| "step": 995328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.369596550754965e-05, |
| "loss": 3.5786, |
| "step": 995840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.368757956003913e-05, |
| "loss": 3.5694, |
| "step": 996352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367920999133234e-05, |
| "loss": 3.5863, |
| "step": 996864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367082404382182e-05, |
| "loss": 3.5818, |
| "step": 997376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.36624380963113e-05, |
| "loss": 3.579, |
| "step": 997888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.365406852760451e-05, |
| "loss": 3.5921, |
| "step": 998400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.364568258009399e-05, |
| "loss": 3.5648, |
| "step": 998912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.363729663258347e-05, |
| "loss": 3.5716, |
| "step": 999424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.362891068507295e-05, |
| "loss": 3.5694, |
| "step": 999936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.362052473756243e-05, |
| "loss": 3.5778, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.361213879005191e-05, |
| "loss": 3.5759, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.360375284254139e-05, |
| "loss": 3.5748, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.359536689503087e-05, |
| "loss": 3.582, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.358698094752035e-05, |
| "loss": 3.586, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3578611378813565e-05, |
| "loss": 3.5855, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3570225431303045e-05, |
| "loss": 3.5802, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.356183948379252e-05, |
| "loss": 3.572, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3553453536282e-05, |
| "loss": 3.5926, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3545083967575214e-05, |
| "loss": 3.5766, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3536698020064694e-05, |
| "loss": 3.5733, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.352831207255417e-05, |
| "loss": 3.5822, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.351992612504365e-05, |
| "loss": 3.5686, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.351154017753313e-05, |
| "loss": 3.5752, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.350317060882634e-05, |
| "loss": 3.5778, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.349478466131582e-05, |
| "loss": 3.5781, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.34863987138053e-05, |
| "loss": 3.5834, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.347801276629478e-05, |
| "loss": 3.5884, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.346964319758799e-05, |
| "loss": 3.5739, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.346125725007747e-05, |
| "loss": 3.5843, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.345287130256695e-05, |
| "loss": 3.5878, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.344448535505643e-05, |
| "loss": 3.5676, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.343611578634964e-05, |
| "loss": 3.5737, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.342772983883912e-05, |
| "loss": 3.5608, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.34193438913286e-05, |
| "loss": 3.57, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.341095794381808e-05, |
| "loss": 3.568, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.340258837511129e-05, |
| "loss": 3.59, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.339420242760078e-05, |
| "loss": 3.5679, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.338581648009026e-05, |
| "loss": 3.5709, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.337743053257974e-05, |
| "loss": 3.5755, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3369044585069217e-05, |
| "loss": 3.5779, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3360675016362426e-05, |
| "loss": 3.5671, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3352289068851906e-05, |
| "loss": 3.5547, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3343903121341386e-05, |
| "loss": 3.5772, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3335533552634595e-05, |
| "loss": 3.5839, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3327147605124075e-05, |
| "loss": 3.5804, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3318761657613555e-05, |
| "loss": 3.5705, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3310375710103035e-05, |
| "loss": 3.5665, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3301989762592515e-05, |
| "loss": 3.5741, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.329362019388573e-05, |
| "loss": 3.5606, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.328523424637521e-05, |
| "loss": 3.5907, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.327684829886469e-05, |
| "loss": 3.56, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.326846235135417e-05, |
| "loss": 3.5915, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.326009278264738e-05, |
| "loss": 3.5787, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.325170683513686e-05, |
| "loss": 3.5573, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.324332088762634e-05, |
| "loss": 3.576, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.323493494011582e-05, |
| "loss": 3.5736, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.322656537140903e-05, |
| "loss": 3.5587, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.321817942389851e-05, |
| "loss": 3.567, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320979347638799e-05, |
| "loss": 3.5853, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320140752887747e-05, |
| "loss": 3.5651, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.319302158136695e-05, |
| "loss": 3.5693, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.318463563385643e-05, |
| "loss": 3.5558, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3176249686345915e-05, |
| "loss": 3.5717, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3167880117639124e-05, |
| "loss": 3.5631, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3159494170128604e-05, |
| "loss": 3.5702, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3151108222618084e-05, |
| "loss": 3.568, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3142722275107564e-05, |
| "loss": 3.5711, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3134336327597044e-05, |
| "loss": 3.5833, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.312596675889025e-05, |
| "loss": 3.5684, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.311758081137973e-05, |
| "loss": 3.5727, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.310919486386921e-05, |
| "loss": 3.5723, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.310080891635869e-05, |
| "loss": 3.5676, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.30924393476519e-05, |
| "loss": 3.5741, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.308405340014138e-05, |
| "loss": 3.5677, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.307566745263087e-05, |
| "loss": 3.5703, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.306728150512035e-05, |
| "loss": 3.5759, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.305891193641356e-05, |
| "loss": 3.5705, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.305052598890304e-05, |
| "loss": 3.5537, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.304214004139252e-05, |
| "loss": 3.5703, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3033754093882e-05, |
| "loss": 3.5648, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.302536814637147e-05, |
| "loss": 3.5693, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3016998577664686e-05, |
| "loss": 3.5673, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3008612630154166e-05, |
| "loss": 3.5712, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3000226682643646e-05, |
| "loss": 3.5577, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.299184073513312e-05, |
| "loss": 3.5619, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2983471166426335e-05, |
| "loss": 3.564, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.297508521891582e-05, |
| "loss": 3.577, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.29666992714053e-05, |
| "loss": 3.5709, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2958313323894775e-05, |
| "loss": 3.5806, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2949927376384255e-05, |
| "loss": 3.5602, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.294155780767747e-05, |
| "loss": 3.5661, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2933171860166944e-05, |
| "loss": 3.5667, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2924785912656424e-05, |
| "loss": 3.5748, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2916399965145904e-05, |
| "loss": 3.5714, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.290803039643912e-05, |
| "loss": 3.5745, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.289964444892859e-05, |
| "loss": 3.5669, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.289125850141807e-05, |
| "loss": 3.5641, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.288287255390756e-05, |
| "loss": 3.5786, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.287448660639704e-05, |
| "loss": 3.5609, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.286611703769025e-05, |
| "loss": 3.5634, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.285773109017973e-05, |
| "loss": 3.5686, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.284934514266921e-05, |
| "loss": 3.5654, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.284095919515869e-05, |
| "loss": 3.5627, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.28325896264519e-05, |
| "loss": 3.5676, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.282420367894138e-05, |
| "loss": 3.5563, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.281581773143086e-05, |
| "loss": 3.5703, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.280743178392034e-05, |
| "loss": 3.5711, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.279906221521355e-05, |
| "loss": 3.5606, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.279067626770303e-05, |
| "loss": 3.5762, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.278229032019251e-05, |
| "loss": 3.5806, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2773904372681994e-05, |
| "loss": 3.5671, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2765518425171474e-05, |
| "loss": 3.5693, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.275714885646468e-05, |
| "loss": 3.5651, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.274876290895416e-05, |
| "loss": 3.5648, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.274037696144364e-05, |
| "loss": 3.5634, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.273199101393312e-05, |
| "loss": 3.5618, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.272362144522633e-05, |
| "loss": 3.5641, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.271523549771581e-05, |
| "loss": 3.5726, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.270684955020529e-05, |
| "loss": 3.5685, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.269846360269477e-05, |
| "loss": 3.5636, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.269007765518425e-05, |
| "loss": 3.5652, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.268169170767373e-05, |
| "loss": 3.5658, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.267332213896695e-05, |
| "loss": 3.5624, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.266493619145643e-05, |
| "loss": 3.5795, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.265655024394591e-05, |
| "loss": 3.5754, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.264816429643539e-05, |
| "loss": 3.5676, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2639794727728596e-05, |
| "loss": 3.5618, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2631408780218076e-05, |
| "loss": 3.5706, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2623022832707556e-05, |
| "loss": 3.5634, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2614636885197036e-05, |
| "loss": 3.5669, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2606250937686516e-05, |
| "loss": 3.5694, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2597881368979725e-05, |
| "loss": 3.5855, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2589495421469205e-05, |
| "loss": 3.5679, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2581109473958685e-05, |
| "loss": 3.574, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2572723526448165e-05, |
| "loss": 3.5659, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.256435395774138e-05, |
| "loss": 3.5742, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.255596801023086e-05, |
| "loss": 3.5639, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.254758206272034e-05, |
| "loss": 3.5595, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.253919611520982e-05, |
| "loss": 3.57, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.253082654650303e-05, |
| "loss": 3.5547, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.252244059899251e-05, |
| "loss": 3.5697, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.251405465148199e-05, |
| "loss": 3.558, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.850817918777466, |
| "eval_runtime": 303.4458, |
| "eval_samples_per_second": 1257.526, |
| "eval_steps_per_second": 39.299, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.250566870397147e-05, |
| "loss": 3.5688, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.249729913526468e-05, |
| "loss": 3.5615, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.248891318775416e-05, |
| "loss": 3.5612, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.248052724024364e-05, |
| "loss": 3.5765, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.247214129273312e-05, |
| "loss": 3.5716, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2463771724026335e-05, |
| "loss": 3.5774, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2455385776515814e-05, |
| "loss": 3.5634, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2446999829005294e-05, |
| "loss": 3.5618, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2438613881494774e-05, |
| "loss": 3.5585, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2430244312787984e-05, |
| "loss": 3.5681, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2421858365277463e-05, |
| "loss": 3.5669, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2413472417766943e-05, |
| "loss": 3.5636, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.240510284906015e-05, |
| "loss": 3.5753, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.239671690154963e-05, |
| "loss": 3.5507, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.238833095403911e-05, |
| "loss": 3.5505, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.237994500652859e-05, |
| "loss": 3.5546, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.237155905901807e-05, |
| "loss": 3.5577, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.236317311150755e-05, |
| "loss": 3.5608, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.235478716399703e-05, |
| "loss": 3.5613, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.234640121648651e-05, |
| "loss": 3.5613, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.233801526897599e-05, |
| "loss": 3.5704, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.23296457002692e-05, |
| "loss": 3.5663, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.232125975275868e-05, |
| "loss": 3.567, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.231287380524816e-05, |
| "loss": 3.5573, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.230448785773764e-05, |
| "loss": 3.576, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.229611828903085e-05, |
| "loss": 3.5614, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.228773234152033e-05, |
| "loss": 3.5617, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.227934639400981e-05, |
| "loss": 3.5602, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.227096044649929e-05, |
| "loss": 3.5547, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.226257449898878e-05, |
| "loss": 3.5601, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2254204930281986e-05, |
| "loss": 3.5602, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2245818982771466e-05, |
| "loss": 3.5624, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2237433035260946e-05, |
| "loss": 3.5696, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2229047087750426e-05, |
| "loss": 3.5707, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2220677519043635e-05, |
| "loss": 3.5598, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2212291571533115e-05, |
| "loss": 3.5681, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2203905624022595e-05, |
| "loss": 3.5662, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2195519676512075e-05, |
| "loss": 3.5539, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2187150107805284e-05, |
| "loss": 3.5577, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2178764160294764e-05, |
| "loss": 3.546, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2170378212784244e-05, |
| "loss": 3.5567, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.216199226527373e-05, |
| "loss": 3.5536, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.215362269656694e-05, |
| "loss": 3.5714, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.214523674905642e-05, |
| "loss": 3.5533, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.21368508015459e-05, |
| "loss": 3.5545, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.212846485403538e-05, |
| "loss": 3.5614, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.212007890652486e-05, |
| "loss": 3.5578, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.211170933781807e-05, |
| "loss": 3.558, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.210332339030755e-05, |
| "loss": 3.5328, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.209493744279703e-05, |
| "loss": 3.56, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.208655149528651e-05, |
| "loss": 3.5686, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.207818192657972e-05, |
| "loss": 3.5678, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.20697959790692e-05, |
| "loss": 3.5539, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.206141003155868e-05, |
| "loss": 3.5508, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2053024084048164e-05, |
| "loss": 3.5585, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.204465451534137e-05, |
| "loss": 3.5476, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.203626856783085e-05, |
| "loss": 3.5705, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.202788262032033e-05, |
| "loss": 3.5436, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.201949667280981e-05, |
| "loss": 3.5773, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.201112710410302e-05, |
| "loss": 3.5621, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.20027411565925e-05, |
| "loss": 3.5407, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.199435520908198e-05, |
| "loss": 3.5575, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.198596926157146e-05, |
| "loss": 3.5622, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.197759969286467e-05, |
| "loss": 3.5414, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.196921374535415e-05, |
| "loss": 3.5479, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.196082779784363e-05, |
| "loss": 3.5705, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.195244185033312e-05, |
| "loss": 3.5528, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.19440559028226e-05, |
| "loss": 3.5523, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.193568633411581e-05, |
| "loss": 3.5373, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.192730038660529e-05, |
| "loss": 3.5557, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.191891443909477e-05, |
| "loss": 3.5481, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.191052849158425e-05, |
| "loss": 3.5578, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1902158922877456e-05, |
| "loss": 3.5485, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1893772975366936e-05, |
| "loss": 3.5569, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1885387027856416e-05, |
| "loss": 3.5639, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1877001080345896e-05, |
| "loss": 3.5579, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1868631511639105e-05, |
| "loss": 3.5552, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1860245564128585e-05, |
| "loss": 3.5533, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.185185961661807e-05, |
| "loss": 3.5556, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.184347366910755e-05, |
| "loss": 3.5577, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.183510410040076e-05, |
| "loss": 3.551, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.182671815289024e-05, |
| "loss": 3.555, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.181833220537972e-05, |
| "loss": 3.5561, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.18099462578692e-05, |
| "loss": 3.5551, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.180157668916241e-05, |
| "loss": 3.5393, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.179319074165189e-05, |
| "loss": 3.5553, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.178480479414137e-05, |
| "loss": 3.5502, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.177641884663085e-05, |
| "loss": 3.5539, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.176803289912033e-05, |
| "loss": 3.5556, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.175966333041354e-05, |
| "loss": 3.555, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1751277382903025e-05, |
| "loss": 3.5448, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1742891435392505e-05, |
| "loss": 3.5447, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1734505487881985e-05, |
| "loss": 3.5489, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1726135919175194e-05, |
| "loss": 3.5611, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1717749971664674e-05, |
| "loss": 3.5557, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1709364024154154e-05, |
| "loss": 3.5628, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.170097807664363e-05, |
| "loss": 3.5473, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.169260850793684e-05, |
| "loss": 3.5523, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.168422256042632e-05, |
| "loss": 3.545, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.16758366129158e-05, |
| "loss": 3.5614, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1667450665405276e-05, |
| "loss": 3.5547, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.165908109669849e-05, |
| "loss": 3.56, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.165069514918798e-05, |
| "loss": 3.5511, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.164230920167746e-05, |
| "loss": 3.5487, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.163392325416693e-05, |
| "loss": 3.5651, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.162553730665641e-05, |
| "loss": 3.5411, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.161716773794963e-05, |
| "loss": 3.5479, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.16087817904391e-05, |
| "loss": 3.5561, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.160039584292858e-05, |
| "loss": 3.5459, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.159200989541806e-05, |
| "loss": 3.5473, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.158364032671128e-05, |
| "loss": 3.5532, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.157525437920075e-05, |
| "loss": 3.5419, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.156686843169023e-05, |
| "loss": 3.5555, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1558482484179717e-05, |
| "loss": 3.5509, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.155011291547293e-05, |
| "loss": 3.5521, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1541726967962406e-05, |
| "loss": 3.5599, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1533341020451886e-05, |
| "loss": 3.5639, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1524955072941366e-05, |
| "loss": 3.553, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1516585504234575e-05, |
| "loss": 3.5522, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1508199556724055e-05, |
| "loss": 3.5496, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1499813609213535e-05, |
| "loss": 3.5503, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1491427661703014e-05, |
| "loss": 3.5447, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1483041714192494e-05, |
| "loss": 3.5507, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1474672145485704e-05, |
| "loss": 3.544, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1466286197975183e-05, |
| "loss": 3.5584, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.145790025046467e-05, |
| "loss": 3.5513, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.144951430295415e-05, |
| "loss": 3.5525, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.144114473424736e-05, |
| "loss": 3.5509, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.143275878673684e-05, |
| "loss": 3.5495, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.142437283922632e-05, |
| "loss": 3.5479, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.14159868917158e-05, |
| "loss": 3.5644, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.140761732300901e-05, |
| "loss": 3.5604, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.139923137549849e-05, |
| "loss": 3.5543, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.139084542798797e-05, |
| "loss": 3.5412, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.138245948047745e-05, |
| "loss": 3.5563, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.137407353296693e-05, |
| "loss": 3.5486, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.136570396426014e-05, |
| "loss": 3.5524, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1357318016749624e-05, |
| "loss": 3.5561, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1348932069239104e-05, |
| "loss": 3.5679, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1340546121728584e-05, |
| "loss": 3.5521, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.133217655302179e-05, |
| "loss": 3.5614, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.132379060551127e-05, |
| "loss": 3.5513, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.131540465800075e-05, |
| "loss": 3.5561, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.130701871049023e-05, |
| "loss": 3.545, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.129864914178344e-05, |
| "loss": 3.5474, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.129026319427292e-05, |
| "loss": 3.5568, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.12818772467624e-05, |
| "loss": 3.5397, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.127349129925188e-05, |
| "loss": 3.5545, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.126512173054509e-05, |
| "loss": 3.547, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8513243198394775, |
| "eval_runtime": 303.1627, |
| "eval_samples_per_second": 1258.7, |
| "eval_steps_per_second": 39.335, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.125673578303457e-05, |
| "loss": 3.5527, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.124834983552406e-05, |
| "loss": 3.5462, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.123996388801354e-05, |
| "loss": 3.5457, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.123157794050302e-05, |
| "loss": 3.5588, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1223208371796227e-05, |
| "loss": 3.5596, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1214822424285706e-05, |
| "loss": 3.5653, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1206436476775186e-05, |
| "loss": 3.5465, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1198050529264666e-05, |
| "loss": 3.5484, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1189664581754146e-05, |
| "loss": 3.54, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1181295013047355e-05, |
| "loss": 3.5558, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1172909065536835e-05, |
| "loss": 3.5496, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1164523118026315e-05, |
| "loss": 3.5487, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1156137170515795e-05, |
| "loss": 3.5524, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.114776760180901e-05, |
| "loss": 3.5432, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.113938165429849e-05, |
| "loss": 3.5412, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.113099570678797e-05, |
| "loss": 3.5392, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.112260975927745e-05, |
| "loss": 3.5371, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.111424019057066e-05, |
| "loss": 3.5432, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.110585424306014e-05, |
| "loss": 3.5467, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.109746829554962e-05, |
| "loss": 3.5414, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.10890823480391e-05, |
| "loss": 3.5593, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.108071277933231e-05, |
| "loss": 3.551, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.107232683182179e-05, |
| "loss": 3.5489, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.106394088431127e-05, |
| "loss": 3.5448, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.105555493680075e-05, |
| "loss": 3.5597, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1047185368093965e-05, |
| "loss": 3.543, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1038799420583445e-05, |
| "loss": 3.5483, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1030413473072925e-05, |
| "loss": 3.541, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1022027525562405e-05, |
| "loss": 3.5449, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1013657956855614e-05, |
| "loss": 3.5434, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1005272009345094e-05, |
| "loss": 3.5421, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0996886061834574e-05, |
| "loss": 3.5521, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0988500114324054e-05, |
| "loss": 3.5539, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098013054561726e-05, |
| "loss": 3.5505, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097174459810674e-05, |
| "loss": 3.5462, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.096335865059622e-05, |
| "loss": 3.5513, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.09549727030857e-05, |
| "loss": 3.5515, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.094660313437892e-05, |
| "loss": 3.5353, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.09382171868684e-05, |
| "loss": 3.5459, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.092983123935788e-05, |
| "loss": 3.5281, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.092144529184736e-05, |
| "loss": 3.5423, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.091305934433683e-05, |
| "loss": 3.5361, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.090468977563005e-05, |
| "loss": 3.5583, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.089630382811953e-05, |
| "loss": 3.5425, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.088791788060901e-05, |
| "loss": 3.5384, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.087953193309848e-05, |
| "loss": 3.545, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0871162364391696e-05, |
| "loss": 3.5465, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0862776416881176e-05, |
| "loss": 3.5399, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0854390469370656e-05, |
| "loss": 3.5201, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0846004521860136e-05, |
| "loss": 3.5406, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.083763495315335e-05, |
| "loss": 3.5554, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082924900564283e-05, |
| "loss": 3.5502, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0820863058132305e-05, |
| "loss": 3.5418, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0812477110621785e-05, |
| "loss": 3.5343, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0804107541915e-05, |
| "loss": 3.5463, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0795721594404474e-05, |
| "loss": 3.5303, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0787335646893954e-05, |
| "loss": 3.5577, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0778949699383434e-05, |
| "loss": 3.5284, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0770563751872914e-05, |
| "loss": 3.5569, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076219418316612e-05, |
| "loss": 3.5508, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.075380823565561e-05, |
| "loss": 3.5271, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.074542228814509e-05, |
| "loss": 3.5427, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.073703634063457e-05, |
| "loss": 3.5483, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.072866677192778e-05, |
| "loss": 3.5243, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.072028082441726e-05, |
| "loss": 3.5332, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071189487690674e-05, |
| "loss": 3.554, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.070350892939622e-05, |
| "loss": 3.5381, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06951229818857e-05, |
| "loss": 3.5349, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.068675341317891e-05, |
| "loss": 3.5283, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.067836746566839e-05, |
| "loss": 3.5401, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.066998151815787e-05, |
| "loss": 3.5302, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.066159557064735e-05, |
| "loss": 3.5448, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0653226001940564e-05, |
| "loss": 3.5321, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0644840054430044e-05, |
| "loss": 3.5439, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0636454106919524e-05, |
| "loss": 3.5477, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0628068159409004e-05, |
| "loss": 3.5415, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061969859070221e-05, |
| "loss": 3.5379, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061131264319169e-05, |
| "loss": 3.5379, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.060292669568117e-05, |
| "loss": 3.5445, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.059454074817065e-05, |
| "loss": 3.5412, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.058617117946386e-05, |
| "loss": 3.539, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057778523195334e-05, |
| "loss": 3.5385, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.056939928444282e-05, |
| "loss": 3.5411, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.05610133369323e-05, |
| "loss": 3.5402, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.055264376822552e-05, |
| "loss": 3.5238, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0544257820715e-05, |
| "loss": 3.5389, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.053587187320448e-05, |
| "loss": 3.5351, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.052748592569396e-05, |
| "loss": 3.539, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.051909997818344e-05, |
| "loss": 3.5398, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0510730409476646e-05, |
| "loss": 3.5383, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0502344461966126e-05, |
| "loss": 3.5321, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0493958514455606e-05, |
| "loss": 3.5257, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0485572566945086e-05, |
| "loss": 3.5337, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0477202998238295e-05, |
| "loss": 3.5448, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.046881705072778e-05, |
| "loss": 3.5394, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.046043110321726e-05, |
| "loss": 3.5484, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.045204515570674e-05, |
| "loss": 3.5366, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0443675586999948e-05, |
| "loss": 3.5353, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0435289639489428e-05, |
| "loss": 3.5315, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0426903691978907e-05, |
| "loss": 3.5464, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0418517744468387e-05, |
| "loss": 3.5385, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.04101481757616e-05, |
| "loss": 3.5482, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.040176222825108e-05, |
| "loss": 3.5339, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.039337628074056e-05, |
| "loss": 3.5318, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.038499033323004e-05, |
| "loss": 3.5503, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.037662076452325e-05, |
| "loss": 3.53, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0368234817012732e-05, |
| "loss": 3.5313, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0359848869502212e-05, |
| "loss": 3.5383, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0351462921991692e-05, |
| "loss": 3.5381, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0343076974481172e-05, |
| "loss": 3.5308, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.033470740577438e-05, |
| "loss": 3.5357, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.032632145826386e-05, |
| "loss": 3.5274, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.031793551075334e-05, |
| "loss": 3.5379, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0309549563242824e-05, |
| "loss": 3.5385, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0301179994536034e-05, |
| "loss": 3.5361, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0292794047025513e-05, |
| "loss": 3.5425, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0284408099514993e-05, |
| "loss": 3.556, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0276022152004473e-05, |
| "loss": 3.5393, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0267652583297686e-05, |
| "loss": 3.5384, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0259266635787166e-05, |
| "loss": 3.5334, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0250880688276646e-05, |
| "loss": 3.5337, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0242494740766126e-05, |
| "loss": 3.5315, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0234108793255606e-05, |
| "loss": 3.5339, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0225739224548815e-05, |
| "loss": 3.5319, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0217353277038295e-05, |
| "loss": 3.5428, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0208967329527778e-05, |
| "loss": 3.5354, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0200581382017258e-05, |
| "loss": 3.5389, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0192211813310467e-05, |
| "loss": 3.5338, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0183825865799947e-05, |
| "loss": 3.5324, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0175439918289427e-05, |
| "loss": 3.5319, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0167053970778907e-05, |
| "loss": 3.5464, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.015868440207212e-05, |
| "loss": 3.5491, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.01502984545616e-05, |
| "loss": 3.5424, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.014191250705108e-05, |
| "loss": 3.5275, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0133526559540553e-05, |
| "loss": 3.536, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.012515699083377e-05, |
| "loss": 3.5389, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.011677104332325e-05, |
| "loss": 3.5389, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.010838509581273e-05, |
| "loss": 3.5404, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0099999148302205e-05, |
| "loss": 3.5515, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.009162957959542e-05, |
| "loss": 3.5384, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.00832436320849e-05, |
| "loss": 3.55, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.007485768457438e-05, |
| "loss": 3.5339, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0066471737063857e-05, |
| "loss": 3.5401, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0058102168357073e-05, |
| "loss": 3.5352, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0049716220846553e-05, |
| "loss": 3.5283, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0041330273336026e-05, |
| "loss": 3.5455, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0032944325825506e-05, |
| "loss": 3.5259, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0024558378314986e-05, |
| "loss": 3.5352, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0016188809608202e-05, |
| "loss": 3.5337, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.000780286209768e-05, |
| "loss": 3.5347, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.851925849914551, |
| "eval_runtime": 303.1057, |
| "eval_samples_per_second": 1258.937, |
| "eval_steps_per_second": 39.343, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.999941691458716e-05, |
| "loss": 3.5305, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.999103096707664e-05, |
| "loss": 3.5348, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.998264501956612e-05, |
| "loss": 3.5425, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.997427545085933e-05, |
| "loss": 3.543, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.996588950334881e-05, |
| "loss": 3.5519, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.995750355583829e-05, |
| "loss": 3.532, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.994911760832777e-05, |
| "loss": 3.5318, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.994073166081725e-05, |
| "loss": 3.5289, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.993236209211046e-05, |
| "loss": 3.5412, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.992397614459994e-05, |
| "loss": 3.5333, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.991559019708942e-05, |
| "loss": 3.5345, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9907204249578903e-05, |
| "loss": 3.5337, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9898834680872112e-05, |
| "loss": 3.5288, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9890448733361592e-05, |
| "loss": 3.5302, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9882062785851072e-05, |
| "loss": 3.5259, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9873676838340552e-05, |
| "loss": 3.5235, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9865307269633765e-05, |
| "loss": 3.5309, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9856921322123245e-05, |
| "loss": 3.5324, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9848535374612725e-05, |
| "loss": 3.5289, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9840149427102204e-05, |
| "loss": 3.5477, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9831779858395414e-05, |
| "loss": 3.5333, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9823393910884894e-05, |
| "loss": 3.5369, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9815007963374373e-05, |
| "loss": 3.5302, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9806622015863857e-05, |
| "loss": 3.5423, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9798252447157066e-05, |
| "loss": 3.5293, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9789866499646546e-05, |
| "loss": 3.5355, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9781480552136026e-05, |
| "loss": 3.5274, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9773094604625506e-05, |
| "loss": 3.5339, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9764725035918718e-05, |
| "loss": 3.5241, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9756339088408198e-05, |
| "loss": 3.5314, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9747953140897678e-05, |
| "loss": 3.5382, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9739567193387158e-05, |
| "loss": 3.534, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9731197624680367e-05, |
| "loss": 3.5384, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9722811677169847e-05, |
| "loss": 3.5354, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9714425729659327e-05, |
| "loss": 3.5321, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.970603978214881e-05, |
| "loss": 3.5413, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.969767021344202e-05, |
| "loss": 3.525, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.96892842659315e-05, |
| "loss": 3.5291, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.968089831842098e-05, |
| "loss": 3.5168, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.967251237091046e-05, |
| "loss": 3.5255, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.966412642339994e-05, |
| "loss": 3.5223, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9655756854693152e-05, |
| "loss": 3.5406, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9647370907182632e-05, |
| "loss": 3.5268, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9638984959672112e-05, |
| "loss": 3.5255, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9630599012161592e-05, |
| "loss": 3.5327, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.96222294434548e-05, |
| "loss": 3.5315, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.961384349594428e-05, |
| "loss": 3.5243, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9605457548433764e-05, |
| "loss": 3.5131, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9597071600923244e-05, |
| "loss": 3.5198, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9588702032216453e-05, |
| "loss": 3.5416, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9580316084705933e-05, |
| "loss": 3.5385, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9571930137195413e-05, |
| "loss": 3.5262, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9563544189684893e-05, |
| "loss": 3.5216, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9555174620978106e-05, |
| "loss": 3.5324, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9546788673467586e-05, |
| "loss": 3.5178, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9538402725957065e-05, |
| "loss": 3.5364, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9530016778446545e-05, |
| "loss": 3.5171, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9521647209739755e-05, |
| "loss": 3.5433, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9513261262229234e-05, |
| "loss": 3.5344, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9504875314718718e-05, |
| "loss": 3.518, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9496489367208198e-05, |
| "loss": 3.5224, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9488103419697678e-05, |
| "loss": 3.5373, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9479733850990887e-05, |
| "loss": 3.509, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9471347903480367e-05, |
| "loss": 3.5198, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9462961955969847e-05, |
| "loss": 3.5387, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9454576008459327e-05, |
| "loss": 3.5272, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.944620643975254e-05, |
| "loss": 3.5211, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.943782049224202e-05, |
| "loss": 3.5149, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.94294345447315e-05, |
| "loss": 3.5205, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.942104859722098e-05, |
| "loss": 3.5173, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9412679028514188e-05, |
| "loss": 3.5333, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.940429308100367e-05, |
| "loss": 3.5188, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.939590713349315e-05, |
| "loss": 3.5304, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.938752118598263e-05, |
| "loss": 3.5282, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.937915161727584e-05, |
| "loss": 3.5329, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.937076566976532e-05, |
| "loss": 3.5213, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.93623797222548e-05, |
| "loss": 3.5267, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.935399377474428e-05, |
| "loss": 3.5281, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9345607827233757e-05, |
| "loss": 3.5282, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9337238258526973e-05, |
| "loss": 3.5252, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9328852311016453e-05, |
| "loss": 3.5234, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9320466363505933e-05, |
| "loss": 3.528, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.931208041599541e-05, |
| "loss": 3.5241, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9303710847288622e-05, |
| "loss": 3.5165, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9295324899778105e-05, |
| "loss": 3.5195, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.928693895226758e-05, |
| "loss": 3.5274, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9278553004757058e-05, |
| "loss": 3.5293, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9270183436050274e-05, |
| "loss": 3.5212, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9261797488539754e-05, |
| "loss": 3.5228, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.925341154102923e-05, |
| "loss": 3.5165, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.924502559351871e-05, |
| "loss": 3.5132, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9236656024811926e-05, |
| "loss": 3.5179, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9228270077301406e-05, |
| "loss": 3.5282, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.921988412979088e-05, |
| "loss": 3.5258, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9211498182280363e-05, |
| "loss": 3.5362, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9203128613573575e-05, |
| "loss": 3.5306, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9194742666063052e-05, |
| "loss": 3.5157, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9186356718552532e-05, |
| "loss": 3.5197, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9177970771042012e-05, |
| "loss": 3.5274, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9169601202335228e-05, |
| "loss": 3.5287, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9161215254824704e-05, |
| "loss": 3.5322, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9152829307314184e-05, |
| "loss": 3.5198, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9144443359803664e-05, |
| "loss": 3.5204, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.913607379109688e-05, |
| "loss": 3.5383, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9127687843586353e-05, |
| "loss": 3.515, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9119301896075833e-05, |
| "loss": 3.5207, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9110915948565313e-05, |
| "loss": 3.5219, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9102546379858526e-05, |
| "loss": 3.5271, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9094160432348006e-05, |
| "loss": 3.5137, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9085774484837486e-05, |
| "loss": 3.5219, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9077388537326966e-05, |
| "loss": 3.5167, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9069018968620175e-05, |
| "loss": 3.5265, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9060633021109658e-05, |
| "loss": 3.5214, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9052247073599138e-05, |
| "loss": 3.5209, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9043861126088618e-05, |
| "loss": 3.5311, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9035491557381827e-05, |
| "loss": 3.5381, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9027105609871307e-05, |
| "loss": 3.5272, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9018719662360787e-05, |
| "loss": 3.5201, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9010333714850267e-05, |
| "loss": 3.5208, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.900194776733975e-05, |
| "loss": 3.5176, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.899357819863296e-05, |
| "loss": 3.5225, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.898519225112244e-05, |
| "loss": 3.5202, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.897680630361192e-05, |
| "loss": 3.5169, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.89684203561014e-05, |
| "loss": 3.5251, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8960050787394612e-05, |
| "loss": 3.5231, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.895166483988409e-05, |
| "loss": 3.523, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.894327889237357e-05, |
| "loss": 3.5206, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.893489294486305e-05, |
| "loss": 3.5187, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.892652337615626e-05, |
| "loss": 3.5197, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.891813742864574e-05, |
| "loss": 3.5315, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.890975148113522e-05, |
| "loss": 3.5337, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8901381912428433e-05, |
| "loss": 3.5292, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8892995964917913e-05, |
| "loss": 3.5106, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8884610017407393e-05, |
| "loss": 3.5258, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8876224069896873e-05, |
| "loss": 3.5226, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8867854501190082e-05, |
| "loss": 3.5261, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8859468553679565e-05, |
| "loss": 3.522, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8851082606169045e-05, |
| "loss": 3.5374, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8842696658658525e-05, |
| "loss": 3.5215, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8834327089951734e-05, |
| "loss": 3.5396, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8825941142441214e-05, |
| "loss": 3.5176, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8817555194930694e-05, |
| "loss": 3.5312, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8809169247420174e-05, |
| "loss": 3.5192, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8800799678713387e-05, |
| "loss": 3.5168, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8792413731202867e-05, |
| "loss": 3.5286, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8784027783692347e-05, |
| "loss": 3.5153, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8775641836181827e-05, |
| "loss": 3.5218, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8767272267475036e-05, |
| "loss": 3.5186, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.875888631996452e-05, |
| "loss": 3.5203, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8537473678588867, |
| "eval_runtime": 304.036, |
| "eval_samples_per_second": 1255.085, |
| "eval_steps_per_second": 39.222, |
| "step": 1297440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8750500372454e-05, |
| "loss": 3.5199, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.874211442494348e-05, |
| "loss": 3.5184, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8733744856236688e-05, |
| "loss": 3.5309, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8725358908726168e-05, |
| "loss": 3.5294, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8716972961215648e-05, |
| "loss": 3.5384, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8708587013705128e-05, |
| "loss": 3.5178, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.870020106619461e-05, |
| "loss": 3.5192, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.869183149748782e-05, |
| "loss": 3.5134, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.86834455499773e-05, |
| "loss": 3.5284, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.867505960246678e-05, |
| "loss": 3.5194, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.866669003375999e-05, |
| "loss": 3.5194, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8658304086249473e-05, |
| "loss": 3.5227, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8649934517542682e-05, |
| "loss": 3.5126, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8641548570032162e-05, |
| "loss": 3.5203, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8633162622521642e-05, |
| "loss": 3.5095, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.862477667501112e-05, |
| "loss": 3.5114, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.86163907275006e-05, |
| "loss": 3.5135, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.860800477999008e-05, |
| "loss": 3.5163, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8599618832479565e-05, |
| "loss": 3.5143, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8591232884969045e-05, |
| "loss": 3.5362, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8582863316262254e-05, |
| "loss": 3.5169, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8574477368751734e-05, |
| "loss": 3.5219, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8566091421241214e-05, |
| "loss": 3.5185, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8557705473730694e-05, |
| "loss": 3.5245, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8549335905023906e-05, |
| "loss": 3.5201, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8540949957513386e-05, |
| "loss": 3.5259, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8532564010002866e-05, |
| "loss": 3.5111, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8524178062492346e-05, |
| "loss": 3.5193, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8515808493785555e-05, |
| "loss": 3.5065, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8507422546275035e-05, |
| "loss": 3.5226, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.849903659876452e-05, |
| "loss": 3.5212, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8490650651254e-05, |
| "loss": 3.522, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8482281082547208e-05, |
| "loss": 3.5274, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8473895135036688e-05, |
| "loss": 3.5188, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8465509187526168e-05, |
| "loss": 3.5212, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8457123240015648e-05, |
| "loss": 3.5245, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.844875367130886e-05, |
| "loss": 3.5131, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.844036772379834e-05, |
| "loss": 3.5115, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.843198177628782e-05, |
| "loss": 3.503, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.84235958287773e-05, |
| "loss": 3.5142, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.841522626007051e-05, |
| "loss": 3.512, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.840684031255999e-05, |
| "loss": 3.5273, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.839845436504947e-05, |
| "loss": 3.5141, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8390068417538952e-05, |
| "loss": 3.5105, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.838169884883216e-05, |
| "loss": 3.5212, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.837331290132164e-05, |
| "loss": 3.5118, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.836492695381112e-05, |
| "loss": 3.5145, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.83565410063006e-05, |
| "loss": 3.5025, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8348171437593814e-05, |
| "loss": 3.5015, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8339785490083294e-05, |
| "loss": 3.5289, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8331399542572774e-05, |
| "loss": 3.5243, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8323013595062254e-05, |
| "loss": 3.5098, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8314644026355463e-05, |
| "loss": 3.5129, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8306258078844943e-05, |
| "loss": 3.5168, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8297872131334423e-05, |
| "loss": 3.5051, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.82894861838239e-05, |
| "loss": 3.5233, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8281116615117115e-05, |
| "loss": 3.5012, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8272730667606595e-05, |
| "loss": 3.5275, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8264344720096075e-05, |
| "loss": 3.5247, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.825595877258555e-05, |
| "loss": 3.5073, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.824757282507503e-05, |
| "loss": 3.5078, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8239203256368247e-05, |
| "loss": 3.5226, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8230817308857727e-05, |
| "loss": 3.4932, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.82224313613472e-05, |
| "loss": 3.5111, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.821404541383668e-05, |
| "loss": 3.5239, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8205675845129896e-05, |
| "loss": 3.5166, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8197289897619373e-05, |
| "loss": 3.5051, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8188903950108853e-05, |
| "loss": 3.5008, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8180518002598333e-05, |
| "loss": 3.5091, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.817214843389155e-05, |
| "loss": 3.5068, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8163762486381022e-05, |
| "loss": 3.5145, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8155376538870505e-05, |
| "loss": 3.5051, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8146990591359985e-05, |
| "loss": 3.5169, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.81386210226532e-05, |
| "loss": 3.5185, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8130235075142674e-05, |
| "loss": 3.5203, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8121849127632154e-05, |
| "loss": 3.507, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8113463180121634e-05, |
| "loss": 3.513, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8105093611414847e-05, |
| "loss": 3.5139, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8096707663904326e-05, |
| "loss": 3.5127, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8088321716393806e-05, |
| "loss": 3.5153, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8079935768883286e-05, |
| "loss": 3.5122, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8071566200176495e-05, |
| "loss": 3.5152, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8063180252665975e-05, |
| "loss": 3.5108, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.805479430515546e-05, |
| "loss": 3.4997, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.804640835764494e-05, |
| "loss": 3.5058, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8038038788938148e-05, |
| "loss": 3.5102, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8029652841427628e-05, |
| "loss": 3.5166, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8021266893917108e-05, |
| "loss": 3.5116, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8012880946406588e-05, |
| "loss": 3.5085, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.80045113776998e-05, |
| "loss": 3.5033, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.799612543018928e-05, |
| "loss": 3.5009, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.798773948267876e-05, |
| "loss": 3.503, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.797935353516824e-05, |
| "loss": 3.5165, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.797098396646145e-05, |
| "loss": 3.5131, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.796259801895093e-05, |
| "loss": 3.5212, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7954212071440412e-05, |
| "loss": 3.5142, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7945826123929892e-05, |
| "loss": 3.504, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.79374565552231e-05, |
| "loss": 3.5074, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.792907060771258e-05, |
| "loss": 3.5119, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.792068466020206e-05, |
| "loss": 3.5186, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.791229871269154e-05, |
| "loss": 3.5185, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7903929143984754e-05, |
| "loss": 3.509, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7895543196474234e-05, |
| "loss": 3.5024, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7887157248963714e-05, |
| "loss": 3.5233, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7878771301453194e-05, |
| "loss": 3.4989, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7870401732746403e-05, |
| "loss": 3.5092, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7862015785235883e-05, |
| "loss": 3.509, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7853629837725366e-05, |
| "loss": 3.5112, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7845243890214846e-05, |
| "loss": 3.4997, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7836874321508055e-05, |
| "loss": 3.5125, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7828488373997535e-05, |
| "loss": 3.5019, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7820102426487015e-05, |
| "loss": 3.51, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7811716478976495e-05, |
| "loss": 3.5131, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7803346910269708e-05, |
| "loss": 3.5044, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7794960962759187e-05, |
| "loss": 3.5218, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7786575015248667e-05, |
| "loss": 3.5252, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7778189067738147e-05, |
| "loss": 3.5141, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7769803120227627e-05, |
| "loss": 3.5084, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7761433551520836e-05, |
| "loss": 3.5087, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.775304760401032e-05, |
| "loss": 3.5089, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.77446616564998e-05, |
| "loss": 3.5035, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.773627570898928e-05, |
| "loss": 3.5125, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.772790614028249e-05, |
| "loss": 3.4987, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.771952019277197e-05, |
| "loss": 3.5088, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.771113424526145e-05, |
| "loss": 3.513, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.770276467655466e-05, |
| "loss": 3.5153, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.769437872904414e-05, |
| "loss": 3.5022, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.768599278153362e-05, |
| "loss": 3.5045, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.76776068340231e-05, |
| "loss": 3.513, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.766923726531631e-05, |
| "loss": 3.5116, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.766085131780579e-05, |
| "loss": 3.5205, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7652465370295273e-05, |
| "loss": 3.5167, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7644079422784753e-05, |
| "loss": 3.4978, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7635709854077963e-05, |
| "loss": 3.5114, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7627323906567442e-05, |
| "loss": 3.5121, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7618937959056922e-05, |
| "loss": 3.5088, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7610552011546402e-05, |
| "loss": 3.5107, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7602182442839615e-05, |
| "loss": 3.5198, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7593796495329095e-05, |
| "loss": 3.5104, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7585410547818575e-05, |
| "loss": 3.5279, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7577024600308055e-05, |
| "loss": 3.5076, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7568655031601264e-05, |
| "loss": 3.5138, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7560269084090744e-05, |
| "loss": 3.5072, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7551883136580227e-05, |
| "loss": 3.5045, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7543497189069707e-05, |
| "loss": 3.5122, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7535127620362916e-05, |
| "loss": 3.5067, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7526741672852396e-05, |
| "loss": 3.5079, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7518355725341876e-05, |
| "loss": 3.5023, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7509969777831356e-05, |
| "loss": 3.5052, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8550965785980225, |
| "eval_runtime": 303.9798, |
| "eval_samples_per_second": 1255.317, |
| "eval_steps_per_second": 39.23, |
| "step": 1373760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7501583830320836e-05, |
| "loss": 3.5061, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7493197882810316e-05, |
| "loss": 3.5062, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.74848119352998e-05, |
| "loss": 3.5186, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.747642598778928e-05, |
| "loss": 3.5128, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.746805641908249e-05, |
| "loss": 3.5287, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.745967047157197e-05, |
| "loss": 3.5033, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7451284524061448e-05, |
| "loss": 3.5089, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.744291495535466e-05, |
| "loss": 3.4983, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.743452900784414e-05, |
| "loss": 3.5157, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.742614306033362e-05, |
| "loss": 3.5102, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.74177571128231e-05, |
| "loss": 3.5039, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.740938754411631e-05, |
| "loss": 3.5152, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7401017975409522e-05, |
| "loss": 3.4981, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7392632027899002e-05, |
| "loss": 3.5096, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7384246080388482e-05, |
| "loss": 3.4954, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7375860132877962e-05, |
| "loss": 3.5001, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7367474185367442e-05, |
| "loss": 3.4946, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7359088237856922e-05, |
| "loss": 3.5062, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.73507022903464e-05, |
| "loss": 3.5016, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.734231634283588e-05, |
| "loss": 3.522, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7333946774129094e-05, |
| "loss": 3.5074, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7325560826618574e-05, |
| "loss": 3.5076, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7317174879108048e-05, |
| "loss": 3.5039, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7308788931597527e-05, |
| "loss": 3.5132, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7300419362890743e-05, |
| "loss": 3.5046, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7292033415380223e-05, |
| "loss": 3.5143, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.72836474678697e-05, |
| "loss": 3.4998, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.727526152035918e-05, |
| "loss": 3.506, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7266891951652396e-05, |
| "loss": 3.4925, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.725850600414187e-05, |
| "loss": 3.51, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7250120056631352e-05, |
| "loss": 3.5095, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7241750487924568e-05, |
| "loss": 3.5082, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7233364540414048e-05, |
| "loss": 3.5171, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.722497859290352e-05, |
| "loss": 3.5073, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7216592645393e-05, |
| "loss": 3.5035, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7208223076686217e-05, |
| "loss": 3.5143, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7199837129175697e-05, |
| "loss": 3.5027, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7191451181665174e-05, |
| "loss": 3.498, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7183065234154654e-05, |
| "loss": 3.4911, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.717469566544787e-05, |
| "loss": 3.4996, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7166309717937343e-05, |
| "loss": 3.4975, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7157923770426823e-05, |
| "loss": 3.5123, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7149537822916306e-05, |
| "loss": 3.5069, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7141168254209522e-05, |
| "loss": 3.4943, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7132782306698995e-05, |
| "loss": 3.5103, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7124396359188475e-05, |
| "loss": 3.5013, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7116010411677955e-05, |
| "loss": 3.4965, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7107624464167435e-05, |
| "loss": 3.4919, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7099254895460647e-05, |
| "loss": 3.4857, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7090868947950127e-05, |
| "loss": 3.5157, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7082483000439607e-05, |
| "loss": 3.511, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7074097052929087e-05, |
| "loss": 3.4982, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7065727484222296e-05, |
| "loss": 3.5003, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7057341536711776e-05, |
| "loss": 3.5074, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.704895558920126e-05, |
| "loss": 3.4878, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.704056964169074e-05, |
| "loss": 3.5109, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.703220007298395e-05, |
| "loss": 3.4909, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.702381412547343e-05, |
| "loss": 3.5091, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.701542817796291e-05, |
| "loss": 3.5152, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.700704223045239e-05, |
| "loss": 3.4968, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.69986726617456e-05, |
| "loss": 3.4899, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.699028671423508e-05, |
| "loss": 3.509, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.698190076672456e-05, |
| "loss": 3.4837, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.697351481921404e-05, |
| "loss": 3.497, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.696514525050725e-05, |
| "loss": 3.5067, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.695675930299673e-05, |
| "loss": 3.5062, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6948373355486213e-05, |
| "loss": 3.4941, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6939987407975693e-05, |
| "loss": 3.488, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6931617839268902e-05, |
| "loss": 3.4912, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6923231891758382e-05, |
| "loss": 3.491, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6914845944247862e-05, |
| "loss": 3.5036, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6906459996737342e-05, |
| "loss": 3.4932, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6898090428030555e-05, |
| "loss": 3.5066, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6889704480520035e-05, |
| "loss": 3.505, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6881318533009515e-05, |
| "loss": 3.5077, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6872932585498994e-05, |
| "loss": 3.493, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6864563016792204e-05, |
| "loss": 3.5018, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6856177069281684e-05, |
| "loss": 3.5056, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6847791121771167e-05, |
| "loss": 3.4981, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6839405174260647e-05, |
| "loss": 3.5039, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6831035605553856e-05, |
| "loss": 3.4998, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6822649658043336e-05, |
| "loss": 3.5, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6814263710532816e-05, |
| "loss": 3.4969, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6805877763022296e-05, |
| "loss": 3.4856, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6797508194315508e-05, |
| "loss": 3.4977, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6789122246804988e-05, |
| "loss": 3.495, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6780736299294468e-05, |
| "loss": 3.5047, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6772350351783948e-05, |
| "loss": 3.4994, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6763980783077157e-05, |
| "loss": 3.4978, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6755594835566637e-05, |
| "loss": 3.4928, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.674720888805612e-05, |
| "loss": 3.4894, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.67388229405456e-05, |
| "loss": 3.4875, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.673045337183881e-05, |
| "loss": 3.5015, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.672206742432829e-05, |
| "loss": 3.5082, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.671368147681777e-05, |
| "loss": 3.5056, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.670529552930725e-05, |
| "loss": 3.4998, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6696925960600462e-05, |
| "loss": 3.4919, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6688540013089942e-05, |
| "loss": 3.4967, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6680154065579422e-05, |
| "loss": 3.4967, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6671768118068902e-05, |
| "loss": 3.5073, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.666339854936211e-05, |
| "loss": 3.505, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.665501260185159e-05, |
| "loss": 3.4977, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6646626654341074e-05, |
| "loss": 3.4909, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6638240706830554e-05, |
| "loss": 3.5135, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6629871138123763e-05, |
| "loss": 3.4854, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6621485190613243e-05, |
| "loss": 3.5001, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6613099243102723e-05, |
| "loss": 3.4903, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6604713295592203e-05, |
| "loss": 3.4999, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6596343726885416e-05, |
| "loss": 3.4901, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6587957779374896e-05, |
| "loss": 3.5026, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6579571831864376e-05, |
| "loss": 3.493, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6571185884353855e-05, |
| "loss": 3.4927, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6562816315647065e-05, |
| "loss": 3.5011, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6554430368136545e-05, |
| "loss": 3.4901, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6546044420626024e-05, |
| "loss": 3.5115, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6537658473115508e-05, |
| "loss": 3.5097, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6529288904408717e-05, |
| "loss": 3.5016, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6520902956898197e-05, |
| "loss": 3.4958, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6512517009387677e-05, |
| "loss": 3.4958, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6504131061877157e-05, |
| "loss": 3.4956, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.649576149317037e-05, |
| "loss": 3.4933, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.648737554565985e-05, |
| "loss": 3.5002, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.647898959814933e-05, |
| "loss": 3.4872, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.647060365063881e-05, |
| "loss": 3.4981, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6462234081932018e-05, |
| "loss": 3.5008, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6453848134421498e-05, |
| "loss": 3.5011, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6445462186910978e-05, |
| "loss": 3.492, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.643707623940046e-05, |
| "loss": 3.4918, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.642870667069367e-05, |
| "loss": 3.5024, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.642032072318315e-05, |
| "loss": 3.5003, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.641193477567263e-05, |
| "loss": 3.5024, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.640354882816211e-05, |
| "loss": 3.5079, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6395179259455323e-05, |
| "loss": 3.4874, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6386793311944803e-05, |
| "loss": 3.4994, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6378407364434283e-05, |
| "loss": 3.5004, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6370021416923763e-05, |
| "loss": 3.4954, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6361651848216972e-05, |
| "loss": 3.4978, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6353265900706452e-05, |
| "loss": 3.5096, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6344879953195932e-05, |
| "loss": 3.4965, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6336494005685415e-05, |
| "loss": 3.516, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6328124436978624e-05, |
| "loss": 3.4946, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6319738489468104e-05, |
| "loss": 3.4982, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6311352541957584e-05, |
| "loss": 3.4975, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6302966594447064e-05, |
| "loss": 3.4928, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6294597025740277e-05, |
| "loss": 3.4979, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6286211078229757e-05, |
| "loss": 3.4959, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6277825130719237e-05, |
| "loss": 3.4944, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6269439183208716e-05, |
| "loss": 3.49, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6261069614501926e-05, |
| "loss": 3.4959, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.854766845703125, |
| "eval_runtime": 302.7824, |
| "eval_samples_per_second": 1260.281, |
| "eval_steps_per_second": 39.385, |
| "step": 1450080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6252683666991406e-05, |
| "loss": 3.4945, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6244297719480885e-05, |
| "loss": 3.4901, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.623591177197037e-05, |
| "loss": 3.5037, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6227525824459842e-05, |
| "loss": 3.4996, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6219156255753058e-05, |
| "loss": 3.5176, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6210770308242538e-05, |
| "loss": 3.4926, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6202384360732018e-05, |
| "loss": 3.4967, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6193998413221494e-05, |
| "loss": 3.4841, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.618562884451471e-05, |
| "loss": 3.502, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.617724289700419e-05, |
| "loss": 3.4964, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6168856949493663e-05, |
| "loss": 3.4896, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6160471001983143e-05, |
| "loss": 3.4974, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6152117812080092e-05, |
| "loss": 3.491, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.614373186456957e-05, |
| "loss": 3.495, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.613534591705905e-05, |
| "loss": 3.4842, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.612695996954853e-05, |
| "loss": 3.4862, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.611857402203801e-05, |
| "loss": 3.4824, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.611018807452749e-05, |
| "loss": 3.4917, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6101802127016968e-05, |
| "loss": 3.4898, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6093416179506448e-05, |
| "loss": 3.5095, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6085046610799664e-05, |
| "loss": 3.494, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6076660663289137e-05, |
| "loss": 3.5011, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6068274715778617e-05, |
| "loss": 3.4901, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6059888768268097e-05, |
| "loss": 3.499, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6051519199561313e-05, |
| "loss": 3.4924, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.604313325205079e-05, |
| "loss": 3.4984, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.603474730454027e-05, |
| "loss": 3.4898, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.602636135702975e-05, |
| "loss": 3.4878, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6017991788322965e-05, |
| "loss": 3.4834, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.600960584081244e-05, |
| "loss": 3.4951, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6001219893301922e-05, |
| "loss": 3.5007, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5992833945791402e-05, |
| "loss": 3.4951, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.598446437708461e-05, |
| "loss": 3.5066, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.597607842957409e-05, |
| "loss": 3.4956, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.596769248206357e-05, |
| "loss": 3.489, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5959322913356787e-05, |
| "loss": 3.4971, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5950936965846263e-05, |
| "loss": 3.4973, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5942551018335743e-05, |
| "loss": 3.4802, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5934165070825223e-05, |
| "loss": 3.4793, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.592579550211844e-05, |
| "loss": 3.4859, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5917409554607912e-05, |
| "loss": 3.4923, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5909023607097392e-05, |
| "loss": 3.4953, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5900637659586875e-05, |
| "loss": 3.4953, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5892268090880085e-05, |
| "loss": 3.4816, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5883882143369564e-05, |
| "loss": 3.4964, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5875496195859044e-05, |
| "loss": 3.4907, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5867110248348524e-05, |
| "loss": 3.4866, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5858724300838004e-05, |
| "loss": 3.4787, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5850354732131217e-05, |
| "loss": 3.4682, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5841968784620697e-05, |
| "loss": 3.5107, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5833582837110177e-05, |
| "loss": 3.4969, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5825196889599657e-05, |
| "loss": 3.4893, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5816827320892866e-05, |
| "loss": 3.4895, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5808441373382346e-05, |
| "loss": 3.4868, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.580005542587183e-05, |
| "loss": 3.4784, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.579166947836131e-05, |
| "loss": 3.4946, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5783299909654518e-05, |
| "loss": 3.4837, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5774913962143998e-05, |
| "loss": 3.4916, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5766528014633478e-05, |
| "loss": 3.5055, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.575815844592669e-05, |
| "loss": 3.4852, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.574977249841617e-05, |
| "loss": 3.4769, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.574138655090565e-05, |
| "loss": 3.5023, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.573300060339513e-05, |
| "loss": 3.4727, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.572463103468834e-05, |
| "loss": 3.4807, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.571624508717782e-05, |
| "loss": 3.4982, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.57078591396673e-05, |
| "loss": 3.4935, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5699473192156783e-05, |
| "loss": 3.4823, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5691103623449992e-05, |
| "loss": 3.4827, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5682717675939472e-05, |
| "loss": 3.476, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5674331728428952e-05, |
| "loss": 3.4797, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5665945780918432e-05, |
| "loss": 3.4935, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5657576212211644e-05, |
| "loss": 3.4765, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5649190264701124e-05, |
| "loss": 3.4965, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5640804317190604e-05, |
| "loss": 3.4878, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5632418369680084e-05, |
| "loss": 3.4997, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5624048800973293e-05, |
| "loss": 3.4831, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5615662853462773e-05, |
| "loss": 3.4834, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5607276905952253e-05, |
| "loss": 3.4922, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5598890958441733e-05, |
| "loss": 3.4825, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5590521389734946e-05, |
| "loss": 3.4952, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5582135442224425e-05, |
| "loss": 3.4872, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5573749494713905e-05, |
| "loss": 3.4859, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5565363547203385e-05, |
| "loss": 3.4873, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5556993978496594e-05, |
| "loss": 3.4759, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5548608030986078e-05, |
| "loss": 3.4831, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5540222083475558e-05, |
| "loss": 3.4865, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5531836135965038e-05, |
| "loss": 3.4916, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5523466567258247e-05, |
| "loss": 3.4846, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5515080619747727e-05, |
| "loss": 3.4863, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5506694672237207e-05, |
| "loss": 3.4791, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5498308724726687e-05, |
| "loss": 3.4768, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.54899391560199e-05, |
| "loss": 3.4783, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.548155320850938e-05, |
| "loss": 3.4841, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.547316726099886e-05, |
| "loss": 3.4959, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.546478131348834e-05, |
| "loss": 3.4997, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5456411744781548e-05, |
| "loss": 3.4868, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.544802579727103e-05, |
| "loss": 3.4757, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.543963984976051e-05, |
| "loss": 3.4843, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.543125390224999e-05, |
| "loss": 3.4854, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.54228843335432e-05, |
| "loss": 3.4912, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.541449838603268e-05, |
| "loss": 3.4926, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.540611243852216e-05, |
| "loss": 3.4868, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.539772649101164e-05, |
| "loss": 3.4782, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5389356922304853e-05, |
| "loss": 3.5014, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5380970974794333e-05, |
| "loss": 3.4704, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5372585027283813e-05, |
| "loss": 3.4893, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5364199079773293e-05, |
| "loss": 3.4767, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5355829511066502e-05, |
| "loss": 3.4875, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5347443563555985e-05, |
| "loss": 3.4772, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5339057616045465e-05, |
| "loss": 3.4882, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5330671668534945e-05, |
| "loss": 3.4819, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5322302099828154e-05, |
| "loss": 3.4803, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5313916152317634e-05, |
| "loss": 3.4867, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5305530204807114e-05, |
| "loss": 3.4792, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5297144257296594e-05, |
| "loss": 3.4973, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5288774688589807e-05, |
| "loss": 3.4967, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5280388741079286e-05, |
| "loss": 3.4898, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5272002793568766e-05, |
| "loss": 3.4829, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5263616846058246e-05, |
| "loss": 3.4861, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5255247277351455e-05, |
| "loss": 3.4809, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.524686132984094e-05, |
| "loss": 3.4825, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.523847538233042e-05, |
| "loss": 3.4879, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.52300894348199e-05, |
| "loss": 3.4712, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5221719866113108e-05, |
| "loss": 3.487, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5213333918602588e-05, |
| "loss": 3.4858, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5204947971092068e-05, |
| "loss": 3.49, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5196562023581548e-05, |
| "loss": 3.479, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.518819245487476e-05, |
| "loss": 3.4808, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.517980650736424e-05, |
| "loss": 3.4835, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.517142055985372e-05, |
| "loss": 3.4954, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.51630346123432e-05, |
| "loss": 3.4876, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.515466504363641e-05, |
| "loss": 3.4964, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5146279096125892e-05, |
| "loss": 3.4732, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5137893148615372e-05, |
| "loss": 3.4895, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5129507201104852e-05, |
| "loss": 3.4885, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.512113763239806e-05, |
| "loss": 3.4827, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.511275168488754e-05, |
| "loss": 3.4838, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.510436573737702e-05, |
| "loss": 3.4984, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.50959797898665e-05, |
| "loss": 3.4864, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5087610221159714e-05, |
| "loss": 3.5059, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5079224273649194e-05, |
| "loss": 3.4808, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5070838326138674e-05, |
| "loss": 3.4894, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5062452378628154e-05, |
| "loss": 3.481, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5054082809921363e-05, |
| "loss": 3.4822, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5045696862410846e-05, |
| "loss": 3.4874, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5037310914900326e-05, |
| "loss": 3.4835, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5028924967389806e-05, |
| "loss": 3.4811, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5020555398683015e-05, |
| "loss": 3.4755, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5012169451172495e-05, |
| "loss": 3.4838, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8566389083862305, |
| "eval_runtime": 309.8866, |
| "eval_samples_per_second": 1231.389, |
| "eval_steps_per_second": 38.482, |
| "step": 1526400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.5003783503661975e-05, |
| "loss": 3.4859, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4995397556151455e-05, |
| "loss": 3.4788, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4987027987444668e-05, |
| "loss": 3.4942, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4978642039934144e-05, |
| "loss": 3.4882, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4970256092423624e-05, |
| "loss": 3.5037, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4961870144913104e-05, |
| "loss": 3.4845, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4953500576206316e-05, |
| "loss": 3.4838, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4945114628695796e-05, |
| "loss": 3.4744, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4936728681185276e-05, |
| "loss": 3.4889, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4928342733674756e-05, |
| "loss": 3.4846, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4919973164967965e-05, |
| "loss": 3.4824, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4911587217457445e-05, |
| "loss": 3.4839, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.490321764875066e-05, |
| "loss": 3.4768, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.489483170124014e-05, |
| "loss": 3.4851, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4886445753729618e-05, |
| "loss": 3.4727, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4878059806219098e-05, |
| "loss": 3.4739, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4869673858708578e-05, |
| "loss": 3.473, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4861287911198058e-05, |
| "loss": 3.4837, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4852901963687538e-05, |
| "loss": 3.4758, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4844516016177018e-05, |
| "loss": 3.4953, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.483614644747023e-05, |
| "loss": 3.4837, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.482776049995971e-05, |
| "loss": 3.4918, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.481937455244919e-05, |
| "loss": 3.4768, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.481098860493867e-05, |
| "loss": 3.4846, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.480261903623188e-05, |
| "loss": 3.4815, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4794233088721362e-05, |
| "loss": 3.4884, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4785847141210842e-05, |
| "loss": 3.4773, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4777461193700322e-05, |
| "loss": 3.4775, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.476909162499353e-05, |
| "loss": 3.4719, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.476070567748301e-05, |
| "loss": 3.4804, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.475231972997249e-05, |
| "loss": 3.4877, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.474393378246197e-05, |
| "loss": 3.4839, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4735564213755184e-05, |
| "loss": 3.4927, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4727178266244664e-05, |
| "loss": 3.4804, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4718792318734144e-05, |
| "loss": 3.4794, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.471040637122362e-05, |
| "loss": 3.4887, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4702036802516833e-05, |
| "loss": 3.4864, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4693650855006316e-05, |
| "loss": 3.4678, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4685264907495796e-05, |
| "loss": 3.472, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4676878959985273e-05, |
| "loss": 3.4706, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4668509391278485e-05, |
| "loss": 3.4739, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4660123443767965e-05, |
| "loss": 3.4871, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.465173749625744e-05, |
| "loss": 3.4797, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4643351548746925e-05, |
| "loss": 3.4758, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4634981980040137e-05, |
| "loss": 3.4828, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4626596032529617e-05, |
| "loss": 3.4752, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4618210085019094e-05, |
| "loss": 3.4762, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4609824137508574e-05, |
| "loss": 3.4702, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4601454568801786e-05, |
| "loss": 3.4509, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.459306862129127e-05, |
| "loss": 3.4977, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4584682673780746e-05, |
| "loss": 3.4845, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4576296726270226e-05, |
| "loss": 3.4838, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.456792715756344e-05, |
| "loss": 3.4753, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4559541210052915e-05, |
| "loss": 3.477, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4551155262542395e-05, |
| "loss": 3.465, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.454276931503188e-05, |
| "loss": 3.4819, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.453439974632509e-05, |
| "loss": 3.475, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4526013798814568e-05, |
| "loss": 3.4779, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4517627851304048e-05, |
| "loss": 3.4938, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4509241903793528e-05, |
| "loss": 3.4729, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.450087233508674e-05, |
| "loss": 3.4689, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.449248638757622e-05, |
| "loss": 3.4903, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.44841004400657e-05, |
| "loss": 3.4592, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4475730871358912e-05, |
| "loss": 3.4726, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.446734492384839e-05, |
| "loss": 3.4841, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.445895897633787e-05, |
| "loss": 3.4805, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.445057302882735e-05, |
| "loss": 3.4692, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4442203460120565e-05, |
| "loss": 3.471, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.443381751261004e-05, |
| "loss": 3.46, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.442543156509952e-05, |
| "loss": 3.47, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4417045617589e-05, |
| "loss": 3.4797, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4408676048882214e-05, |
| "loss": 3.4676, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4400290101371694e-05, |
| "loss": 3.4825, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4391904153861174e-05, |
| "loss": 3.4789, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4383518206350654e-05, |
| "loss": 3.4889, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4375132258840134e-05, |
| "loss": 3.4714, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4366746311329614e-05, |
| "loss": 3.4711, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4358360363819093e-05, |
| "loss": 3.4806, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4349990795112303e-05, |
| "loss": 3.4683, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4341604847601786e-05, |
| "loss": 3.4846, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4333218900091266e-05, |
| "loss": 3.4765, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4324832952580746e-05, |
| "loss": 3.4748, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4316463383873955e-05, |
| "loss": 3.477, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4308093815167164e-05, |
| "loss": 3.4641, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4299707867656647e-05, |
| "loss": 3.4698, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4291321920146127e-05, |
| "loss": 3.4732, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4282935972635607e-05, |
| "loss": 3.4802, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4274550025125087e-05, |
| "loss": 3.4671, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4266164077614567e-05, |
| "loss": 3.4745, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4257778130104044e-05, |
| "loss": 3.4667, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4249392182593524e-05, |
| "loss": 3.47, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.424102261388674e-05, |
| "loss": 3.463, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.423263666637622e-05, |
| "loss": 3.4771, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4224250718865696e-05, |
| "loss": 3.4808, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.421588115015891e-05, |
| "loss": 3.4823, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.420749520264839e-05, |
| "loss": 3.4811, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4199109255137865e-05, |
| "loss": 3.4639, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.419072330762735e-05, |
| "loss": 3.4715, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.418235373892056e-05, |
| "loss": 3.4737, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.417396779141004e-05, |
| "loss": 3.481, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4165581843899517e-05, |
| "loss": 3.479, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4157195896388997e-05, |
| "loss": 3.474, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.414882632768221e-05, |
| "loss": 3.4707, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4140440380171693e-05, |
| "loss": 3.4876, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.413205443266117e-05, |
| "loss": 3.4613, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.412366848515065e-05, |
| "loss": 3.4766, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4115298916443862e-05, |
| "loss": 3.4641, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.410691296893334e-05, |
| "loss": 3.4781, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.409852702142282e-05, |
| "loss": 3.4636, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4090141073912302e-05, |
| "loss": 3.4768, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4081771505205515e-05, |
| "loss": 3.4676, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.407338555769499e-05, |
| "loss": 3.4712, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.406499961018447e-05, |
| "loss": 3.4741, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.405661366267395e-05, |
| "loss": 3.4717, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4048244093967164e-05, |
| "loss": 3.4828, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4039858146456644e-05, |
| "loss": 3.486, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4031472198946123e-05, |
| "loss": 3.4758, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4023086251435603e-05, |
| "loss": 3.4763, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4014716682728813e-05, |
| "loss": 3.4713, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4006330735218292e-05, |
| "loss": 3.4708, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3997944787707772e-05, |
| "loss": 3.4683, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3989558840197256e-05, |
| "loss": 3.4802, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3981189271490465e-05, |
| "loss": 3.457, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3972803323979945e-05, |
| "loss": 3.4757, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3964417376469425e-05, |
| "loss": 3.4717, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3956031428958905e-05, |
| "loss": 3.4797, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3947661860252117e-05, |
| "loss": 3.4708, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3939275912741597e-05, |
| "loss": 3.4694, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3930889965231077e-05, |
| "loss": 3.4705, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3922504017720557e-05, |
| "loss": 3.4824, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3914134449013766e-05, |
| "loss": 3.4794, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3905748501503246e-05, |
| "loss": 3.4831, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3897362553992726e-05, |
| "loss": 3.4636, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.388897660648221e-05, |
| "loss": 3.4765, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.388060703777542e-05, |
| "loss": 3.4756, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.38722210902649e-05, |
| "loss": 3.4699, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.386383514275438e-05, |
| "loss": 3.4722, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.385544919524386e-05, |
| "loss": 3.4849, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.384707962653707e-05, |
| "loss": 3.4742, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.383869367902655e-05, |
| "loss": 3.4962, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.383030773151603e-05, |
| "loss": 3.4703, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.382192178400551e-05, |
| "loss": 3.4773, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.381355221529872e-05, |
| "loss": 3.467, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.38051662677882e-05, |
| "loss": 3.4711, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.379678032027768e-05, |
| "loss": 3.4773, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3788394372767163e-05, |
| "loss": 3.4722, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3780024804060372e-05, |
| "loss": 3.4748, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3771638856549852e-05, |
| "loss": 3.4571, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3763252909039332e-05, |
| "loss": 3.4748, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.858839988708496, |
| "eval_runtime": 310.4692, |
| "eval_samples_per_second": 1229.078, |
| "eval_steps_per_second": 38.41, |
| "step": 1602720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3754866961528812e-05, |
| "loss": 3.4729, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3746481014018292e-05, |
| "loss": 3.4699, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3738095066507772e-05, |
| "loss": 3.4809, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3729709118997252e-05, |
| "loss": 3.4775, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3721339550290464e-05, |
| "loss": 3.4872, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.371295360277994e-05, |
| "loss": 3.4713, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3704584034073153e-05, |
| "loss": 3.4727, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3696198086562633e-05, |
| "loss": 3.4618, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3687812139052117e-05, |
| "loss": 3.4755, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3679426191541593e-05, |
| "loss": 3.4732, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3671040244031073e-05, |
| "loss": 3.4747, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3662654296520553e-05, |
| "loss": 3.4657, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3654284727813762e-05, |
| "loss": 3.4727, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3645898780303242e-05, |
| "loss": 3.4658, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3637512832792726e-05, |
| "loss": 3.4627, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3629126885282206e-05, |
| "loss": 3.4596, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3620740937771686e-05, |
| "loss": 3.4617, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3612354990261166e-05, |
| "loss": 3.4712, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3603969042750645e-05, |
| "loss": 3.465, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3595583095240122e-05, |
| "loss": 3.4843, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3587213526533335e-05, |
| "loss": 3.4748, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3578827579022818e-05, |
| "loss": 3.4799, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3570441631512298e-05, |
| "loss": 3.466, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3562055684001774e-05, |
| "loss": 3.4707, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3553686115294987e-05, |
| "loss": 3.4694, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3545300167784467e-05, |
| "loss": 3.4806, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3536914220273947e-05, |
| "loss": 3.4678, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3528528272763427e-05, |
| "loss": 3.4649, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.352015870405664e-05, |
| "loss": 3.4613, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.351177275654612e-05, |
| "loss": 3.4694, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3503386809035596e-05, |
| "loss": 3.4745, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3495000861525076e-05, |
| "loss": 3.474, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3486631292818288e-05, |
| "loss": 3.4771, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.347824534530777e-05, |
| "loss": 3.4742, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3469859397797248e-05, |
| "loss": 3.4702, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3461473450286728e-05, |
| "loss": 3.475, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.345310388157994e-05, |
| "loss": 3.4735, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3444717934069417e-05, |
| "loss": 3.4605, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3436331986558897e-05, |
| "loss": 3.4596, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.342794603904838e-05, |
| "loss": 3.4601, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3419576470341593e-05, |
| "loss": 3.4614, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.341119052283107e-05, |
| "loss": 3.4764, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.340280457532055e-05, |
| "loss": 3.4711, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.339441862781003e-05, |
| "loss": 3.4609, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3386049059103242e-05, |
| "loss": 3.4706, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3377663111592722e-05, |
| "loss": 3.4648, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3369277164082202e-05, |
| "loss": 3.4668, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3360891216571682e-05, |
| "loss": 3.4575, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.335252164786489e-05, |
| "loss": 3.4395, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.334413570035437e-05, |
| "loss": 3.4882, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.333574975284385e-05, |
| "loss": 3.4694, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3327363805333334e-05, |
| "loss": 3.475, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3318994236626543e-05, |
| "loss": 3.4595, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3310608289116023e-05, |
| "loss": 3.4689, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3302222341605503e-05, |
| "loss": 3.4543, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3293852772898716e-05, |
| "loss": 3.4672, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3285466825388196e-05, |
| "loss": 3.467, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3277080877877675e-05, |
| "loss": 3.4653, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3268694930367155e-05, |
| "loss": 3.4833, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3260325361660365e-05, |
| "loss": 3.4606, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3251939414149844e-05, |
| "loss": 3.4542, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3243553466639324e-05, |
| "loss": 3.4804, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3235167519128804e-05, |
| "loss": 3.4474, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3226797950422017e-05, |
| "loss": 3.4566, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3218412002911497e-05, |
| "loss": 3.4732, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3210026055400977e-05, |
| "loss": 3.4733, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3201640107890457e-05, |
| "loss": 3.4559, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3193270539183666e-05, |
| "loss": 3.4644, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.318488459167315e-05, |
| "loss": 3.4482, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.317649864416263e-05, |
| "loss": 3.4613, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.316811269665211e-05, |
| "loss": 3.4666, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3159743127945318e-05, |
| "loss": 3.455, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3151357180434798e-05, |
| "loss": 3.4715, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3142971232924278e-05, |
| "loss": 3.4655, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3134585285413758e-05, |
| "loss": 3.4787, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.312621571670697e-05, |
| "loss": 3.4577, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.311782976919645e-05, |
| "loss": 3.4645, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.310944382168593e-05, |
| "loss": 3.4675, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.310105787417541e-05, |
| "loss": 3.4569, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.309268830546862e-05, |
| "loss": 3.4715, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3084302357958103e-05, |
| "loss": 3.4653, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3075916410447583e-05, |
| "loss": 3.4634, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3067530462937063e-05, |
| "loss": 3.4663, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3059160894230272e-05, |
| "loss": 3.4548, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3050774946719752e-05, |
| "loss": 3.455, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3042388999209232e-05, |
| "loss": 3.4625, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3034003051698712e-05, |
| "loss": 3.4681, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3025633482991924e-05, |
| "loss": 3.4597, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3017247535481404e-05, |
| "loss": 3.4637, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3008861587970884e-05, |
| "loss": 3.4499, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3000475640460364e-05, |
| "loss": 3.4616, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2992106071753573e-05, |
| "loss": 3.4537, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2983720124243057e-05, |
| "loss": 3.4634, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2975334176732536e-05, |
| "loss": 3.4754, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2966948229222016e-05, |
| "loss": 3.4672, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2958578660515226e-05, |
| "loss": 3.4695, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2950192713004705e-05, |
| "loss": 3.4518, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2941806765494185e-05, |
| "loss": 3.4618, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2933420817983665e-05, |
| "loss": 3.4601, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2925051249276878e-05, |
| "loss": 3.4709, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2916665301766358e-05, |
| "loss": 3.4644, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2908279354255838e-05, |
| "loss": 3.4683, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2899893406745314e-05, |
| "loss": 3.4586, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2891523838038527e-05, |
| "loss": 3.4731, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.288313789052801e-05, |
| "loss": 3.4541, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.287475194301749e-05, |
| "loss": 3.4649, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2866365995506967e-05, |
| "loss": 3.4512, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.285799642680018e-05, |
| "loss": 3.4688, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.284961047928966e-05, |
| "loss": 3.4545, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.284122453177914e-05, |
| "loss": 3.4634, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.283283858426862e-05, |
| "loss": 3.4582, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.282446901556183e-05, |
| "loss": 3.461, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.281608306805131e-05, |
| "loss": 3.4605, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2807697120540788e-05, |
| "loss": 3.4593, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2799311173030268e-05, |
| "loss": 3.4695, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.279094160432348e-05, |
| "loss": 3.4726, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2782555656812964e-05, |
| "loss": 3.4666, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.277416970930244e-05, |
| "loss": 3.4676, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.276578376179192e-05, |
| "loss": 3.4574, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2757414193085133e-05, |
| "loss": 3.4597, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2749028245574613e-05, |
| "loss": 3.4608, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.274064229806409e-05, |
| "loss": 3.4671, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2732256350553573e-05, |
| "loss": 3.4457, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2723886781846785e-05, |
| "loss": 3.4644, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2715500834336262e-05, |
| "loss": 3.4623, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2707114886825742e-05, |
| "loss": 3.4644, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2698728939315222e-05, |
| "loss": 3.4593, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2690359370608434e-05, |
| "loss": 3.4594, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2681973423097914e-05, |
| "loss": 3.4597, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2673587475587394e-05, |
| "loss": 3.471, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2665201528076874e-05, |
| "loss": 3.468, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2656831959370087e-05, |
| "loss": 3.4733, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2648446011859563e-05, |
| "loss": 3.452, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2640060064349043e-05, |
| "loss": 3.4669, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2631674116838526e-05, |
| "loss": 3.4666, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2623304548131736e-05, |
| "loss": 3.4592, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2614918600621215e-05, |
| "loss": 3.4643, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2606532653110695e-05, |
| "loss": 3.4715, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2598146705600175e-05, |
| "loss": 3.4645, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2589777136893388e-05, |
| "loss": 3.4808, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2581391189382868e-05, |
| "loss": 3.4614, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2573005241872348e-05, |
| "loss": 3.4619, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2564619294361828e-05, |
| "loss": 3.4593, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2556249725655037e-05, |
| "loss": 3.4595, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2547863778144517e-05, |
| "loss": 3.46, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2539477830633997e-05, |
| "loss": 3.4599, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.253110826192721e-05, |
| "loss": 3.4659, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.252272231441669e-05, |
| "loss": 3.4452, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.251433636690617e-05, |
| "loss": 3.4668, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8602488040924072, |
| "eval_runtime": 303.1037, |
| "eval_samples_per_second": 1258.946, |
| "eval_steps_per_second": 39.343, |
| "step": 1679040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.250595041939565e-05, |
| "loss": 3.4656, |
| "step": 1679360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2497580850688858e-05, |
| "loss": 3.4578, |
| "step": 1679872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.248919490317834e-05, |
| "loss": 3.4662, |
| "step": 1680384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.248080895566782e-05, |
| "loss": 3.4683, |
| "step": 1680896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.24724230081573e-05, |
| "loss": 3.4783, |
| "step": 1681408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.246405343945051e-05, |
| "loss": 3.46, |
| "step": 1681920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2455683870743723e-05, |
| "loss": 3.4648, |
| "step": 1682432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2447297923233203e-05, |
| "loss": 3.4502, |
| "step": 1682944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2438911975722683e-05, |
| "loss": 3.4647, |
| "step": 1683456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2430526028212163e-05, |
| "loss": 3.4625, |
| "step": 1683968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2422140080701643e-05, |
| "loss": 3.4621, |
| "step": 1684480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2413754133191123e-05, |
| "loss": 3.4553, |
| "step": 1684992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2405384564484332e-05, |
| "loss": 3.4588, |
| "step": 1685504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2396998616973812e-05, |
| "loss": 3.4534, |
| "step": 1686016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2388612669463295e-05, |
| "loss": 3.4565, |
| "step": 1686528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2380226721952775e-05, |
| "loss": 3.4486, |
| "step": 1687040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2371840774442255e-05, |
| "loss": 3.4486, |
| "step": 1687552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2363454826931735e-05, |
| "loss": 3.4585, |
| "step": 1688064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.235506887942121e-05, |
| "loss": 3.4548, |
| "step": 1688576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.234668293191069e-05, |
| "loss": 3.4704, |
| "step": 1689088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2338313363203904e-05, |
| "loss": 3.4656, |
| "step": 1689600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2329927415693387e-05, |
| "loss": 3.4693, |
| "step": 1690112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2321541468182864e-05, |
| "loss": 3.4499, |
| "step": 1690624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2313155520672344e-05, |
| "loss": 3.46, |
| "step": 1691136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2304785951965556e-05, |
| "loss": 3.4589, |
| "step": 1691648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2296400004455036e-05, |
| "loss": 3.4683, |
| "step": 1692160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2288014056944513e-05, |
| "loss": 3.4549, |
| "step": 1692672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2279628109433996e-05, |
| "loss": 3.4585, |
| "step": 1693184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.227125854072721e-05, |
| "loss": 3.4525, |
| "step": 1693696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2262872593216685e-05, |
| "loss": 3.4551, |
| "step": 1694208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2254486645706165e-05, |
| "loss": 3.4609, |
| "step": 1694720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2246100698195645e-05, |
| "loss": 3.4668, |
| "step": 1695232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2237731129488858e-05, |
| "loss": 3.4675, |
| "step": 1695744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2229345181978338e-05, |
| "loss": 3.4639, |
| "step": 1696256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2220959234467818e-05, |
| "loss": 3.4578, |
| "step": 1696768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2212573286957298e-05, |
| "loss": 3.4612, |
| "step": 1697280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.220420371825051e-05, |
| "loss": 3.4627, |
| "step": 1697792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2195817770739987e-05, |
| "loss": 3.4507, |
| "step": 1698304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2187431823229467e-05, |
| "loss": 3.4463, |
| "step": 1698816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.217904587571895e-05, |
| "loss": 3.4459, |
| "step": 1699328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.217067630701216e-05, |
| "loss": 3.4528, |
| "step": 1699840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.216229035950164e-05, |
| "loss": 3.4605, |
| "step": 1700352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.215390441199112e-05, |
| "loss": 3.4629, |
| "step": 1700864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.21455184644806e-05, |
| "loss": 3.4529, |
| "step": 1701376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.213714889577381e-05, |
| "loss": 3.4559, |
| "step": 1701888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.212876294826329e-05, |
| "loss": 3.4555, |
| "step": 1702400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.212037700075277e-05, |
| "loss": 3.4563, |
| "step": 1702912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.211199105324225e-05, |
| "loss": 3.4465, |
| "step": 1703424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.210362148453546e-05, |
| "loss": 3.4284, |
| "step": 1703936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.209523553702494e-05, |
| "loss": 3.4729, |
| "step": 1704448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.208684958951442e-05, |
| "loss": 3.4579, |
| "step": 1704960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2078463642003904e-05, |
| "loss": 3.4651, |
| "step": 1705472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2070094073297113e-05, |
| "loss": 3.4528, |
| "step": 1705984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2061708125786593e-05, |
| "loss": 3.4541, |
| "step": 1706496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2053322178276073e-05, |
| "loss": 3.4427, |
| "step": 1707008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2044936230765553e-05, |
| "loss": 3.4533, |
| "step": 1707520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2036566662058765e-05, |
| "loss": 3.4586, |
| "step": 1708032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2028180714548245e-05, |
| "loss": 3.4512, |
| "step": 1708544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2019794767037725e-05, |
| "loss": 3.475, |
| "step": 1709056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2011408819527205e-05, |
| "loss": 3.452, |
| "step": 1709568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2003039250820414e-05, |
| "loss": 3.444, |
| "step": 1710080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1994653303309894e-05, |
| "loss": 3.4673, |
| "step": 1710592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1986267355799374e-05, |
| "loss": 3.4392, |
| "step": 1711104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1977897787092586e-05, |
| "loss": 3.4404, |
| "step": 1711616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1969511839582066e-05, |
| "loss": 3.4671, |
| "step": 1712128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1961125892071546e-05, |
| "loss": 3.4612, |
| "step": 1712640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1952739944561026e-05, |
| "loss": 3.443, |
| "step": 1713152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1944353997050506e-05, |
| "loss": 3.4535, |
| "step": 1713664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.193598442834372e-05, |
| "loss": 3.4388, |
| "step": 1714176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.19275984808332e-05, |
| "loss": 3.4485, |
| "step": 1714688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.191921253332268e-05, |
| "loss": 3.4527, |
| "step": 1715200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1910842964615888e-05, |
| "loss": 3.4462, |
| "step": 1715712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1902457017105368e-05, |
| "loss": 3.4586, |
| "step": 1716224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1894071069594848e-05, |
| "loss": 3.4543, |
| "step": 1716736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1885685122084328e-05, |
| "loss": 3.4652, |
| "step": 1717248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.187729917457381e-05, |
| "loss": 3.4478, |
| "step": 1717760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1868913227063288e-05, |
| "loss": 3.4522, |
| "step": 1718272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1860527279552767e-05, |
| "loss": 3.4553, |
| "step": 1718784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1852141332042247e-05, |
| "loss": 3.4475, |
| "step": 1719296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.184377176333546e-05, |
| "loss": 3.4578, |
| "step": 1719808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1835385815824936e-05, |
| "loss": 3.4567, |
| "step": 1720320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.182699986831442e-05, |
| "loss": 3.4508, |
| "step": 1720832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.18186139208039e-05, |
| "loss": 3.4621, |
| "step": 1721344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1810244352097112e-05, |
| "loss": 3.4403, |
| "step": 1721856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.180185840458659e-05, |
| "loss": 3.4428, |
| "step": 1722368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.179347245707607e-05, |
| "loss": 3.4531, |
| "step": 1722880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.178508650956555e-05, |
| "loss": 3.4571, |
| "step": 1723392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.177671694085876e-05, |
| "loss": 3.4478, |
| "step": 1723904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.176833099334824e-05, |
| "loss": 3.4537, |
| "step": 1724416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.175994504583772e-05, |
| "loss": 3.4373, |
| "step": 1724928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1751575477130934e-05, |
| "loss": 3.4501, |
| "step": 1725440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.174318952962041e-05, |
| "loss": 3.4416, |
| "step": 1725952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.173480358210989e-05, |
| "loss": 3.4544, |
| "step": 1726464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1726417634599373e-05, |
| "loss": 3.4625, |
| "step": 1726976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1718048065892583e-05, |
| "loss": 3.4547, |
| "step": 1727488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1709662118382063e-05, |
| "loss": 3.4585, |
| "step": 1728000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1701276170871543e-05, |
| "loss": 3.4426, |
| "step": 1728512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1692890223361022e-05, |
| "loss": 3.4491, |
| "step": 1729024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1684520654654235e-05, |
| "loss": 3.4463, |
| "step": 1729536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1676134707143715e-05, |
| "loss": 3.4634, |
| "step": 1730048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1667748759633195e-05, |
| "loss": 3.453, |
| "step": 1730560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1659362812122675e-05, |
| "loss": 3.4595, |
| "step": 1731072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1650993243415884e-05, |
| "loss": 3.445, |
| "step": 1731584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1642607295905364e-05, |
| "loss": 3.4561, |
| "step": 1732096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1634221348394844e-05, |
| "loss": 3.4471, |
| "step": 1732608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1625835400884327e-05, |
| "loss": 3.4533, |
| "step": 1733120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1617465832177536e-05, |
| "loss": 3.4406, |
| "step": 1733632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1609079884667016e-05, |
| "loss": 3.4565, |
| "step": 1734144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1600693937156496e-05, |
| "loss": 3.4412, |
| "step": 1734656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1592307989645976e-05, |
| "loss": 3.4518, |
| "step": 1735168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.158393842093919e-05, |
| "loss": 3.4432, |
| "step": 1735680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.157555247342867e-05, |
| "loss": 3.4547, |
| "step": 1736192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.156716652591815e-05, |
| "loss": 3.4476, |
| "step": 1736704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.155878057840763e-05, |
| "loss": 3.4472, |
| "step": 1737216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1550411009700838e-05, |
| "loss": 3.4607, |
| "step": 1737728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1542025062190318e-05, |
| "loss": 3.461, |
| "step": 1738240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1533639114679797e-05, |
| "loss": 3.4593, |
| "step": 1738752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.152525316716928e-05, |
| "loss": 3.4515, |
| "step": 1739264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.151688359846249e-05, |
| "loss": 3.4502, |
| "step": 1739776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.150849765095197e-05, |
| "loss": 3.4491, |
| "step": 1740288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.150011170344145e-05, |
| "loss": 3.4502, |
| "step": 1740800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.149172575593093e-05, |
| "loss": 3.4512, |
| "step": 1741312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1483356187224142e-05, |
| "loss": 3.4392, |
| "step": 1741824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1474970239713622e-05, |
| "loss": 3.4539, |
| "step": 1742336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1466584292203102e-05, |
| "loss": 3.4534, |
| "step": 1742848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1458198344692582e-05, |
| "loss": 3.4536, |
| "step": 1743360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.144982877598579e-05, |
| "loss": 3.4515, |
| "step": 1743872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.144144282847527e-05, |
| "loss": 3.4451, |
| "step": 1744384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.143305688096475e-05, |
| "loss": 3.4474, |
| "step": 1744896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1424670933454234e-05, |
| "loss": 3.4636, |
| "step": 1745408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1416301364747444e-05, |
| "loss": 3.4601, |
| "step": 1745920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1407915417236924e-05, |
| "loss": 3.458, |
| "step": 1746432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1399529469726404e-05, |
| "loss": 3.4422, |
| "step": 1746944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1391143522215883e-05, |
| "loss": 3.457, |
| "step": 1747456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1382773953509096e-05, |
| "loss": 3.4521, |
| "step": 1747968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1374388005998576e-05, |
| "loss": 3.4468, |
| "step": 1748480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1366002058488056e-05, |
| "loss": 3.453, |
| "step": 1748992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1357616110977536e-05, |
| "loss": 3.4546, |
| "step": 1749504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1349246542270745e-05, |
| "loss": 3.4542, |
| "step": 1750016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1340860594760225e-05, |
| "loss": 3.4712, |
| "step": 1750528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1332474647249705e-05, |
| "loss": 3.4481, |
| "step": 1751040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1324088699739185e-05, |
| "loss": 3.4543, |
| "step": 1751552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1315719131032397e-05, |
| "loss": 3.4519, |
| "step": 1752064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1307333183521877e-05, |
| "loss": 3.45, |
| "step": 1752576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1298947236011357e-05, |
| "loss": 3.4477, |
| "step": 1753088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1290561288500834e-05, |
| "loss": 3.4467, |
| "step": 1753600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1282191719794046e-05, |
| "loss": 3.4517, |
| "step": 1754112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.127380577228353e-05, |
| "loss": 3.4372, |
| "step": 1754624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.126541982477301e-05, |
| "loss": 3.4557, |
| "step": 1755136 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8607735633850098, |
| "eval_runtime": 302.8237, |
| "eval_samples_per_second": 1260.109, |
| "eval_steps_per_second": 39.379, |
| "step": 1755360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1257033877262486e-05, |
| "loss": 3.4538, |
| "step": 1755648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1248647929751966e-05, |
| "loss": 3.4461, |
| "step": 1756160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1240261982241446e-05, |
| "loss": 3.4539, |
| "step": 1756672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1231876034730926e-05, |
| "loss": 3.4574, |
| "step": 1757184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.122350646602414e-05, |
| "loss": 3.4657, |
| "step": 1757696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.121512051851362e-05, |
| "loss": 3.4486, |
| "step": 1758208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.120675094980683e-05, |
| "loss": 3.4579, |
| "step": 1758720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1198365002296307e-05, |
| "loss": 3.442, |
| "step": 1759232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1189979054785787e-05, |
| "loss": 3.4465, |
| "step": 1759744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1181593107275267e-05, |
| "loss": 3.4566, |
| "step": 1760256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.117320715976475e-05, |
| "loss": 3.4499, |
| "step": 1760768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.116482121225423e-05, |
| "loss": 3.4419, |
| "step": 1761280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.115645164354744e-05, |
| "loss": 3.456, |
| "step": 1761792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.114806569603692e-05, |
| "loss": 3.4439, |
| "step": 1762304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.11396797485264e-05, |
| "loss": 3.4389, |
| "step": 1762816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.113129380101588e-05, |
| "loss": 3.4392, |
| "step": 1763328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.112290785350536e-05, |
| "loss": 3.4381, |
| "step": 1763840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.111452190599484e-05, |
| "loss": 3.4492, |
| "step": 1764352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.110613595848432e-05, |
| "loss": 3.4377, |
| "step": 1764864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.10977500109738e-05, |
| "loss": 3.4615, |
| "step": 1765376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1089380442267012e-05, |
| "loss": 3.4523, |
| "step": 1765888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.108099449475649e-05, |
| "loss": 3.4603, |
| "step": 1766400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.107260854724597e-05, |
| "loss": 3.4426, |
| "step": 1766912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1064222599735452e-05, |
| "loss": 3.4451, |
| "step": 1767424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.105585303102866e-05, |
| "loss": 3.452, |
| "step": 1767936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.104746708351814e-05, |
| "loss": 3.4539, |
| "step": 1768448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.103908113600762e-05, |
| "loss": 3.4443, |
| "step": 1768960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.10306951884971e-05, |
| "loss": 3.4475, |
| "step": 1769472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1022325619790313e-05, |
| "loss": 3.4389, |
| "step": 1769984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1013939672279793e-05, |
| "loss": 3.4454, |
| "step": 1770496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1005553724769273e-05, |
| "loss": 3.4472, |
| "step": 1771008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0997167777258753e-05, |
| "loss": 3.4551, |
| "step": 1771520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0988798208551962e-05, |
| "loss": 3.4527, |
| "step": 1772032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0980412261041442e-05, |
| "loss": 3.4533, |
| "step": 1772544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0972026313530922e-05, |
| "loss": 3.4495, |
| "step": 1773056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0963640366020405e-05, |
| "loss": 3.4491, |
| "step": 1773568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0955270797313615e-05, |
| "loss": 3.4509, |
| "step": 1774080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0946884849803095e-05, |
| "loss": 3.4407, |
| "step": 1774592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0938498902292574e-05, |
| "loss": 3.4368, |
| "step": 1775104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0930112954782054e-05, |
| "loss": 3.4343, |
| "step": 1775616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0921743386075267e-05, |
| "loss": 3.4422, |
| "step": 1776128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0913357438564747e-05, |
| "loss": 3.444, |
| "step": 1776640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0904971491054227e-05, |
| "loss": 3.453, |
| "step": 1777152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0896585543543707e-05, |
| "loss": 3.4443, |
| "step": 1777664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0888215974836916e-05, |
| "loss": 3.4442, |
| "step": 1778176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0879830027326396e-05, |
| "loss": 3.4473, |
| "step": 1778688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0871444079815876e-05, |
| "loss": 3.4469, |
| "step": 1779200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.086305813230536e-05, |
| "loss": 3.4341, |
| "step": 1779712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0854688563598568e-05, |
| "loss": 3.4191, |
| "step": 1780224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0846302616088048e-05, |
| "loss": 3.456, |
| "step": 1780736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0837916668577528e-05, |
| "loss": 3.4499, |
| "step": 1781248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0829530721067008e-05, |
| "loss": 3.4559, |
| "step": 1781760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.082116115236022e-05, |
| "loss": 3.4387, |
| "step": 1782272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.08127752048497e-05, |
| "loss": 3.4451, |
| "step": 1782784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.080438925733918e-05, |
| "loss": 3.4347, |
| "step": 1783296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.079601968863239e-05, |
| "loss": 3.4419, |
| "step": 1783808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.078763374112187e-05, |
| "loss": 3.4524, |
| "step": 1784320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.077924779361135e-05, |
| "loss": 3.4347, |
| "step": 1784832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.077086184610083e-05, |
| "loss": 3.4645, |
| "step": 1785344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0762492277394042e-05, |
| "loss": 3.4457, |
| "step": 1785856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0754106329883522e-05, |
| "loss": 3.4281, |
| "step": 1786368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0745720382373002e-05, |
| "loss": 3.4579, |
| "step": 1786880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0737334434862482e-05, |
| "loss": 3.4304, |
| "step": 1787392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.072896486615569e-05, |
| "loss": 3.4287, |
| "step": 1787904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0720578918645174e-05, |
| "loss": 3.4549, |
| "step": 1788416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0712192971134654e-05, |
| "loss": 3.4478, |
| "step": 1788928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0703807023624134e-05, |
| "loss": 3.434, |
| "step": 1789440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0695437454917343e-05, |
| "loss": 3.4404, |
| "step": 1789952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0687051507406823e-05, |
| "loss": 3.432, |
| "step": 1790464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0678665559896303e-05, |
| "loss": 3.4359, |
| "step": 1790976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0670279612385783e-05, |
| "loss": 3.4449, |
| "step": 1791488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0661910043678996e-05, |
| "loss": 3.4351, |
| "step": 1792000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0653524096168476e-05, |
| "loss": 3.4498, |
| "step": 1792512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0645138148657956e-05, |
| "loss": 3.4418, |
| "step": 1793024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0636752201147435e-05, |
| "loss": 3.4529, |
| "step": 1793536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0628382632440645e-05, |
| "loss": 3.4384, |
| "step": 1794048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0619996684930128e-05, |
| "loss": 3.4458, |
| "step": 1794560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0611610737419608e-05, |
| "loss": 3.4429, |
| "step": 1795072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0603224789909088e-05, |
| "loss": 3.4369, |
| "step": 1795584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0594855221202297e-05, |
| "loss": 3.4507, |
| "step": 1796096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0586469273691777e-05, |
| "loss": 3.4472, |
| "step": 1796608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0578083326181257e-05, |
| "loss": 3.4358, |
| "step": 1797120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0569697378670737e-05, |
| "loss": 3.4528, |
| "step": 1797632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.056132780996395e-05, |
| "loss": 3.4295, |
| "step": 1798144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.055294186245343e-05, |
| "loss": 3.4307, |
| "step": 1798656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.054455591494291e-05, |
| "loss": 3.4452, |
| "step": 1799168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0536169967432386e-05, |
| "loss": 3.4453, |
| "step": 1799680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0527800398725598e-05, |
| "loss": 3.4399, |
| "step": 1800192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0519414451215078e-05, |
| "loss": 3.4469, |
| "step": 1800704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.051102850370456e-05, |
| "loss": 3.4288, |
| "step": 1801216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0502642556194038e-05, |
| "loss": 3.4365, |
| "step": 1801728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.049427298748725e-05, |
| "loss": 3.4346, |
| "step": 1802240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.048588703997673e-05, |
| "loss": 3.4408, |
| "step": 1802752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0477501092466207e-05, |
| "loss": 3.4525, |
| "step": 1803264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.046911514495569e-05, |
| "loss": 3.444, |
| "step": 1803776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0460745576248903e-05, |
| "loss": 3.4495, |
| "step": 1804288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0452359628738383e-05, |
| "loss": 3.4324, |
| "step": 1804800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.044397368122786e-05, |
| "loss": 3.4385, |
| "step": 1805312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.043558773371734e-05, |
| "loss": 3.4351, |
| "step": 1805824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0427218165010552e-05, |
| "loss": 3.448, |
| "step": 1806336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0418832217500032e-05, |
| "loss": 3.4425, |
| "step": 1806848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0410446269989512e-05, |
| "loss": 3.4498, |
| "step": 1807360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0402060322478992e-05, |
| "loss": 3.4395, |
| "step": 1807872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0393690753772204e-05, |
| "loss": 3.4428, |
| "step": 1808384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.038530480626168e-05, |
| "loss": 3.4409, |
| "step": 1808896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.037691885875116e-05, |
| "loss": 3.4397, |
| "step": 1809408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0368532911240644e-05, |
| "loss": 3.429, |
| "step": 1809920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0360163342533857e-05, |
| "loss": 3.4496, |
| "step": 1810432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0351777395023333e-05, |
| "loss": 3.4307, |
| "step": 1810944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0343391447512813e-05, |
| "loss": 3.4394, |
| "step": 1811456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0335005500002293e-05, |
| "loss": 3.4317, |
| "step": 1811968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0326635931295506e-05, |
| "loss": 3.4416, |
| "step": 1812480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0318249983784986e-05, |
| "loss": 3.4401, |
| "step": 1812992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0309864036274465e-05, |
| "loss": 3.4354, |
| "step": 1813504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0301478088763945e-05, |
| "loss": 3.4483, |
| "step": 1814016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0293108520057155e-05, |
| "loss": 3.4527, |
| "step": 1814528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0284722572546634e-05, |
| "loss": 3.4503, |
| "step": 1815040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0276336625036114e-05, |
| "loss": 3.4401, |
| "step": 1815552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0267950677525598e-05, |
| "loss": 3.4417, |
| "step": 1816064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0259581108818807e-05, |
| "loss": 3.4373, |
| "step": 1816576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0251195161308287e-05, |
| "loss": 3.4397, |
| "step": 1817088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0242809213797767e-05, |
| "loss": 3.4386, |
| "step": 1817600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0234423266287247e-05, |
| "loss": 3.4283, |
| "step": 1818112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.022605369758046e-05, |
| "loss": 3.4435, |
| "step": 1818624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.021766775006994e-05, |
| "loss": 3.4449, |
| "step": 1819136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.020928180255942e-05, |
| "loss": 3.4372, |
| "step": 1819648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.02008958550489e-05, |
| "loss": 3.4401, |
| "step": 1820160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0192526286342108e-05, |
| "loss": 3.4386, |
| "step": 1820672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0184140338831588e-05, |
| "loss": 3.4361, |
| "step": 1821184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0175754391321068e-05, |
| "loss": 3.4503, |
| "step": 1821696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.016736844381055e-05, |
| "loss": 3.4457, |
| "step": 1822208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.015899887510376e-05, |
| "loss": 3.4494, |
| "step": 1822720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.015061292759324e-05, |
| "loss": 3.4317, |
| "step": 1823232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.014222698008272e-05, |
| "loss": 3.4437, |
| "step": 1823744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.013385741137593e-05, |
| "loss": 3.4416, |
| "step": 1824256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0125471463865413e-05, |
| "loss": 3.4399, |
| "step": 1824768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0117085516354893e-05, |
| "loss": 3.4426, |
| "step": 1825280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0108699568844373e-05, |
| "loss": 3.4423, |
| "step": 1825792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0100330000137582e-05, |
| "loss": 3.4466, |
| "step": 1826304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0091944052627062e-05, |
| "loss": 3.4635, |
| "step": 1826816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0083558105116542e-05, |
| "loss": 3.4342, |
| "step": 1827328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0075172157606022e-05, |
| "loss": 3.4465, |
| "step": 1827840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0066802588899234e-05, |
| "loss": 3.4399, |
| "step": 1828352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0058416641388714e-05, |
| "loss": 3.4411, |
| "step": 1828864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0050030693878194e-05, |
| "loss": 3.4376, |
| "step": 1829376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0041644746367674e-05, |
| "loss": 3.4357, |
| "step": 1829888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0033275177660883e-05, |
| "loss": 3.4428, |
| "step": 1830400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0024889230150367e-05, |
| "loss": 3.4269, |
| "step": 1830912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0016503282639847e-05, |
| "loss": 3.4437, |
| "step": 1831424 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.863162040710449, |
| "eval_runtime": 304.4739, |
| "eval_samples_per_second": 1253.28, |
| "eval_steps_per_second": 39.166, |
| "step": 1831680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.0008117335129326e-05, |
| "loss": 3.4391, |
| "step": 1831936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9999747766422536e-05, |
| "loss": 3.4361, |
| "step": 1832448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9991361818912016e-05, |
| "loss": 3.4411, |
| "step": 1832960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9982975871401495e-05, |
| "loss": 3.4447, |
| "step": 1833472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9974589923890975e-05, |
| "loss": 3.4551, |
| "step": 1833984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9966220355184188e-05, |
| "loss": 3.4397, |
| "step": 1834496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9957850786477397e-05, |
| "loss": 3.4496, |
| "step": 1835008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9949464838966877e-05, |
| "loss": 3.4333, |
| "step": 1835520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9941078891456357e-05, |
| "loss": 3.436, |
| "step": 1836032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9932692943945837e-05, |
| "loss": 3.4477, |
| "step": 1836544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9924306996435317e-05, |
| "loss": 3.4403, |
| "step": 1837056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.99159210489248e-05, |
| "loss": 3.4319, |
| "step": 1837568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.990755148021801e-05, |
| "loss": 3.4468, |
| "step": 1838080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.989916553270749e-05, |
| "loss": 3.4285, |
| "step": 1838592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.989077958519697e-05, |
| "loss": 3.4308, |
| "step": 1839104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.988239363768645e-05, |
| "loss": 3.4234, |
| "step": 1839616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.987400769017593e-05, |
| "loss": 3.4314, |
| "step": 1840128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.986562174266541e-05, |
| "loss": 3.4403, |
| "step": 1840640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.985723579515489e-05, |
| "loss": 3.4289, |
| "step": 1841152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.984884984764437e-05, |
| "loss": 3.4488, |
| "step": 1841664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9840480278937578e-05, |
| "loss": 3.4454, |
| "step": 1842176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9832094331427058e-05, |
| "loss": 3.4464, |
| "step": 1842688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9823708383916538e-05, |
| "loss": 3.4355, |
| "step": 1843200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.981532243640602e-05, |
| "loss": 3.436, |
| "step": 1843712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.980695286769923e-05, |
| "loss": 3.4416, |
| "step": 1844224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.979856692018871e-05, |
| "loss": 3.4423, |
| "step": 1844736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.979018097267819e-05, |
| "loss": 3.4326, |
| "step": 1845248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.978179502516767e-05, |
| "loss": 3.4391, |
| "step": 1845760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9773425456460883e-05, |
| "loss": 3.4286, |
| "step": 1846272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9765039508950363e-05, |
| "loss": 3.4348, |
| "step": 1846784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9756653561439843e-05, |
| "loss": 3.4381, |
| "step": 1847296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9748267613929323e-05, |
| "loss": 3.4408, |
| "step": 1847808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9739898045222532e-05, |
| "loss": 3.4452, |
| "step": 1848320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9731512097712012e-05, |
| "loss": 3.4459, |
| "step": 1848832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.972312615020149e-05, |
| "loss": 3.4349, |
| "step": 1849344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9714740202690975e-05, |
| "loss": 3.4403, |
| "step": 1849856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9706370633984184e-05, |
| "loss": 3.4418, |
| "step": 1850368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9697984686473664e-05, |
| "loss": 3.4294, |
| "step": 1850880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9689598738963144e-05, |
| "loss": 3.4303, |
| "step": 1851392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9681212791452624e-05, |
| "loss": 3.4249, |
| "step": 1851904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9672843222745836e-05, |
| "loss": 3.4289, |
| "step": 1852416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9664457275235316e-05, |
| "loss": 3.4348, |
| "step": 1852928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9656071327724796e-05, |
| "loss": 3.4438, |
| "step": 1853440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9647685380214276e-05, |
| "loss": 3.434, |
| "step": 1853952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9639315811507485e-05, |
| "loss": 3.4321, |
| "step": 1854464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9630929863996965e-05, |
| "loss": 3.4321, |
| "step": 1854976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9622543916486445e-05, |
| "loss": 3.4386, |
| "step": 1855488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9614157968975925e-05, |
| "loss": 3.4224, |
| "step": 1856000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9605788400269138e-05, |
| "loss": 3.4124, |
| "step": 1856512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9597402452758618e-05, |
| "loss": 3.4433, |
| "step": 1857024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9589016505248098e-05, |
| "loss": 3.4372, |
| "step": 1857536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9580630557737578e-05, |
| "loss": 3.4444, |
| "step": 1858048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9572260989030787e-05, |
| "loss": 3.4288, |
| "step": 1858560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.956387504152027e-05, |
| "loss": 3.4329, |
| "step": 1859072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.955548909400975e-05, |
| "loss": 3.4251, |
| "step": 1859584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.954710314649923e-05, |
| "loss": 3.4308, |
| "step": 1860096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.953873357779244e-05, |
| "loss": 3.4429, |
| "step": 1860608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.953034763028192e-05, |
| "loss": 3.4235, |
| "step": 1861120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.95219616827714e-05, |
| "loss": 3.4566, |
| "step": 1861632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.951359211406461e-05, |
| "loss": 3.4314, |
| "step": 1862144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.950520616655409e-05, |
| "loss": 3.4162, |
| "step": 1862656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.949682021904357e-05, |
| "loss": 3.4461, |
| "step": 1863168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.948843427153305e-05, |
| "loss": 3.4261, |
| "step": 1863680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.948006470282626e-05, |
| "loss": 3.4168, |
| "step": 1864192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.947167875531574e-05, |
| "loss": 3.4409, |
| "step": 1864704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9463292807805224e-05, |
| "loss": 3.4391, |
| "step": 1865216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9454906860294704e-05, |
| "loss": 3.4256, |
| "step": 1865728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9446537291587913e-05, |
| "loss": 3.4274, |
| "step": 1866240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9438151344077393e-05, |
| "loss": 3.4206, |
| "step": 1866752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9429765396566873e-05, |
| "loss": 3.4239, |
| "step": 1867264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9421379449056353e-05, |
| "loss": 3.4366, |
| "step": 1867776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9413009880349565e-05, |
| "loss": 3.4253, |
| "step": 1868288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9404623932839045e-05, |
| "loss": 3.433, |
| "step": 1868800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9396237985328525e-05, |
| "loss": 3.4295, |
| "step": 1869312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9387852037818e-05, |
| "loss": 3.4432, |
| "step": 1869824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9379482469111214e-05, |
| "loss": 3.4287, |
| "step": 1870336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9371096521600694e-05, |
| "loss": 3.4362, |
| "step": 1870848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9362710574090177e-05, |
| "loss": 3.4298, |
| "step": 1871360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9354324626579654e-05, |
| "loss": 3.4275, |
| "step": 1871872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9345955057872866e-05, |
| "loss": 3.4391, |
| "step": 1872384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9337569110362346e-05, |
| "loss": 3.4358, |
| "step": 1872896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9329183162851826e-05, |
| "loss": 3.4276, |
| "step": 1873408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9320797215341306e-05, |
| "loss": 3.4375, |
| "step": 1873920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.931242764663452e-05, |
| "loss": 3.4177, |
| "step": 1874432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9304041699124e-05, |
| "loss": 3.4218, |
| "step": 1874944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9295655751613475e-05, |
| "loss": 3.4325, |
| "step": 1875456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9287269804102955e-05, |
| "loss": 3.4353, |
| "step": 1875968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9278883856592435e-05, |
| "loss": 3.4217, |
| "step": 1876480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.9270497909081915e-05, |
| "loss": 3.4372, |
| "step": 1876992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.92621119615714e-05, |
| "loss": 3.4228, |
| "step": 1877504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.925372601406088e-05, |
| "loss": 3.4247, |
| "step": 1878016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9245356445354088e-05, |
| "loss": 3.4219, |
| "step": 1878528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9236970497843568e-05, |
| "loss": 3.4298, |
| "step": 1879040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9228584550333048e-05, |
| "loss": 3.441, |
| "step": 1879552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9220198602822527e-05, |
| "loss": 3.4328, |
| "step": 1880064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.921182903411574e-05, |
| "loss": 3.4381, |
| "step": 1880576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.920344308660522e-05, |
| "loss": 3.4238, |
| "step": 1881088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.91950571390947e-05, |
| "loss": 3.4287, |
| "step": 1881600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.918667119158418e-05, |
| "loss": 3.4251, |
| "step": 1882112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.917830162287739e-05, |
| "loss": 3.4369, |
| "step": 1882624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.916991567536687e-05, |
| "loss": 3.4322, |
| "step": 1883136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.916152972785635e-05, |
| "loss": 3.4362, |
| "step": 1883648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9153143780345832e-05, |
| "loss": 3.4328, |
| "step": 1884160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.914477421163904e-05, |
| "loss": 3.431, |
| "step": 1884672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.913638826412852e-05, |
| "loss": 3.4324, |
| "step": 1885184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9128002316618e-05, |
| "loss": 3.4293, |
| "step": 1885696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9119616369107478e-05, |
| "loss": 3.4195, |
| "step": 1886208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9111246800400694e-05, |
| "loss": 3.4354, |
| "step": 1886720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9102860852890174e-05, |
| "loss": 3.424, |
| "step": 1887232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9094474905379654e-05, |
| "loss": 3.4271, |
| "step": 1887744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9086105336672863e-05, |
| "loss": 3.4283, |
| "step": 1888256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9077719389162343e-05, |
| "loss": 3.4276, |
| "step": 1888768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9069333441651823e-05, |
| "loss": 3.4299, |
| "step": 1889280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9060947494141302e-05, |
| "loss": 3.4245, |
| "step": 1889792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9052577925434515e-05, |
| "loss": 3.4403, |
| "step": 1890304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9044191977923995e-05, |
| "loss": 3.4405, |
| "step": 1890816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9035806030413475e-05, |
| "loss": 3.4391, |
| "step": 1891328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.902742008290295e-05, |
| "loss": 3.4273, |
| "step": 1891840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9019050514196164e-05, |
| "loss": 3.4349, |
| "step": 1892352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9010664566685647e-05, |
| "loss": 3.4249, |
| "step": 1892864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.9002278619175127e-05, |
| "loss": 3.4302, |
| "step": 1893376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8993892671664604e-05, |
| "loss": 3.431, |
| "step": 1893888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8985523102957816e-05, |
| "loss": 3.4172, |
| "step": 1894400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8977137155447296e-05, |
| "loss": 3.4314, |
| "step": 1894912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8968751207936776e-05, |
| "loss": 3.4326, |
| "step": 1895424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8960365260426256e-05, |
| "loss": 3.4322, |
| "step": 1895936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.895199569171947e-05, |
| "loss": 3.4271, |
| "step": 1896448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.894360974420895e-05, |
| "loss": 3.4255, |
| "step": 1896960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8935223796698425e-05, |
| "loss": 3.4265, |
| "step": 1897472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8926837849187905e-05, |
| "loss": 3.4412, |
| "step": 1897984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8918468280481118e-05, |
| "loss": 3.4345, |
| "step": 1898496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.89100823329706e-05, |
| "loss": 3.445, |
| "step": 1899008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8901696385460078e-05, |
| "loss": 3.4213, |
| "step": 1899520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8893310437949557e-05, |
| "loss": 3.4298, |
| "step": 1900032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.888494086924277e-05, |
| "loss": 3.4347, |
| "step": 1900544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.887655492173225e-05, |
| "loss": 3.429, |
| "step": 1901056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.886816897422173e-05, |
| "loss": 3.4307, |
| "step": 1901568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.885978302671121e-05, |
| "loss": 3.4334, |
| "step": 1902080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8851413458004422e-05, |
| "loss": 3.4334, |
| "step": 1902592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.88430275104939e-05, |
| "loss": 3.4501, |
| "step": 1903104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.883464156298338e-05, |
| "loss": 3.4276, |
| "step": 1903616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.882625561547286e-05, |
| "loss": 3.4343, |
| "step": 1904128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.881788604676607e-05, |
| "loss": 3.4348, |
| "step": 1904640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.880950009925555e-05, |
| "loss": 3.4254, |
| "step": 1905152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.880111415174503e-05, |
| "loss": 3.4274, |
| "step": 1905664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.879272820423451e-05, |
| "loss": 3.424, |
| "step": 1906176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8784358635527724e-05, |
| "loss": 3.4311, |
| "step": 1906688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.87759726880172e-05, |
| "loss": 3.4215, |
| "step": 1907200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.8767586740506684e-05, |
| "loss": 3.4302, |
| "step": 1907712 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8628933429718018, |
| "eval_runtime": 304.8396, |
| "eval_samples_per_second": 1251.776, |
| "eval_steps_per_second": 39.119, |
| "step": 1908000 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 1.3029295525698816e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |