| { |
| "best_metric": 3.851067066192627, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-reconstruction/transformer/2/checkpoints/checkpoint-610560", |
| "epoch": 1.0250006060157382, |
| "eval_steps": 10, |
| "global_step": 610560, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.9415, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8131, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.1794, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 5.9713, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8122, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.7062, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.6008, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.5311, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4432, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.3918, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3443, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.312, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989938500867749e-05, |
| "loss": 5.2581, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.1975, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.1677, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.1181, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.0986, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.0757, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0358, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0114, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 4.9973, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 4.9687, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9431, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9252, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.9137, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 4.8794, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.8654, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8568, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.8275, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756873037409844e-05, |
| "loss": 4.8075, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.7977, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740117521192533e-05, |
| "loss": 4.791, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731731573682013e-05, |
| "loss": 4.7781, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.97233456261715e-05, |
| "loss": 4.761, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.971495967866098e-05, |
| "loss": 4.7515, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970657373115046e-05, |
| "loss": 4.7469, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969818778363994e-05, |
| "loss": 4.7256, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968980183612942e-05, |
| "loss": 4.6986, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96814158886189e-05, |
| "loss": 4.6967, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967302994110837e-05, |
| "loss": 4.6713, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.6802, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965627442489107e-05, |
| "loss": 4.6665, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964788847738054e-05, |
| "loss": 4.665, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963950252987002e-05, |
| "loss": 4.6436, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96311165823595e-05, |
| "loss": 4.6448, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 4.6295, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96143610661422e-05, |
| "loss": 4.6224, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960597511863168e-05, |
| "loss": 4.6262, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959758917112116e-05, |
| "loss": 4.5883, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9589219602414374e-05, |
| "loss": 4.5962, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958083365490385e-05, |
| "loss": 4.5985, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957244770739333e-05, |
| "loss": 4.592, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956406175988281e-05, |
| "loss": 4.5678, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9555692191176016e-05, |
| "loss": 4.5586, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.5509, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.5438, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.559, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5216, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951377883242715e-05, |
| "loss": 4.5367, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.5287, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949700693740611e-05, |
| "loss": 4.5084, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948862098989559e-05, |
| "loss": 4.5169, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94802514211888e-05, |
| "loss": 4.493, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 4.4902, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 4.4897, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945509357865724e-05, |
| "loss": 4.4991, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 4.4688, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 4.4724, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942995211492941e-05, |
| "loss": 4.455, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421582546222625e-05, |
| "loss": 4.465, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9413196598712105e-05, |
| "loss": 4.4693, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404810651201585e-05, |
| "loss": 4.4655, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396424703691065e-05, |
| "loss": 4.4461, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388038756180545e-05, |
| "loss": 4.4578, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379652808670025e-05, |
| "loss": 4.4616, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371266861159505e-05, |
| "loss": 4.437, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362897292452714e-05, |
| "loss": 4.4317, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354511344942194e-05, |
| "loss": 4.426, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346125397431674e-05, |
| "loss": 4.4186, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337739449921154e-05, |
| "loss": 4.4255, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329353502410634e-05, |
| "loss": 4.4063, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9320967554900114e-05, |
| "loss": 4.4245, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 4.4178, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9304195659879074e-05, |
| "loss": 4.4033, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 4.3872, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 4.394, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 4.395, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 4.3967, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926229867993394e-05, |
| "loss": 4.3834, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925392911122715e-05, |
| "loss": 4.3868, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924554316371663e-05, |
| "loss": 4.3765, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923715721620611e-05, |
| "loss": 4.3678, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922877126869559e-05, |
| "loss": 4.3641, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922038532118507e-05, |
| "loss": 4.3699, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921199937367455e-05, |
| "loss": 4.3634, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920361342616403e-05, |
| "loss": 4.3638, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919522747865351e-05, |
| "loss": 4.3492, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918685790994672e-05, |
| "loss": 4.3475, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91784719624362e-05, |
| "loss": 4.3568, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9170086014925676e-05, |
| "loss": 4.3561, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916171644621889e-05, |
| "loss": 4.3453, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.915333049870837e-05, |
| "loss": 4.3407, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914494455119785e-05, |
| "loss": 4.3323, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136558603687325e-05, |
| "loss": 4.3305, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128172656176805e-05, |
| "loss": 4.3331, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119786708666285e-05, |
| "loss": 4.3094, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91114171399595e-05, |
| "loss": 4.3195, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910303119244898e-05, |
| "loss": 4.3237, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909464524493846e-05, |
| "loss": 4.3213, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908625929742794e-05, |
| "loss": 4.3099, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907787334991742e-05, |
| "loss": 4.3189, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90694874024069e-05, |
| "loss": 4.2973, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906110145489638e-05, |
| "loss": 4.3237, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905273188618959e-05, |
| "loss": 4.2905, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904434593867907e-05, |
| "loss": 4.3027, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903595999116855e-05, |
| "loss": 4.3027, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902757404365803e-05, |
| "loss": 4.308, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901920447495124e-05, |
| "loss": 4.2977, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901081852744072e-05, |
| "loss": 4.2886, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90024325799302e-05, |
| "loss": 4.2911, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8994046632419686e-05, |
| "loss": 4.2942, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985660684909166e-05, |
| "loss": 4.2785, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977274737398646e-05, |
| "loss": 4.2786, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968888789888125e-05, |
| "loss": 4.2749, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960502842377605e-05, |
| "loss": 4.2934, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952133273670815e-05, |
| "loss": 4.2635, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943747326160294e-05, |
| "loss": 4.2684, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935361378649774e-05, |
| "loss": 4.2657, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8926991809942984e-05, |
| "loss": 4.2703, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918605862432463e-05, |
| "loss": 4.2603, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8910219914921943e-05, |
| "loss": 4.2772, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8901833967411423e-05, |
| "loss": 4.2668, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88934480199009e-05, |
| "loss": 4.2601, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888506207239038e-05, |
| "loss": 4.2511, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887667612487986e-05, |
| "loss": 4.2656, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886829017736934e-05, |
| "loss": 4.2566, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885992060866256e-05, |
| "loss": 4.2623, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885155103995577e-05, |
| "loss": 4.2454, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884316509244525e-05, |
| "loss": 4.258, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883477914493473e-05, |
| "loss": 4.249, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882639319742421e-05, |
| "loss": 4.2545, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881800724991369e-05, |
| "loss": 4.2349, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880962130240316e-05, |
| "loss": 4.254, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880123535489264e-05, |
| "loss": 4.2355, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879286578618586e-05, |
| "loss": 4.2194, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878447983867534e-05, |
| "loss": 4.2496, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877609389116482e-05, |
| "loss": 4.2336, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87677079436543e-05, |
| "loss": 4.2343, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875932199614378e-05, |
| "loss": 4.2328, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875093604863326e-05, |
| "loss": 4.2206, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.190104007720947, |
| "eval_runtime": 524.2618, |
| "eval_samples_per_second": 727.863, |
| "eval_steps_per_second": 22.746, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874255010112274e-05, |
| "loss": 4.2013, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873416415361222e-05, |
| "loss": 4.2069, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8725794584905426e-05, |
| "loss": 4.2293, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8717408637394906e-05, |
| "loss": 4.213, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709022689884386e-05, |
| "loss": 4.2193, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700636742373866e-05, |
| "loss": 4.2089, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692267173667075e-05, |
| "loss": 4.2026, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683881226156555e-05, |
| "loss": 4.1972, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.867551165744977e-05, |
| "loss": 4.2025, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.866712570993925e-05, |
| "loss": 4.1999, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865873976242873e-05, |
| "loss": 4.2122, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865035381491821e-05, |
| "loss": 4.2065, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8642000625015155e-05, |
| "loss": 4.1916, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8633614677504635e-05, |
| "loss": 4.1838, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862522872999411e-05, |
| "loss": 4.1773, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.861684278248359e-05, |
| "loss": 4.1754, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860845683497307e-05, |
| "loss": 4.1878, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860007088746255e-05, |
| "loss": 4.1794, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.859168493995203e-05, |
| "loss": 4.1738, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858329899244151e-05, |
| "loss": 4.1922, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8574913044930995e-05, |
| "loss": 4.1786, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8566543476224204e-05, |
| "loss": 4.1744, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8558157528713684e-05, |
| "loss": 4.1771, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8549771581203164e-05, |
| "loss": 4.1886, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541385633692644e-05, |
| "loss": 4.1588, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532999686182124e-05, |
| "loss": 4.1648, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524613738671604e-05, |
| "loss": 4.1763, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851624416996481e-05, |
| "loss": 4.1626, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.850785822245429e-05, |
| "loss": 4.1523, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849947227494377e-05, |
| "loss": 4.1523, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849110270623698e-05, |
| "loss": 4.166, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.848271675872646e-05, |
| "loss": 4.1611, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847433081121595e-05, |
| "loss": 4.1584, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.846594486370543e-05, |
| "loss": 4.1578, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845755891619491e-05, |
| "loss": 4.1582, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844917296868439e-05, |
| "loss": 4.1564, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844078702117387e-05, |
| "loss": 4.1416, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843240107366335e-05, |
| "loss": 4.1437, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842401512615282e-05, |
| "loss": 4.1299, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84156291786423e-05, |
| "loss": 4.1423, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840724323113178e-05, |
| "loss": 4.1389, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8398873662425e-05, |
| "loss": 4.1497, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839048771491447e-05, |
| "loss": 4.1354, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838210176740395e-05, |
| "loss": 4.1426, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.837371581989343e-05, |
| "loss": 4.1367, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836532987238292e-05, |
| "loss": 4.135, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8356960303676126e-05, |
| "loss": 4.1443, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8348574356165606e-05, |
| "loss": 4.1137, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8340188408655086e-05, |
| "loss": 4.1238, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8331802461144566e-05, |
| "loss": 4.1376, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8323432892437775e-05, |
| "loss": 4.1345, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8315046944927255e-05, |
| "loss": 4.1198, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8306660997416735e-05, |
| "loss": 4.1147, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298275049906215e-05, |
| "loss": 4.1157, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289889102395695e-05, |
| "loss": 4.1059, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281519533688904e-05, |
| "loss": 4.1291, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273133586178384e-05, |
| "loss": 4.1051, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.826474763866787e-05, |
| "loss": 4.1243, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.825636169115735e-05, |
| "loss": 4.1154, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.824797574364683e-05, |
| "loss": 4.1026, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823960617494004e-05, |
| "loss": 4.11, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823122022742952e-05, |
| "loss": 4.1002, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8222834279919e-05, |
| "loss": 4.0981, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821446471121221e-05, |
| "loss": 4.1017, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820607876370169e-05, |
| "loss": 4.118, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819769281619117e-05, |
| "loss": 4.0894, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818930686868065e-05, |
| "loss": 4.0969, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818092092117013e-05, |
| "loss": 4.0807, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817255135246334e-05, |
| "loss": 4.0978, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816416540495282e-05, |
| "loss": 4.108, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8155779457442305e-05, |
| "loss": 4.1015, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8147393509931785e-05, |
| "loss": 4.0937, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8139023941224994e-05, |
| "loss": 4.1029, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8130637993714474e-05, |
| "loss": 4.1105, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8122252046203954e-05, |
| "loss": 4.0896, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811388247749716e-05, |
| "loss": 4.0889, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810549652998664e-05, |
| "loss": 4.0909, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809711058247612e-05, |
| "loss": 4.0846, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80887246349656e-05, |
| "loss": 4.0909, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808033868745508e-05, |
| "loss": 4.0806, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.807195273994456e-05, |
| "loss": 4.0939, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806356679243404e-05, |
| "loss": 4.0892, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.805518084492352e-05, |
| "loss": 4.0859, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804681127621674e-05, |
| "loss": 4.0693, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803842532870622e-05, |
| "loss": 4.0767, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80300393811957e-05, |
| "loss": 4.0835, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802165343368518e-05, |
| "loss": 4.0836, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801328386497839e-05, |
| "loss": 4.0807, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.800489791746787e-05, |
| "loss": 4.0758, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.799651196995735e-05, |
| "loss": 4.0792, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.798812602244683e-05, |
| "loss": 4.0689, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7979756453740036e-05, |
| "loss": 4.0628, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7971370506229516e-05, |
| "loss": 4.0782, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7962984558718996e-05, |
| "loss": 4.0699, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7954598611208476e-05, |
| "loss": 4.0737, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794622904250169e-05, |
| "loss": 4.0667, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.793784309499117e-05, |
| "loss": 4.0583, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792945714748065e-05, |
| "loss": 4.0722, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792107119997013e-05, |
| "loss": 4.0783, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791270163126334e-05, |
| "loss": 4.0662, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790431568375282e-05, |
| "loss": 4.0626, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.78959297362423e-05, |
| "loss": 4.0539, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7887543788731774e-05, |
| "loss": 4.0603, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787917422002499e-05, |
| "loss": 4.0658, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787078827251447e-05, |
| "loss": 4.0459, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.786241870380768e-05, |
| "loss": 4.0525, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7854032756297166e-05, |
| "loss": 4.0583, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845646808786646e-05, |
| "loss": 4.0615, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837260861276126e-05, |
| "loss": 4.0478, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7828874913765606e-05, |
| "loss": 4.0537, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782048896625508e-05, |
| "loss": 4.0431, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.781210301874456e-05, |
| "loss": 4.0645, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.780371707123404e-05, |
| "loss": 4.042, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.779533112372352e-05, |
| "loss": 4.0516, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.778696155501673e-05, |
| "loss": 4.0577, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777857560750621e-05, |
| "loss": 4.0568, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7770206038799423e-05, |
| "loss": 4.0511, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7761820091288903e-05, |
| "loss": 4.041, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7753434143778383e-05, |
| "loss": 4.0483, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.774504819626786e-05, |
| "loss": 4.0529, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773666224875734e-05, |
| "loss": 4.0365, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772827630124682e-05, |
| "loss": 4.041, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.77198903537363e-05, |
| "loss": 4.038, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771150440622578e-05, |
| "loss": 4.0555, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770313483751899e-05, |
| "loss": 4.0288, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769474889000847e-05, |
| "loss": 4.0369, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768636294249795e-05, |
| "loss": 4.0338, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767797699498743e-05, |
| "loss": 4.0376, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766960742628064e-05, |
| "loss": 4.0314, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766122147877012e-05, |
| "loss": 4.0504, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76528355312596e-05, |
| "loss": 4.0447, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764444958374909e-05, |
| "loss": 4.0341, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76360800150423e-05, |
| "loss": 4.026, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7627710446335506e-05, |
| "loss": 4.0444, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7619324498824986e-05, |
| "loss": 4.0379, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7610938551314466e-05, |
| "loss": 4.0458, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7602552603803946e-05, |
| "loss": 4.022, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7594166656293426e-05, |
| "loss": 4.0434, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7585780708782906e-05, |
| "loss": 4.034, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7577394761272386e-05, |
| "loss": 4.0431, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7569008813761866e-05, |
| "loss": 4.0242, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7560639245055075e-05, |
| "loss": 4.0382, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7552253297544555e-05, |
| "loss": 4.0286, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7543867350034035e-05, |
| "loss": 4.0157, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753548140252352e-05, |
| "loss": 4.0382, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.752711183381673e-05, |
| "loss": 4.0294, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751872588630621e-05, |
| "loss": 4.0266, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751033993879569e-05, |
| "loss": 4.0236, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.750195399128517e-05, |
| "loss": 4.0178, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.023409366607666, |
| "eval_runtime": 606.916, |
| "eval_samples_per_second": 628.738, |
| "eval_steps_per_second": 19.649, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.749356804377465e-05, |
| "loss": 3.9994, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748518209626413e-05, |
| "loss": 4.0067, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747679614875361e-05, |
| "loss": 4.0297, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7468410201243084e-05, |
| "loss": 4.0171, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7460024253732563e-05, |
| "loss": 4.0258, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451638306222043e-05, |
| "loss": 4.0089, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.744325235871152e-05, |
| "loss": 4.0108, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434866411201e-05, |
| "loss": 4.0041, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.742649684249422e-05, |
| "loss": 4.007, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74181108949837e-05, |
| "loss": 4.014, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740972494747318e-05, |
| "loss": 4.0165, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740133899996266e-05, |
| "loss": 4.0186, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739296943125587e-05, |
| "loss": 4.0043, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738458348374535e-05, |
| "loss": 3.9982, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.737619753623483e-05, |
| "loss": 3.998, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736781158872431e-05, |
| "loss": 3.9878, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735942564121379e-05, |
| "loss": 4.0011, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735103969370327e-05, |
| "loss": 3.9961, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734265374619275e-05, |
| "loss": 3.9943, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733428417748596e-05, |
| "loss": 4.0179, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7325898229975444e-05, |
| "loss": 3.9971, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7317512282464924e-05, |
| "loss": 4.0003, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7309126334954404e-05, |
| "loss": 4.0022, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7300740387443884e-05, |
| "loss": 4.0122, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7292354439933364e-05, |
| "loss": 3.9813, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7283968492422844e-05, |
| "loss": 3.9926, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7275582544912323e-05, |
| "loss": 4.0001, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726721297620553e-05, |
| "loss": 3.9935, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725884340749874e-05, |
| "loss": 3.984, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725045745998822e-05, |
| "loss": 3.9787, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724208789128143e-05, |
| "loss": 3.9982, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723370194377091e-05, |
| "loss": 3.9973, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.72253159962604e-05, |
| "loss": 3.9864, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721693004874988e-05, |
| "loss": 3.9925, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720854410123936e-05, |
| "loss": 3.9901, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720015815372884e-05, |
| "loss": 3.9881, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719177220621832e-05, |
| "loss": 3.9779, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71833862587078e-05, |
| "loss": 3.9855, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717500031119727e-05, |
| "loss": 3.9641, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716661436368675e-05, |
| "loss": 3.9812, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715822841617623e-05, |
| "loss": 3.9808, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714984246866571e-05, |
| "loss": 3.9901, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714147289995892e-05, |
| "loss": 3.9791, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71330869524484e-05, |
| "loss": 3.9808, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712470100493788e-05, |
| "loss": 3.9828, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7116315057427366e-05, |
| "loss": 3.9762, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7107929109916846e-05, |
| "loss": 3.9874, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099543162406326e-05, |
| "loss": 3.9588, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091157214895806e-05, |
| "loss": 3.9749, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7082771267385286e-05, |
| "loss": 3.9817, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7074418077482224e-05, |
| "loss": 3.9822, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7066032129971704e-05, |
| "loss": 3.9693, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7057646182461184e-05, |
| "loss": 3.9621, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7049260234950664e-05, |
| "loss": 3.967, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7040874287440144e-05, |
| "loss": 3.9521, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7032488339929624e-05, |
| "loss": 3.9814, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7024102392419104e-05, |
| "loss": 3.9608, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015716444908584e-05, |
| "loss": 3.9701, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70073468762018e-05, |
| "loss": 3.9695, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699896092869128e-05, |
| "loss": 3.9576, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699057498118076e-05, |
| "loss": 3.965, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698218903367024e-05, |
| "loss": 3.9552, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697380308615972e-05, |
| "loss": 3.956, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696543351745293e-05, |
| "loss": 3.955, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695704756994241e-05, |
| "loss": 3.972, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694866162243189e-05, |
| "loss": 3.9525, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694027567492137e-05, |
| "loss": 3.9533, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693188972741085e-05, |
| "loss": 3.9412, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692350377990033e-05, |
| "loss": 3.955, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691511783238981e-05, |
| "loss": 3.963, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690673188487928e-05, |
| "loss": 3.9664, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68983623161725e-05, |
| "loss": 3.9545, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6889976368661984e-05, |
| "loss": 3.9656, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688159042115146e-05, |
| "loss": 3.9709, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687320447364094e-05, |
| "loss": 3.955, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686483490493415e-05, |
| "loss": 3.946, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.685644895742363e-05, |
| "loss": 3.9588, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6848063009913106e-05, |
| "loss": 3.9507, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6839677062402586e-05, |
| "loss": 3.953, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68313074936958e-05, |
| "loss": 3.9453, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682292154618528e-05, |
| "loss": 3.958, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6814535598674755e-05, |
| "loss": 3.9608, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6806149651164235e-05, |
| "loss": 3.9497, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679778008245745e-05, |
| "loss": 3.9348, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678939413494693e-05, |
| "loss": 3.9437, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678100818743641e-05, |
| "loss": 3.9491, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677262223992589e-05, |
| "loss": 3.9526, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676423629241537e-05, |
| "loss": 3.9479, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675585034490485e-05, |
| "loss": 3.9492, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674746439739433e-05, |
| "loss": 3.9475, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673909482868754e-05, |
| "loss": 3.9398, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673070888117702e-05, |
| "loss": 3.9363, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.67223229336665e-05, |
| "loss": 3.9418, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.671393698615598e-05, |
| "loss": 3.9464, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.670556741744919e-05, |
| "loss": 3.9484, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697181469938675e-05, |
| "loss": 3.9421, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688795522428155e-05, |
| "loss": 3.9291, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6680409574917635e-05, |
| "loss": 3.9464, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6672023627407115e-05, |
| "loss": 3.9547, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663654058700324e-05, |
| "loss": 3.939, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6655268111189804e-05, |
| "loss": 3.9414, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6646882163679284e-05, |
| "loss": 3.9299, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6638496216168764e-05, |
| "loss": 3.9372, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6630110268658244e-05, |
| "loss": 3.9448, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6621724321147724e-05, |
| "loss": 3.9218, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661335475244093e-05, |
| "loss": 3.931, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660496880493041e-05, |
| "loss": 3.9398, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659658285741989e-05, |
| "loss": 3.9361, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658819690990937e-05, |
| "loss": 3.9258, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657982734120259e-05, |
| "loss": 3.9378, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657144139369207e-05, |
| "loss": 3.9231, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656305544618155e-05, |
| "loss": 3.9437, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655466949867103e-05, |
| "loss": 3.926, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654628355116051e-05, |
| "loss": 3.9293, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653791398245372e-05, |
| "loss": 3.9404, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65295280349432e-05, |
| "loss": 3.9406, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652114208743268e-05, |
| "loss": 3.9359, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651275613992216e-05, |
| "loss": 3.9255, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650438657121537e-05, |
| "loss": 3.9298, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649600062370485e-05, |
| "loss": 3.9328, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648761467619433e-05, |
| "loss": 3.9253, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647922872868381e-05, |
| "loss": 3.9288, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647085915997702e-05, |
| "loss": 3.9214, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64624732124665e-05, |
| "loss": 3.9351, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645408726495598e-05, |
| "loss": 3.923, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644570131744546e-05, |
| "loss": 3.922, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643731536993494e-05, |
| "loss": 3.9171, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642894580122815e-05, |
| "loss": 3.9272, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642055985371763e-05, |
| "loss": 3.9156, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641217390620711e-05, |
| "loss": 3.9379, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640378795869659e-05, |
| "loss": 3.9332, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6395402011186065e-05, |
| "loss": 3.9251, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638703244247928e-05, |
| "loss": 3.911, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637864649496876e-05, |
| "loss": 3.9354, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637026054745824e-05, |
| "loss": 3.9259, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636187459994772e-05, |
| "loss": 3.9297, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6353505031240936e-05, |
| "loss": 3.9155, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6345119083730416e-05, |
| "loss": 3.9264, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633673313621989e-05, |
| "loss": 3.9274, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632834718870937e-05, |
| "loss": 3.9372, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6319977620002585e-05, |
| "loss": 3.9196, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631159167249206e-05, |
| "loss": 3.9249, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630320572498154e-05, |
| "loss": 3.9213, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629481977747102e-05, |
| "loss": 3.9072, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6286450208764234e-05, |
| "loss": 3.9287, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6278064261253714e-05, |
| "loss": 3.927, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6269678313743194e-05, |
| "loss": 3.9153, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6261292366232674e-05, |
| "loss": 3.9178, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6252906418722154e-05, |
| "loss": 3.9098, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9507577419281006, |
| "eval_runtime": 304.4686, |
| "eval_samples_per_second": 1253.302, |
| "eval_steps_per_second": 39.167, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.624453685001536e-05, |
| "loss": 3.9042, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.623615090250484e-05, |
| "loss": 3.9002, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622776495499432e-05, |
| "loss": 3.9253, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.62193790074838e-05, |
| "loss": 3.9096, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621100943877701e-05, |
| "loss": 3.9248, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620262349126649e-05, |
| "loss": 3.9054, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619423754375597e-05, |
| "loss": 3.9098, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.618585159624545e-05, |
| "loss": 3.8971, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617748202753867e-05, |
| "loss": 3.9079, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616909608002815e-05, |
| "loss": 3.9091, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616071013251763e-05, |
| "loss": 3.9149, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615232418500711e-05, |
| "loss": 3.9143, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614395461630032e-05, |
| "loss": 3.9033, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61355686687898e-05, |
| "loss": 3.8984, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.612718272127928e-05, |
| "loss": 3.8973, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611879677376876e-05, |
| "loss": 3.8902, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6110410826258237e-05, |
| "loss": 3.8974, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6102024878747717e-05, |
| "loss": 3.8969, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6093638931237196e-05, |
| "loss": 3.8935, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6085252983726676e-05, |
| "loss": 3.9221, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6076867036216156e-05, |
| "loss": 3.899, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606849746750937e-05, |
| "loss": 3.9, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606011151999885e-05, |
| "loss": 3.9049, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.605172557248833e-05, |
| "loss": 3.9101, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.604333962497781e-05, |
| "loss": 3.886, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.603497005627102e-05, |
| "loss": 3.8941, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.60265841087605e-05, |
| "loss": 3.9019, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.601819816124998e-05, |
| "loss": 3.8996, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.600981221373946e-05, |
| "loss": 3.8841, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600142626622894e-05, |
| "loss": 3.8842, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.599307307632588e-05, |
| "loss": 3.9018, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598468712881536e-05, |
| "loss": 3.9027, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5976301181304846e-05, |
| "loss": 3.8951, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5967915233794326e-05, |
| "loss": 3.8976, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5959529286283806e-05, |
| "loss": 3.894, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5951143338773286e-05, |
| "loss": 3.8941, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5942757391262766e-05, |
| "loss": 3.882, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5934371443752246e-05, |
| "loss": 3.896, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5925985496241726e-05, |
| "loss": 3.8729, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.59175995487312e-05, |
| "loss": 3.8848, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590921360122068e-05, |
| "loss": 3.8817, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5900844032513895e-05, |
| "loss": 3.9007, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.589245808500337e-05, |
| "loss": 3.8889, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.588407213749285e-05, |
| "loss": 3.8866, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.587568618998233e-05, |
| "loss": 3.8887, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5867316621275544e-05, |
| "loss": 3.8893, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5858930673765024e-05, |
| "loss": 3.895, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5850544726254504e-05, |
| "loss": 3.8695, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5842158778743984e-05, |
| "loss": 3.8805, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.58337892100372e-05, |
| "loss": 3.8935, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.582540326252667e-05, |
| "loss": 3.8904, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.581701731501615e-05, |
| "loss": 3.8788, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580863136750563e-05, |
| "loss": 3.8749, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580026179879884e-05, |
| "loss": 3.878, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.579187585128832e-05, |
| "loss": 3.865, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.57834899037778e-05, |
| "loss": 3.8902, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577510395626728e-05, |
| "loss": 3.8698, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.576671800875677e-05, |
| "loss": 3.8825, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575834844004998e-05, |
| "loss": 3.8826, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574996249253946e-05, |
| "loss": 3.868, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574157654502894e-05, |
| "loss": 3.873, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.573319059751842e-05, |
| "loss": 3.869, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5724821028811626e-05, |
| "loss": 3.8664, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5716435081301106e-05, |
| "loss": 3.8723, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5708049133790586e-05, |
| "loss": 3.8813, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5699663186280066e-05, |
| "loss": 3.8674, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5691293617573275e-05, |
| "loss": 3.8681, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5682907670062755e-05, |
| "loss": 3.8576, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5674521722552235e-05, |
| "loss": 3.8681, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5666135775041715e-05, |
| "loss": 3.874, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.565776620633493e-05, |
| "loss": 3.8872, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564938025882441e-05, |
| "loss": 3.8661, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564099431131389e-05, |
| "loss": 3.8801, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.563260836380337e-05, |
| "loss": 3.8821, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.562423879509658e-05, |
| "loss": 3.8775, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.561585284758606e-05, |
| "loss": 3.8569, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.560746690007554e-05, |
| "loss": 3.8784, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559908095256502e-05, |
| "loss": 3.87, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559071138385823e-05, |
| "loss": 3.8642, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.558232543634771e-05, |
| "loss": 3.8695, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.557393948883719e-05, |
| "loss": 3.8718, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556555354132667e-05, |
| "loss": 3.8768, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5557183972619885e-05, |
| "loss": 3.8664, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5548798025109365e-05, |
| "loss": 3.8526, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5540412077598845e-05, |
| "loss": 3.8634, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5532026130088325e-05, |
| "loss": 3.8618, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5523656561381534e-05, |
| "loss": 3.8709, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5515270613871014e-05, |
| "loss": 3.8707, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5506884666360494e-05, |
| "loss": 3.8641, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5498498718849973e-05, |
| "loss": 3.8696, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5490112771339453e-05, |
| "loss": 3.856, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548174320263266e-05, |
| "loss": 3.8564, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.547335725512214e-05, |
| "loss": 3.8605, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.546497130761162e-05, |
| "loss": 3.8661, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.545658536010111e-05, |
| "loss": 3.8664, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.544821579139432e-05, |
| "loss": 3.872, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.54398298438838e-05, |
| "loss": 3.8452, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543144389637328e-05, |
| "loss": 3.8682, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.542305794886276e-05, |
| "loss": 3.8738, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.541468838015597e-05, |
| "loss": 3.8611, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.540630243264545e-05, |
| "loss": 3.8607, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.539791648513493e-05, |
| "loss": 3.851, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538953053762441e-05, |
| "loss": 3.8596, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5381160968917616e-05, |
| "loss": 3.8629, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5372775021407096e-05, |
| "loss": 3.8442, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5364389073896576e-05, |
| "loss": 3.8558, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535600312638606e-05, |
| "loss": 3.8565, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.534763355767927e-05, |
| "loss": 3.8605, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533924761016875e-05, |
| "loss": 3.8452, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533086166265823e-05, |
| "loss": 3.8659, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.532247571514771e-05, |
| "loss": 3.8413, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.531410614644092e-05, |
| "loss": 3.8681, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.53057201989304e-05, |
| "loss": 3.8541, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.529733425141988e-05, |
| "loss": 3.8475, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528894830390936e-05, |
| "loss": 3.8641, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528057873520257e-05, |
| "loss": 3.8647, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.527219278769205e-05, |
| "loss": 3.8573, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526380684018153e-05, |
| "loss": 3.8511, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5255420892671017e-05, |
| "loss": 3.8551, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5247051323964226e-05, |
| "loss": 3.8562, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5238665376453706e-05, |
| "loss": 3.8476, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5230279428943186e-05, |
| "loss": 3.8524, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221893481432665e-05, |
| "loss": 3.8521, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5213523912725875e-05, |
| "loss": 3.8546, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5205137965215355e-05, |
| "loss": 3.8511, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5196752017704834e-05, |
| "loss": 3.8503, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5188366070194314e-05, |
| "loss": 3.8358, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5179996501487524e-05, |
| "loss": 3.8522, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5171610553977003e-05, |
| "loss": 3.8454, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5163224606466483e-05, |
| "loss": 3.8604, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515483865895597e-05, |
| "loss": 3.8593, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514646909024918e-05, |
| "loss": 3.8543, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513808314273866e-05, |
| "loss": 3.8385, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512969719522814e-05, |
| "loss": 3.8596, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512131124771762e-05, |
| "loss": 3.8524, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.511294167901083e-05, |
| "loss": 3.8575, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.510455573150031e-05, |
| "loss": 3.8386, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.509616978398979e-05, |
| "loss": 3.8578, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.508778383647927e-05, |
| "loss": 3.8563, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507941426777248e-05, |
| "loss": 3.865, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507102832026196e-05, |
| "loss": 3.8411, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.506264237275144e-05, |
| "loss": 3.8532, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5054256425240924e-05, |
| "loss": 3.8495, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.504588685653413e-05, |
| "loss": 3.8395, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.503750090902361e-05, |
| "loss": 3.8522, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502911496151309e-05, |
| "loss": 3.8566, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502072901400257e-05, |
| "loss": 3.846, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.501235944529578e-05, |
| "loss": 3.8442, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.500397349778526e-05, |
| "loss": 3.8438, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.9098222255706787, |
| "eval_runtime": 304.4905, |
| "eval_samples_per_second": 1253.211, |
| "eval_steps_per_second": 39.164, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.499558755027474e-05, |
| "loss": 3.8265, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4987201602764215e-05, |
| "loss": 3.8299, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4978815655253695e-05, |
| "loss": 3.8502, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4970429707743175e-05, |
| "loss": 3.8425, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496204376023266e-05, |
| "loss": 3.854, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.495365781272214e-05, |
| "loss": 3.8371, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.494527186521162e-05, |
| "loss": 3.8402, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.49368859177011e-05, |
| "loss": 3.8255, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492851634899431e-05, |
| "loss": 3.8416, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492013040148379e-05, |
| "loss": 3.8404, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.491174445397327e-05, |
| "loss": 3.8402, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.490335850646275e-05, |
| "loss": 3.8459, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.489500531655969e-05, |
| "loss": 3.8312, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.488661936904917e-05, |
| "loss": 3.8329, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.487823342153865e-05, |
| "loss": 3.8283, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486984747402813e-05, |
| "loss": 3.8207, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486146152651761e-05, |
| "loss": 3.8302, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4853075579007095e-05, |
| "loss": 3.8297, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4844689631496575e-05, |
| "loss": 3.826, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4836303683986055e-05, |
| "loss": 3.852, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4827934115279264e-05, |
| "loss": 3.8323, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4819548167768744e-05, |
| "loss": 3.8319, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4811162220258224e-05, |
| "loss": 3.8379, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4802776272747704e-05, |
| "loss": 3.8405, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479440670404091e-05, |
| "loss": 3.8225, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478602075653039e-05, |
| "loss": 3.8294, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477763480901987e-05, |
| "loss": 3.8333, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476924886150935e-05, |
| "loss": 3.8328, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476087929280256e-05, |
| "loss": 3.8161, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475249334529205e-05, |
| "loss": 3.8189, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474410739778153e-05, |
| "loss": 3.8337, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473573782907474e-05, |
| "loss": 3.8356, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472735188156422e-05, |
| "loss": 3.83, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.47189659340537e-05, |
| "loss": 3.8318, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471057998654318e-05, |
| "loss": 3.8241, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.470219403903266e-05, |
| "loss": 3.832, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.469380809152214e-05, |
| "loss": 3.8186, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.468542214401162e-05, |
| "loss": 3.8264, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.467705257530483e-05, |
| "loss": 3.808, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466866662779431e-05, |
| "loss": 3.8194, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466028068028379e-05, |
| "loss": 3.8167, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465189473277327e-05, |
| "loss": 3.8385, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464352516406648e-05, |
| "loss": 3.8259, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463513921655596e-05, |
| "loss": 3.8208, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.462675326904544e-05, |
| "loss": 3.8242, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461836732153492e-05, |
| "loss": 3.8266, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460999775282813e-05, |
| "loss": 3.8311, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460161180531761e-05, |
| "loss": 3.8084, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459322585780709e-05, |
| "loss": 3.813, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.458483991029657e-05, |
| "loss": 3.8308, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457647034158978e-05, |
| "loss": 3.826, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.456810077288299e-05, |
| "loss": 3.8139, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455971482537247e-05, |
| "loss": 3.8102, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551328877861956e-05, |
| "loss": 3.8175, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542942930351436e-05, |
| "loss": 3.8035, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534556982840916e-05, |
| "loss": 3.8245, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4526171035330396e-05, |
| "loss": 3.8069, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4517785087819876e-05, |
| "loss": 3.8186, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4509399140309356e-05, |
| "loss": 3.8188, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4501029571602565e-05, |
| "loss": 3.8123, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4492643624092045e-05, |
| "loss": 3.8054, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4484257676581525e-05, |
| "loss": 3.8103, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4475871729071e-05, |
| "loss": 3.8024, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4467502160364214e-05, |
| "loss": 3.8116, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4459116212853694e-05, |
| "loss": 3.8193, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4450730265343174e-05, |
| "loss": 3.8074, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4442344317832654e-05, |
| "loss": 3.8056, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443397474912587e-05, |
| "loss": 3.7964, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442558880161535e-05, |
| "loss": 3.8026, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.441720285410482e-05, |
| "loss": 3.8138, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44088169065943e-05, |
| "loss": 3.8209, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440044733788752e-05, |
| "loss": 3.8088, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4392061390377e-05, |
| "loss": 3.8191, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438367544286647e-05, |
| "loss": 3.8222, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437528949535595e-05, |
| "loss": 3.8163, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436691992664917e-05, |
| "loss": 3.7974, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435853397913865e-05, |
| "loss": 3.8155, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435014803162813e-05, |
| "loss": 3.8108, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434176208411761e-05, |
| "loss": 3.8019, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4333392515410824e-05, |
| "loss": 3.8068, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.43250065679003e-05, |
| "loss": 3.8146, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431663699919351e-05, |
| "loss": 3.8191, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430825105168299e-05, |
| "loss": 3.8059, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429986510417247e-05, |
| "loss": 3.7902, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4291479156661946e-05, |
| "loss": 3.8066, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4283093209151426e-05, |
| "loss": 3.8053, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4274707261640906e-05, |
| "loss": 3.8058, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4266321314130386e-05, |
| "loss": 3.8111, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4257935366619865e-05, |
| "loss": 3.8072, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424956579791308e-05, |
| "loss": 3.8113, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424117985040256e-05, |
| "loss": 3.7987, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.423279390289204e-05, |
| "loss": 3.7969, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.422440795538152e-05, |
| "loss": 3.799, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.421603838667473e-05, |
| "loss": 3.81, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.420765243916421e-05, |
| "loss": 3.8084, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419926649165369e-05, |
| "loss": 3.8117, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419088054414317e-05, |
| "loss": 3.789, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.418251097543638e-05, |
| "loss": 3.8062, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.417412502792586e-05, |
| "loss": 3.8133, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416573908041534e-05, |
| "loss": 3.8078, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415735313290482e-05, |
| "loss": 3.8008, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4148983564198035e-05, |
| "loss": 3.7963, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4140597616687515e-05, |
| "loss": 3.8004, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4132211669176995e-05, |
| "loss": 3.8088, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4123825721666475e-05, |
| "loss": 3.7847, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4115456152959684e-05, |
| "loss": 3.8018, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4107070205449164e-05, |
| "loss": 3.7974, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4098684257938644e-05, |
| "loss": 3.8045, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409031468923185e-05, |
| "loss": 3.7856, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.408192874172133e-05, |
| "loss": 3.8075, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407354279421081e-05, |
| "loss": 3.7957, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.406515684670029e-05, |
| "loss": 3.8065, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405677089918977e-05, |
| "loss": 3.7974, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404838495167925e-05, |
| "loss": 3.7912, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403999900416873e-05, |
| "loss": 3.8069, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403161305665822e-05, |
| "loss": 3.8108, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402324348795143e-05, |
| "loss": 3.8006, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.401485754044091e-05, |
| "loss": 3.7932, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400647159293039e-05, |
| "loss": 3.7994, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.399808564541987e-05, |
| "loss": 3.7995, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398971607671308e-05, |
| "loss": 3.7898, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398133012920256e-05, |
| "loss": 3.7978, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397294418169204e-05, |
| "loss": 3.7945, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396455823418152e-05, |
| "loss": 3.798, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3956188665474726e-05, |
| "loss": 3.7969, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3947802717964206e-05, |
| "loss": 3.7981, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3939416770453686e-05, |
| "loss": 3.7817, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393103082294317e-05, |
| "loss": 3.7956, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392266125423638e-05, |
| "loss": 3.7923, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.391427530672586e-05, |
| "loss": 3.8055, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.390588935921534e-05, |
| "loss": 3.7971, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.389750341170482e-05, |
| "loss": 3.8034, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388913384299803e-05, |
| "loss": 3.7841, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388074789548751e-05, |
| "loss": 3.8047, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.387236194797699e-05, |
| "loss": 3.7996, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.386397600046647e-05, |
| "loss": 3.8021, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.385560643175968e-05, |
| "loss": 3.7838, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.384722048424916e-05, |
| "loss": 3.8008, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383883453673864e-05, |
| "loss": 3.7998, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383044858922813e-05, |
| "loss": 3.8146, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3822079020521336e-05, |
| "loss": 3.7846, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3813693073010816e-05, |
| "loss": 3.7985, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3805307125500296e-05, |
| "loss": 3.7953, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3796921177989776e-05, |
| "loss": 3.7843, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3788551609282985e-05, |
| "loss": 3.7995, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3780165661772465e-05, |
| "loss": 3.7995, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3771779714261945e-05, |
| "loss": 3.7961, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3763393766751425e-05, |
| "loss": 3.7875, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3755024198044634e-05, |
| "loss": 3.7929, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.88472843170166, |
| "eval_runtime": 304.6101, |
| "eval_samples_per_second": 1252.72, |
| "eval_steps_per_second": 39.148, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3746638250534114e-05, |
| "loss": 3.7814, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3738252303023594e-05, |
| "loss": 3.7747, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372986635551308e-05, |
| "loss": 3.796, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372149678680629e-05, |
| "loss": 3.7924, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.371311083929577e-05, |
| "loss": 3.7969, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.370472489178525e-05, |
| "loss": 3.7881, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.369633894427473e-05, |
| "loss": 3.7835, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.368796937556794e-05, |
| "loss": 3.7754, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367959980686115e-05, |
| "loss": 3.7871, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367121385935063e-05, |
| "loss": 3.7877, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.366282791184011e-05, |
| "loss": 3.7883, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.365444196432959e-05, |
| "loss": 3.7916, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36460723956228e-05, |
| "loss": 3.7797, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.363768644811228e-05, |
| "loss": 3.782, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362930050060176e-05, |
| "loss": 3.7793, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362091455309124e-05, |
| "loss": 3.7669, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.361252860558072e-05, |
| "loss": 3.7809, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36041426580702e-05, |
| "loss": 3.7782, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3595756710559676e-05, |
| "loss": 3.773, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3587370763049156e-05, |
| "loss": 3.7971, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357900119434237e-05, |
| "loss": 3.7857, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3570615246831845e-05, |
| "loss": 3.7831, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3562229299321325e-05, |
| "loss": 3.7874, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3553843351810805e-05, |
| "loss": 3.7909, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.354547378310402e-05, |
| "loss": 3.7679, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.35370878355935e-05, |
| "loss": 3.7801, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352870188808298e-05, |
| "loss": 3.7855, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352031594057246e-05, |
| "loss": 3.776, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351194637186568e-05, |
| "loss": 3.7675, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.350356042435515e-05, |
| "loss": 3.7672, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.349517447684463e-05, |
| "loss": 3.7846, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3486804908137846e-05, |
| "loss": 3.7835, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347841896062732e-05, |
| "loss": 3.7811, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.34700330131168e-05, |
| "loss": 3.7816, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346164706560628e-05, |
| "loss": 3.7759, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.345326111809576e-05, |
| "loss": 3.7783, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.344487517058524e-05, |
| "loss": 3.7713, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3436489223074726e-05, |
| "loss": 3.7761, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3428119654367935e-05, |
| "loss": 3.7605, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3419733706857415e-05, |
| "loss": 3.7675, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3411347759346895e-05, |
| "loss": 3.7697, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3402961811836375e-05, |
| "loss": 3.7866, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3394575864325854e-05, |
| "loss": 3.776, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3386189916815334e-05, |
| "loss": 3.7735, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3377803969304814e-05, |
| "loss": 3.7788, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3369418021794294e-05, |
| "loss": 3.7726, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3361048453087503e-05, |
| "loss": 3.7823, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3352662505576983e-05, |
| "loss": 3.7629, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.334427655806646e-05, |
| "loss": 3.7622, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.333589061055594e-05, |
| "loss": 3.7808, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.332752104184916e-05, |
| "loss": 3.7774, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331913509433864e-05, |
| "loss": 3.766, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331074914682812e-05, |
| "loss": 3.7663, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.33023631993176e-05, |
| "loss": 3.7695, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.329399363061081e-05, |
| "loss": 3.751, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.328560768310029e-05, |
| "loss": 3.7763, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.327722173558977e-05, |
| "loss": 3.7612, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326883578807925e-05, |
| "loss": 3.7656, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326046621937246e-05, |
| "loss": 3.775, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.325208027186194e-05, |
| "loss": 3.7614, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.324369432435142e-05, |
| "loss": 3.7618, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32353083768409e-05, |
| "loss": 3.7627, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.322692242933038e-05, |
| "loss": 3.7564, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321853648181986e-05, |
| "loss": 3.7608, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321015053430934e-05, |
| "loss": 3.7721, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320176458679882e-05, |
| "loss": 3.7629, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.319339501809203e-05, |
| "loss": 3.7555, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3185009070581506e-05, |
| "loss": 3.7525, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3176623123070986e-05, |
| "loss": 3.7551, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3168237175560466e-05, |
| "loss": 3.7635, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3159851228049946e-05, |
| "loss": 3.7724, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3151465280539426e-05, |
| "loss": 3.7645, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3143079333028906e-05, |
| "loss": 3.7711, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3134709764322115e-05, |
| "loss": 3.7735, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3126323816811595e-05, |
| "loss": 3.7751, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311793786930108e-05, |
| "loss": 3.7505, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310955192179056e-05, |
| "loss": 3.7652, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310118235308377e-05, |
| "loss": 3.7679, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.309279640557325e-05, |
| "loss": 3.7523, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.308441045806273e-05, |
| "loss": 3.7652, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.307602451055221e-05, |
| "loss": 3.7634, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3067671320649155e-05, |
| "loss": 3.775, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.305928537313863e-05, |
| "loss": 3.7624, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.305089942562811e-05, |
| "loss": 3.7436, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.304251347811759e-05, |
| "loss": 3.7589, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.303412753060707e-05, |
| "loss": 3.7587, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.302574158309655e-05, |
| "loss": 3.7643, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3017355635586035e-05, |
| "loss": 3.7606, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3008969688075515e-05, |
| "loss": 3.7632, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3000600119368724e-05, |
| "loss": 3.7642, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2992214171858204e-05, |
| "loss": 3.755, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2983828224347684e-05, |
| "loss": 3.7478, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2975442276837164e-05, |
| "loss": 3.756, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.296707270813037e-05, |
| "loss": 3.767, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295868676061985e-05, |
| "loss": 3.7628, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295030081310933e-05, |
| "loss": 3.7648, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.294191486559881e-05, |
| "loss": 3.747, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.293354529689202e-05, |
| "loss": 3.7599, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.29251593493815e-05, |
| "loss": 3.7672, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291678978067472e-05, |
| "loss": 3.7635, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.29084038331642e-05, |
| "loss": 3.7557, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.290001788565368e-05, |
| "loss": 3.753, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289163193814316e-05, |
| "loss": 3.7531, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.288324599063264e-05, |
| "loss": 3.7627, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.287486004312212e-05, |
| "loss": 3.7392, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.28664740956116e-05, |
| "loss": 3.7569, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.285808814810108e-05, |
| "loss": 3.749, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284971857939429e-05, |
| "loss": 3.7609, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2841349010687496e-05, |
| "loss": 3.7399, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2832963063176976e-05, |
| "loss": 3.766, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2824577115666456e-05, |
| "loss": 3.7487, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.281619116815594e-05, |
| "loss": 3.7629, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.280780522064542e-05, |
| "loss": 3.7538, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.27994192731349e-05, |
| "loss": 3.7479, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279103332562438e-05, |
| "loss": 3.7591, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.278266375691759e-05, |
| "loss": 3.7662, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.277427780940707e-05, |
| "loss": 3.7572, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.276589186189655e-05, |
| "loss": 3.7465, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.275750591438603e-05, |
| "loss": 3.755, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274911996687551e-05, |
| "loss": 3.756, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274073401936499e-05, |
| "loss": 3.7456, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2732348071854464e-05, |
| "loss": 3.7549, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.272397850314768e-05, |
| "loss": 3.7468, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.271559255563716e-05, |
| "loss": 3.7524, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.270720660812664e-05, |
| "loss": 3.7548, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269882066061612e-05, |
| "loss": 3.7553, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2690451091909336e-05, |
| "loss": 3.7356, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2682065144398816e-05, |
| "loss": 3.7525, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.267367919688829e-05, |
| "loss": 3.7467, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.266529324937777e-05, |
| "loss": 3.7615, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2656923680670985e-05, |
| "loss": 3.7542, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2648537733160465e-05, |
| "loss": 3.761, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264015178564994e-05, |
| "loss": 3.7395, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2631782216943154e-05, |
| "loss": 3.7603, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2623396269432634e-05, |
| "loss": 3.757, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2615010321922114e-05, |
| "loss": 3.7565, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2606624374411594e-05, |
| "loss": 3.7466, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2598238426901074e-05, |
| "loss": 3.756, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2589852479390554e-05, |
| "loss": 3.7564, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2581466531880034e-05, |
| "loss": 3.7683, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2573080584369514e-05, |
| "loss": 3.7443, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.256471101566272e-05, |
| "loss": 3.7566, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.25563250681522e-05, |
| "loss": 3.7533, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.254793912064168e-05, |
| "loss": 3.7428, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253955317313116e-05, |
| "loss": 3.7567, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253118360442437e-05, |
| "loss": 3.7537, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252279765691385e-05, |
| "loss": 3.7595, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251441170940333e-05, |
| "loss": 3.7445, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.250602576189281e-05, |
| "loss": 3.7494, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.868333339691162, |
| "eval_runtime": 305.3931, |
| "eval_samples_per_second": 1249.508, |
| "eval_steps_per_second": 39.048, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249765619318603e-05, |
| "loss": 3.7396, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248927024567551e-05, |
| "loss": 3.7281, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248088429816499e-05, |
| "loss": 3.7547, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247249835065447e-05, |
| "loss": 3.7518, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246411240314395e-05, |
| "loss": 3.7569, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245572645563343e-05, |
| "loss": 3.7465, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244734050812291e-05, |
| "loss": 3.7371, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243895456061239e-05, |
| "loss": 3.738, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2430584991905596e-05, |
| "loss": 3.7454, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2422199044395076e-05, |
| "loss": 3.7447, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2413813096884556e-05, |
| "loss": 3.7479, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2405427149374036e-05, |
| "loss": 3.75, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239707395947098e-05, |
| "loss": 3.7409, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238868801196046e-05, |
| "loss": 3.7369, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238030206444994e-05, |
| "loss": 3.7406, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237191611693942e-05, |
| "loss": 3.723, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23635301694289e-05, |
| "loss": 3.7401, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235514422191838e-05, |
| "loss": 3.7362, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234675827440786e-05, |
| "loss": 3.732, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233837232689734e-05, |
| "loss": 3.7548, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233000275819055e-05, |
| "loss": 3.7438, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232161681068003e-05, |
| "loss": 3.7443, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.231323086316951e-05, |
| "loss": 3.7446, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230484491565899e-05, |
| "loss": 3.7473, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2296475346952206e-05, |
| "loss": 3.73, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2288089399441686e-05, |
| "loss": 3.7417, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2279703451931166e-05, |
| "loss": 3.7395, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2271317504420645e-05, |
| "loss": 3.7347, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2262947935713855e-05, |
| "loss": 3.7326, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2254561988203335e-05, |
| "loss": 3.7241, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2246176040692814e-05, |
| "loss": 3.7447, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2237806471986024e-05, |
| "loss": 3.7426, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2229420524475504e-05, |
| "loss": 3.7399, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2221034576964983e-05, |
| "loss": 3.7436, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2212648629454463e-05, |
| "loss": 3.7335, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2204262681943943e-05, |
| "loss": 3.7402, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219587673443342e-05, |
| "loss": 3.7346, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21874907869229e-05, |
| "loss": 3.7313, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217912121821612e-05, |
| "loss": 3.7214, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21707352707056e-05, |
| "loss": 3.7277, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.216234932319507e-05, |
| "loss": 3.7286, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215396337568455e-05, |
| "loss": 3.7477, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214559380697777e-05, |
| "loss": 3.7346, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.213720785946725e-05, |
| "loss": 3.7305, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212882191195672e-05, |
| "loss": 3.7418, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21204359644462e-05, |
| "loss": 3.7323, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211206639573942e-05, |
| "loss": 3.7412, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21036804482289e-05, |
| "loss": 3.7263, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.209529450071838e-05, |
| "loss": 3.7168, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208690855320786e-05, |
| "loss": 3.7402, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207853898450107e-05, |
| "loss": 3.7388, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207016941579428e-05, |
| "loss": 3.7287, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206178346828376e-05, |
| "loss": 3.7238, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.205339752077324e-05, |
| "loss": 3.7317, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204501157326272e-05, |
| "loss": 3.711, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2036625625752195e-05, |
| "loss": 3.7362, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2028239678241675e-05, |
| "loss": 3.7238, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2019853730731155e-05, |
| "loss": 3.7254, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2011484162024364e-05, |
| "loss": 3.7364, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200309821451385e-05, |
| "loss": 3.7235, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.199471226700333e-05, |
| "loss": 3.7203, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.198632631949281e-05, |
| "loss": 3.728, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197795675078602e-05, |
| "loss": 3.7153, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1969587182079236e-05, |
| "loss": 3.7235, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1961201234568716e-05, |
| "loss": 3.7309, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195281528705819e-05, |
| "loss": 3.7248, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194442933954767e-05, |
| "loss": 3.7173, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193604339203715e-05, |
| "loss": 3.7148, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.192765744452663e-05, |
| "loss": 3.7134, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191927149701611e-05, |
| "loss": 3.7228, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191088554950559e-05, |
| "loss": 3.7343, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1902515980798804e-05, |
| "loss": 3.7247, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1894130033288284e-05, |
| "loss": 3.7349, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1885744085777764e-05, |
| "loss": 3.7324, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1877358138267244e-05, |
| "loss": 3.738, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186898856956045e-05, |
| "loss": 3.7143, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186060262204993e-05, |
| "loss": 3.7295, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.185221667453941e-05, |
| "loss": 3.7293, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.184384710583262e-05, |
| "loss": 3.7092, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18354611583221e-05, |
| "loss": 3.7301, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182707521081158e-05, |
| "loss": 3.7279, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181868926330106e-05, |
| "loss": 3.7316, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181031969459427e-05, |
| "loss": 3.7245, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180195012588749e-05, |
| "loss": 3.7081, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.179356417837697e-05, |
| "loss": 3.7183, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178517823086645e-05, |
| "loss": 3.7209, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177679228335593e-05, |
| "loss": 3.7251, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176840633584541e-05, |
| "loss": 3.7237, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176002038833489e-05, |
| "loss": 3.7266, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175163444082437e-05, |
| "loss": 3.7254, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174324849331385e-05, |
| "loss": 3.7195, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1734878924607056e-05, |
| "loss": 3.711, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1726492977096536e-05, |
| "loss": 3.7151, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1718107029586016e-05, |
| "loss": 3.7314, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1709721082075496e-05, |
| "loss": 3.7285, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1701351513368705e-05, |
| "loss": 3.7258, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.169298194466192e-05, |
| "loss": 3.7094, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16845959971514e-05, |
| "loss": 3.7205, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167621004964088e-05, |
| "loss": 3.7306, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166782410213036e-05, |
| "loss": 3.724, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165943815461984e-05, |
| "loss": 3.7216, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165105220710932e-05, |
| "loss": 3.7173, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16426662595988e-05, |
| "loss": 3.7177, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163428031208828e-05, |
| "loss": 3.7243, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162591074338149e-05, |
| "loss": 3.7045, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.161752479587097e-05, |
| "loss": 3.7162, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160913884836045e-05, |
| "loss": 3.7124, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160075290084993e-05, |
| "loss": 3.7257, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1592399710946875e-05, |
| "loss": 3.7085, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1584013763436354e-05, |
| "loss": 3.726, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1575627815925834e-05, |
| "loss": 3.7137, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1567241868415314e-05, |
| "loss": 3.7223, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1558855920904794e-05, |
| "loss": 3.7231, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1550469973394274e-05, |
| "loss": 3.7079, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1542084025883754e-05, |
| "loss": 3.7224, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153371445717696e-05, |
| "loss": 3.7302, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152532850966644e-05, |
| "loss": 3.7183, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151694256215592e-05, |
| "loss": 3.7183, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15085566146454e-05, |
| "loss": 3.7084, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150017066713488e-05, |
| "loss": 3.7205, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149178471962436e-05, |
| "loss": 3.7169, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148339877211384e-05, |
| "loss": 3.7156, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147502920340706e-05, |
| "loss": 3.711, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.146664325589654e-05, |
| "loss": 3.7136, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.145825730838602e-05, |
| "loss": 3.719, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14498713608755e-05, |
| "loss": 3.7188, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144148541336497e-05, |
| "loss": 3.7002, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.143309946585445e-05, |
| "loss": 3.7194, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.142471351834393e-05, |
| "loss": 3.7077, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.141632757083341e-05, |
| "loss": 3.7271, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.140795800212662e-05, |
| "loss": 3.7175, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13995720546161e-05, |
| "loss": 3.7267, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139118610710558e-05, |
| "loss": 3.7034, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138280015959507e-05, |
| "loss": 3.7262, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137443059088828e-05, |
| "loss": 3.7193, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136604464337776e-05, |
| "loss": 3.722, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1357658695867237e-05, |
| "loss": 3.7063, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1349272748356717e-05, |
| "loss": 3.719, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1340903179649926e-05, |
| "loss": 3.7208, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1332517232139406e-05, |
| "loss": 3.7318, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1324131284628886e-05, |
| "loss": 3.7105, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1315745337118366e-05, |
| "loss": 3.7201, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1307375768411575e-05, |
| "loss": 3.7196, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129900619970479e-05, |
| "loss": 3.7109, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129062025219427e-05, |
| "loss": 3.716, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.128223430468375e-05, |
| "loss": 3.7191, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.127384835717323e-05, |
| "loss": 3.721, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.126546240966271e-05, |
| "loss": 3.7106, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125707646215219e-05, |
| "loss": 3.7159, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.858412027359009, |
| "eval_runtime": 303.6375, |
| "eval_samples_per_second": 1256.732, |
| "eval_steps_per_second": 39.274, |
| "step": 534240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.12487068934454e-05, |
| "loss": 3.7091, |
| "step": 534528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.124032094593488e-05, |
| "loss": 3.694, |
| "step": 535040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.123193499842436e-05, |
| "loss": 3.7168, |
| "step": 535552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.122354905091384e-05, |
| "loss": 3.7157, |
| "step": 536064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.121516310340332e-05, |
| "loss": 3.7177, |
| "step": 536576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.12067771558928e-05, |
| "loss": 3.7139, |
| "step": 537088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.119839120838228e-05, |
| "loss": 3.7045, |
| "step": 537600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.119000526087176e-05, |
| "loss": 3.703, |
| "step": 538112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118163569216497e-05, |
| "loss": 3.7106, |
| "step": 538624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1173249744654455e-05, |
| "loss": 3.7057, |
| "step": 539136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1164863797143935e-05, |
| "loss": 3.7144, |
| "step": 539648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1156477849633415e-05, |
| "loss": 3.7159, |
| "step": 540160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1148108280926624e-05, |
| "loss": 3.7077, |
| "step": 540672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1139722333416104e-05, |
| "loss": 3.7015, |
| "step": 541184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1131336385905584e-05, |
| "loss": 3.7045, |
| "step": 541696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1122950438395064e-05, |
| "loss": 3.6875, |
| "step": 542208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1114564490884544e-05, |
| "loss": 3.7038, |
| "step": 542720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1106178543374024e-05, |
| "loss": 3.7024, |
| "step": 543232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1097792595863504e-05, |
| "loss": 3.7017, |
| "step": 543744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1089406648352984e-05, |
| "loss": 3.7151, |
| "step": 544256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108103707964619e-05, |
| "loss": 3.7139, |
| "step": 544768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.107265113213567e-05, |
| "loss": 3.7076, |
| "step": 545280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.106428156342889e-05, |
| "loss": 3.7095, |
| "step": 545792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.105589561591837e-05, |
| "loss": 3.7118, |
| "step": 546304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.104750966840785e-05, |
| "loss": 3.6979, |
| "step": 546816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103912372089733e-05, |
| "loss": 3.7089, |
| "step": 547328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103073777338681e-05, |
| "loss": 3.7073, |
| "step": 547840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.102235182587628e-05, |
| "loss": 3.702, |
| "step": 548352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.101396587836576e-05, |
| "loss": 3.6966, |
| "step": 548864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.100559630965898e-05, |
| "loss": 3.696, |
| "step": 549376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.099721036214846e-05, |
| "loss": 3.7063, |
| "step": 549888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.098882441463793e-05, |
| "loss": 3.7123, |
| "step": 550400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.098043846712741e-05, |
| "loss": 3.7064, |
| "step": 550912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.097205251961689e-05, |
| "loss": 3.7082, |
| "step": 551424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.096366657210638e-05, |
| "loss": 3.6963, |
| "step": 551936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0955297003399586e-05, |
| "loss": 3.7067, |
| "step": 552448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0946911055889066e-05, |
| "loss": 3.7068, |
| "step": 552960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0938525108378546e-05, |
| "loss": 3.6915, |
| "step": 553472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0930139160868026e-05, |
| "loss": 3.6916, |
| "step": 553984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0921753213357506e-05, |
| "loss": 3.6931, |
| "step": 554496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0913367265846986e-05, |
| "loss": 3.6942, |
| "step": 555008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0904981318336466e-05, |
| "loss": 3.7135, |
| "step": 555520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0896611749629675e-05, |
| "loss": 3.6996, |
| "step": 556032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0888225802119155e-05, |
| "loss": 3.6942, |
| "step": 556544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0879839854608635e-05, |
| "loss": 3.7107, |
| "step": 557056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0871453907098115e-05, |
| "loss": 3.6999, |
| "step": 557568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0863067959587595e-05, |
| "loss": 3.7067, |
| "step": 558080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0854682012077075e-05, |
| "loss": 3.693, |
| "step": 558592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.084629606456656e-05, |
| "loss": 3.6848, |
| "step": 559104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.083791011705604e-05, |
| "loss": 3.7109, |
| "step": 559616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082954054834925e-05, |
| "loss": 3.7024, |
| "step": 560128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082115460083873e-05, |
| "loss": 3.6968, |
| "step": 560640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.081278503213194e-05, |
| "loss": 3.6942, |
| "step": 561152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.080439908462142e-05, |
| "loss": 3.6898, |
| "step": 561664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.07960131371109e-05, |
| "loss": 3.6818, |
| "step": 562176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.078762718960038e-05, |
| "loss": 3.697, |
| "step": 562688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077924124208986e-05, |
| "loss": 3.6918, |
| "step": 563200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077087167338307e-05, |
| "loss": 3.694, |
| "step": 563712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.076248572587255e-05, |
| "loss": 3.7034, |
| "step": 564224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.075409977836203e-05, |
| "loss": 3.6914, |
| "step": 564736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0745713830851515e-05, |
| "loss": 3.686, |
| "step": 565248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0737327883340995e-05, |
| "loss": 3.6995, |
| "step": 565760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072894193583047e-05, |
| "loss": 3.679, |
| "step": 566272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0720572367123684e-05, |
| "loss": 3.6916, |
| "step": 566784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0712186419613164e-05, |
| "loss": 3.6954, |
| "step": 567296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0703800472102644e-05, |
| "loss": 3.6931, |
| "step": 567808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.069541452459212e-05, |
| "loss": 3.6851, |
| "step": 568320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.06870285770816e-05, |
| "loss": 3.6846, |
| "step": 568832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067864262957108e-05, |
| "loss": 3.6795, |
| "step": 569344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067025668206056e-05, |
| "loss": 3.6915, |
| "step": 569856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.066187073455004e-05, |
| "loss": 3.7024, |
| "step": 570368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.065350116584325e-05, |
| "loss": 3.6961, |
| "step": 570880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.064511521833273e-05, |
| "loss": 3.698, |
| "step": 571392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.063672927082221e-05, |
| "loss": 3.6987, |
| "step": 571904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.062834332331169e-05, |
| "loss": 3.7057, |
| "step": 572416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.06199737546049e-05, |
| "loss": 3.6806, |
| "step": 572928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.061158780709438e-05, |
| "loss": 3.6994, |
| "step": 573440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.060320185958386e-05, |
| "loss": 3.6993, |
| "step": 573952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.059483229087707e-05, |
| "loss": 3.6731, |
| "step": 574464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.058644634336655e-05, |
| "loss": 3.7024, |
| "step": 574976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.057807677465976e-05, |
| "loss": 3.6918, |
| "step": 575488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056969082714924e-05, |
| "loss": 3.7015, |
| "step": 576000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056130487963872e-05, |
| "loss": 3.6955, |
| "step": 576512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.05529189321282e-05, |
| "loss": 3.6775, |
| "step": 577024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.054453298461769e-05, |
| "loss": 3.6825, |
| "step": 577536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0536147037107167e-05, |
| "loss": 3.6913, |
| "step": 578048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0527761089596647e-05, |
| "loss": 3.692, |
| "step": 578560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0519391520889856e-05, |
| "loss": 3.6913, |
| "step": 579072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0511005573379336e-05, |
| "loss": 3.6942, |
| "step": 579584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0502619625868816e-05, |
| "loss": 3.6925, |
| "step": 580096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0494233678358296e-05, |
| "loss": 3.6865, |
| "step": 580608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0485864109651505e-05, |
| "loss": 3.6838, |
| "step": 581120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0477478162140985e-05, |
| "loss": 3.6823, |
| "step": 581632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0469092214630465e-05, |
| "loss": 3.6966, |
| "step": 582144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0460722645923674e-05, |
| "loss": 3.696, |
| "step": 582656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0452336698413154e-05, |
| "loss": 3.6992, |
| "step": 583168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.044395075090264e-05, |
| "loss": 3.6747, |
| "step": 583680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.043556480339212e-05, |
| "loss": 3.689, |
| "step": 584192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.04271788558816e-05, |
| "loss": 3.699, |
| "step": 584704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.041880928717481e-05, |
| "loss": 3.6953, |
| "step": 585216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.041042333966429e-05, |
| "loss": 3.6893, |
| "step": 585728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.040203739215377e-05, |
| "loss": 3.683, |
| "step": 586240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.039365144464325e-05, |
| "loss": 3.6873, |
| "step": 586752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.038526549713273e-05, |
| "loss": 3.6915, |
| "step": 587264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.037687954962221e-05, |
| "loss": 3.6731, |
| "step": 587776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036850998091542e-05, |
| "loss": 3.6896, |
| "step": 588288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.03601240334049e-05, |
| "loss": 3.676, |
| "step": 588800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035173808589438e-05, |
| "loss": 3.695, |
| "step": 589312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.034335213838386e-05, |
| "loss": 3.6787, |
| "step": 589824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.033496619087334e-05, |
| "loss": 3.6887, |
| "step": 590336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0326596622166554e-05, |
| "loss": 3.6849, |
| "step": 590848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0318210674656034e-05, |
| "loss": 3.6935, |
| "step": 591360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0309824727145514e-05, |
| "loss": 3.6901, |
| "step": 591872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0301438779634994e-05, |
| "loss": 3.6759, |
| "step": 592384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.02930692109282e-05, |
| "loss": 3.6911, |
| "step": 592896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.028468326341768e-05, |
| "loss": 3.6986, |
| "step": 593408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.027629731590716e-05, |
| "loss": 3.6877, |
| "step": 593920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.026791136839664e-05, |
| "loss": 3.6908, |
| "step": 594432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025954179968985e-05, |
| "loss": 3.6774, |
| "step": 594944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025115585217933e-05, |
| "loss": 3.6872, |
| "step": 595456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.024276990466881e-05, |
| "loss": 3.6914, |
| "step": 595968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.023438395715829e-05, |
| "loss": 3.6785, |
| "step": 596480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.022599800964778e-05, |
| "loss": 3.6834, |
| "step": 596992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.021761206213725e-05, |
| "loss": 3.6834, |
| "step": 597504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.020922611462673e-05, |
| "loss": 3.6899, |
| "step": 598016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.020084016711621e-05, |
| "loss": 3.6875, |
| "step": 598528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.019247059840943e-05, |
| "loss": 3.6702, |
| "step": 599040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01840846508989e-05, |
| "loss": 3.6892, |
| "step": 599552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.017569870338838e-05, |
| "loss": 3.6732, |
| "step": 600064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.016731275587786e-05, |
| "loss": 3.6945, |
| "step": 600576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015892680836734e-05, |
| "loss": 3.6887, |
| "step": 601088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015054086085682e-05, |
| "loss": 3.6962, |
| "step": 601600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.014217129215003e-05, |
| "loss": 3.6725, |
| "step": 602112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0133785344639516e-05, |
| "loss": 3.6971, |
| "step": 602624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0125399397128996e-05, |
| "loss": 3.6879, |
| "step": 603136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0117013449618476e-05, |
| "loss": 3.689, |
| "step": 603648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0108627502107956e-05, |
| "loss": 3.6786, |
| "step": 604160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0100241554597436e-05, |
| "loss": 3.6856, |
| "step": 604672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0091855607086916e-05, |
| "loss": 3.6885, |
| "step": 605184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0083469659576396e-05, |
| "loss": 3.7049, |
| "step": 605696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0075116469673334e-05, |
| "loss": 3.6805, |
| "step": 606208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0066730522162814e-05, |
| "loss": 3.6882, |
| "step": 606720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0058344574652294e-05, |
| "loss": 3.6931, |
| "step": 607232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0049958627141774e-05, |
| "loss": 3.6808, |
| "step": 607744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.004158905843498e-05, |
| "loss": 3.6815, |
| "step": 608256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.003320311092447e-05, |
| "loss": 3.6902, |
| "step": 608768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.002481716341395e-05, |
| "loss": 3.689, |
| "step": 609280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.001643121590343e-05, |
| "loss": 3.6808, |
| "step": 609792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.000806164719664e-05, |
| "loss": 3.6858, |
| "step": 610304 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.851067066192627, |
| "eval_runtime": 304.307, |
| "eval_samples_per_second": 1253.967, |
| "eval_steps_per_second": 39.187, |
| "step": 610560 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 4.2384890755488154e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |
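A minimal, hypothetical sketch (not part of the original trainer state): one way to load a file like the one above and summarize its `log_history`, separating the per-step training losses from the periodic eval records. The filename `trainer_state.json` is an assumption; point it at wherever this JSON is saved.

```python
# Sketch only: summarize a Hugging Face Trainer state file of the shape shown above.
import json

# Assumed path; adjust to the actual location of the saved trainer state.
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes training entries (with "loss") and eval entries (with "eval_loss").
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

# Lowest eval_loss seen so far; in the tail above this is ~3.8511 at step 610560.
best = min(eval_logs, key=lambda e: e["eval_loss"])

print(f"last train loss: {train_logs[-1]['loss']:.4f} at step {train_logs[-1]['step']}")
print(f"best eval loss:  {best['eval_loss']:.4f} at step {best['step']}")
print(f"progress: step {best['step']} of {state['max_steps']} max steps")
```

The same `eval_logs` list can be fed into any plotting tool to chart the eval-loss curve against `step` if a visual check of convergence is needed.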