| { | |
| "best_metric": 3.8685173988342285, | |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-c-command/transformer/2/checkpoints/checkpoint-457920", | |
| "epoch": 1.0250006060157382, | |
| "eval_steps": 10, | |
| "global_step": 457920, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999998362119627e-05, | |
| "loss": 10.9766, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.999161405248948e-05, | |
| "loss": 6.8228, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.998322810497896e-05, | |
| "loss": 6.1813, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.997484215746844e-05, | |
| "loss": 5.9673, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.996645620995792e-05, | |
| "loss": 5.8024, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99580702624474e-05, | |
| "loss": 5.6932, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994968431493688e-05, | |
| "loss": 5.5963, | |
| "step": 3072 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.994129836742636e-05, | |
| "loss": 5.5331, | |
| "step": 3584 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.993291241991584e-05, | |
| "loss": 5.4421, | |
| "step": 4096 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.992452647240532e-05, | |
| "loss": 5.4089, | |
| "step": 4608 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.99161405248948e-05, | |
| "loss": 5.3501, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.990775457738428e-05, | |
| "loss": 5.3099, | |
| "step": 5632 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989938500867749e-05, | |
| "loss": 5.2682, | |
| "step": 6144 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.989099906116697e-05, | |
| "loss": 5.2006, | |
| "step": 6656 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.988261311365645e-05, | |
| "loss": 5.1813, | |
| "step": 7168 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.987422716614593e-05, | |
| "loss": 5.1334, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.986584121863541e-05, | |
| "loss": 5.1106, | |
| "step": 8192 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.985745527112489e-05, | |
| "loss": 5.0804, | |
| "step": 8704 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984906932361437e-05, | |
| "loss": 5.0426, | |
| "step": 9216 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.984068337610385e-05, | |
| "loss": 5.0182, | |
| "step": 9728 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.983229742859333e-05, | |
| "loss": 5.0045, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9823927859886547e-05, | |
| "loss": 4.9756, | |
| "step": 10752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9815541912376026e-05, | |
| "loss": 4.9436, | |
| "step": 11264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9807155964865506e-05, | |
| "loss": 4.9246, | |
| "step": 11776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9798770017354986e-05, | |
| "loss": 4.9147, | |
| "step": 12288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9790400448648195e-05, | |
| "loss": 4.8895, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9782014501137675e-05, | |
| "loss": 4.8614, | |
| "step": 13312 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9773628553627155e-05, | |
| "loss": 4.8516, | |
| "step": 13824 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9765242606116635e-05, | |
| "loss": 4.8314, | |
| "step": 14336 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.9756856658606115e-05, | |
| "loss": 4.8165, | |
| "step": 14848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9748487089899324e-05, | |
| "loss": 4.8007, | |
| "step": 15360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9740101142388804e-05, | |
| "loss": 4.788, | |
| "step": 15872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9731715194878284e-05, | |
| "loss": 4.7767, | |
| "step": 16384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.97233456261715e-05, | |
| "loss": 4.7564, | |
| "step": 16896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.971495967866098e-05, | |
| "loss": 4.754, | |
| "step": 17408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.970657373115046e-05, | |
| "loss": 4.7528, | |
| "step": 17920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.969818778363994e-05, | |
| "loss": 4.7291, | |
| "step": 18432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.968980183612942e-05, | |
| "loss": 4.6918, | |
| "step": 18944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96814158886189e-05, | |
| "loss": 4.6965, | |
| "step": 19456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.967302994110837e-05, | |
| "loss": 4.6778, | |
| "step": 19968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.966464399359785e-05, | |
| "loss": 4.6813, | |
| "step": 20480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.965627442489107e-05, | |
| "loss": 4.6677, | |
| "step": 20992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.964788847738054e-05, | |
| "loss": 4.6605, | |
| "step": 21504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.963950252987002e-05, | |
| "loss": 4.6397, | |
| "step": 22016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96311165823595e-05, | |
| "loss": 4.6491, | |
| "step": 22528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.962274701365272e-05, | |
| "loss": 4.6317, | |
| "step": 23040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.96143610661422e-05, | |
| "loss": 4.6303, | |
| "step": 23552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.960597511863168e-05, | |
| "loss": 4.6183, | |
| "step": 24064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.959758917112116e-05, | |
| "loss": 4.5972, | |
| "step": 24576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9589219602414374e-05, | |
| "loss": 4.5981, | |
| "step": 25088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.958083365490385e-05, | |
| "loss": 4.6012, | |
| "step": 25600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.957244770739333e-05, | |
| "loss": 4.5797, | |
| "step": 26112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.956406175988281e-05, | |
| "loss": 4.5709, | |
| "step": 26624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9555692191176016e-05, | |
| "loss": 4.5617, | |
| "step": 27136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9547306243665496e-05, | |
| "loss": 4.5593, | |
| "step": 27648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9538920296154976e-05, | |
| "loss": 4.5431, | |
| "step": 28160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9530534348644456e-05, | |
| "loss": 4.5645, | |
| "step": 28672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9522148401133936e-05, | |
| "loss": 4.5313, | |
| "step": 29184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.951376245362342e-05, | |
| "loss": 4.5424, | |
| "step": 29696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.95053765061129e-05, | |
| "loss": 4.533, | |
| "step": 30208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.949699055860238e-05, | |
| "loss": 4.5063, | |
| "step": 30720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948862098989559e-05, | |
| "loss": 4.5142, | |
| "step": 31232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.948023504238507e-05, | |
| "loss": 4.5051, | |
| "step": 31744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.947184909487455e-05, | |
| "loss": 4.4905, | |
| "step": 32256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.946346314736403e-05, | |
| "loss": 4.4835, | |
| "step": 32768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.945509357865724e-05, | |
| "loss": 4.4997, | |
| "step": 33280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.944670763114672e-05, | |
| "loss": 4.4766, | |
| "step": 33792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.94383216836362e-05, | |
| "loss": 4.4716, | |
| "step": 34304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.942993573612568e-05, | |
| "loss": 4.4592, | |
| "step": 34816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.942154978861516e-05, | |
| "loss": 4.4729, | |
| "step": 35328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.941318021990837e-05, | |
| "loss": 4.4778, | |
| "step": 35840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9404794272397856e-05, | |
| "loss": 4.4629, | |
| "step": 36352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9396408324887336e-05, | |
| "loss": 4.4559, | |
| "step": 36864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9388022377376816e-05, | |
| "loss": 4.4615, | |
| "step": 37376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9379652808670025e-05, | |
| "loss": 4.457, | |
| "step": 37888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9371266861159505e-05, | |
| "loss": 4.4481, | |
| "step": 38400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9362880913648985e-05, | |
| "loss": 4.4302, | |
| "step": 38912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9354494966138465e-05, | |
| "loss": 4.4384, | |
| "step": 39424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9346125397431674e-05, | |
| "loss": 4.4257, | |
| "step": 39936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9337739449921154e-05, | |
| "loss": 4.4239, | |
| "step": 40448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9329353502410634e-05, | |
| "loss": 4.4129, | |
| "step": 40960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.9320967554900114e-05, | |
| "loss": 4.4181, | |
| "step": 41472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.931259798619332e-05, | |
| "loss": 4.4245, | |
| "step": 41984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.930421203868281e-05, | |
| "loss": 4.4153, | |
| "step": 42496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.929582609117229e-05, | |
| "loss": 4.3883, | |
| "step": 43008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.928744014366177e-05, | |
| "loss": 4.3973, | |
| "step": 43520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.927907057495498e-05, | |
| "loss": 4.3992, | |
| "step": 44032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.927068462744446e-05, | |
| "loss": 4.3946, | |
| "step": 44544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.926229867993394e-05, | |
| "loss": 4.3894, | |
| "step": 45056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.925392911122715e-05, | |
| "loss": 4.3856, | |
| "step": 45568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.924554316371663e-05, | |
| "loss": 4.3814, | |
| "step": 46080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.923715721620611e-05, | |
| "loss": 4.3686, | |
| "step": 46592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922877126869559e-05, | |
| "loss": 4.3744, | |
| "step": 47104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.922038532118507e-05, | |
| "loss": 4.374, | |
| "step": 47616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.921199937367455e-05, | |
| "loss": 4.3642, | |
| "step": 48128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.920362980496776e-05, | |
| "loss": 4.3746, | |
| "step": 48640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.919524385745724e-05, | |
| "loss": 4.3525, | |
| "step": 49152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.918685790994672e-05, | |
| "loss": 4.3542, | |
| "step": 49664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.91784719624362e-05, | |
| "loss": 4.3508, | |
| "step": 50176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9170086014925676e-05, | |
| "loss": 4.3512, | |
| "step": 50688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9161700067415156e-05, | |
| "loss": 4.3547, | |
| "step": 51200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9153314119904636e-05, | |
| "loss": 4.3397, | |
| "step": 51712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9144928172394116e-05, | |
| "loss": 4.3358, | |
| "step": 52224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9136558603687325e-05, | |
| "loss": 4.3329, | |
| "step": 52736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9128172656176805e-05, | |
| "loss": 4.3359, | |
| "step": 53248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9119786708666285e-05, | |
| "loss": 4.3184, | |
| "step": 53760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.91114171399595e-05, | |
| "loss": 4.3116, | |
| "step": 54272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.910303119244898e-05, | |
| "loss": 4.3205, | |
| "step": 54784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.909464524493846e-05, | |
| "loss": 4.326, | |
| "step": 55296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.908625929742794e-05, | |
| "loss": 4.3166, | |
| "step": 55808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.907787334991742e-05, | |
| "loss": 4.3152, | |
| "step": 56320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.90694874024069e-05, | |
| "loss": 4.3042, | |
| "step": 56832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.906110145489638e-05, | |
| "loss": 4.3147, | |
| "step": 57344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.905273188618959e-05, | |
| "loss": 4.2992, | |
| "step": 57856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.904434593867907e-05, | |
| "loss": 4.305, | |
| "step": 58368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.903595999116855e-05, | |
| "loss": 4.3068, | |
| "step": 58880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.902757404365803e-05, | |
| "loss": 4.3051, | |
| "step": 59392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901920447495124e-05, | |
| "loss": 4.2954, | |
| "step": 59904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.901081852744072e-05, | |
| "loss": 4.2938, | |
| "step": 60416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.90024325799302e-05, | |
| "loss": 4.293, | |
| "step": 60928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8994046632419686e-05, | |
| "loss": 4.294, | |
| "step": 61440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8985660684909166e-05, | |
| "loss": 4.2822, | |
| "step": 61952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8977274737398646e-05, | |
| "loss": 4.2869, | |
| "step": 62464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8968888789888125e-05, | |
| "loss": 4.2839, | |
| "step": 62976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8960519221181335e-05, | |
| "loss": 4.2846, | |
| "step": 63488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8952133273670815e-05, | |
| "loss": 4.2676, | |
| "step": 64000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8943747326160294e-05, | |
| "loss": 4.2668, | |
| "step": 64512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8935361378649774e-05, | |
| "loss": 4.2698, | |
| "step": 65024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8926991809942984e-05, | |
| "loss": 4.2739, | |
| "step": 65536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8918605862432463e-05, | |
| "loss": 4.2631, | |
| "step": 66048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8910219914921943e-05, | |
| "loss": 4.276, | |
| "step": 66560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.8901833967411423e-05, | |
| "loss": 4.2686, | |
| "step": 67072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.88934480199009e-05, | |
| "loss": 4.2537, | |
| "step": 67584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.888506207239038e-05, | |
| "loss": 4.2592, | |
| "step": 68096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.88766925036836e-05, | |
| "loss": 4.2701, | |
| "step": 68608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.886830655617308e-05, | |
| "loss": 4.2598, | |
| "step": 69120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.885992060866256e-05, | |
| "loss": 4.2564, | |
| "step": 69632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.885153466115204e-05, | |
| "loss": 4.2464, | |
| "step": 70144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.884314871364151e-05, | |
| "loss": 4.2535, | |
| "step": 70656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.883476276613099e-05, | |
| "loss": 4.2591, | |
| "step": 71168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.882637681862047e-05, | |
| "loss": 4.2454, | |
| "step": 71680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.881799087110995e-05, | |
| "loss": 4.2362, | |
| "step": 72192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880962130240316e-05, | |
| "loss": 4.2412, | |
| "step": 72704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.880123535489264e-05, | |
| "loss": 4.2361, | |
| "step": 73216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.879286578618586e-05, | |
| "loss": 4.233, | |
| "step": 73728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.878447983867534e-05, | |
| "loss": 4.2422, | |
| "step": 74240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.877609389116482e-05, | |
| "loss": 4.231, | |
| "step": 74752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.87677079436543e-05, | |
| "loss": 4.2325, | |
| "step": 75264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875932199614378e-05, | |
| "loss": 4.2296, | |
| "step": 75776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.875093604863326e-05, | |
| "loss": 4.2236, | |
| "step": 76288 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 4.1937055587768555, | |
| "eval_runtime": 307.9359, | |
| "eval_samples_per_second": 1239.19, | |
| "eval_steps_per_second": 38.726, | |
| "step": 76320 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.874255010112274e-05, | |
| "loss": 4.2123, | |
| "step": 76800 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.873416415361222e-05, | |
| "loss": 4.218, | |
| "step": 77312 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8725794584905426e-05, | |
| "loss": 4.2272, | |
| "step": 77824 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8717408637394906e-05, | |
| "loss": 4.2165, | |
| "step": 78336 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8709022689884386e-05, | |
| "loss": 4.217, | |
| "step": 78848 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8700636742373866e-05, | |
| "loss": 4.2124, | |
| "step": 79360 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8692267173667075e-05, | |
| "loss": 4.2076, | |
| "step": 79872 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8683881226156555e-05, | |
| "loss": 4.1962, | |
| "step": 80384 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.867551165744977e-05, | |
| "loss": 4.2199, | |
| "step": 80896 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.866712570993925e-05, | |
| "loss": 4.2037, | |
| "step": 81408 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.865873976242873e-05, | |
| "loss": 4.2127, | |
| "step": 81920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.865035381491821e-05, | |
| "loss": 4.2119, | |
| "step": 82432 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.864198424621142e-05, | |
| "loss": 4.1933, | |
| "step": 82944 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.86335982987009e-05, | |
| "loss": 4.1923, | |
| "step": 83456 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.862521235119038e-05, | |
| "loss": 4.1894, | |
| "step": 83968 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.861682640367986e-05, | |
| "loss": 4.1826, | |
| "step": 84480 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.860844045616934e-05, | |
| "loss": 4.1879, | |
| "step": 84992 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.860005450865882e-05, | |
| "loss": 4.181, | |
| "step": 85504 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.85916685611483e-05, | |
| "loss": 4.1765, | |
| "step": 86016 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.858328261363778e-05, | |
| "loss": 4.1997, | |
| "step": 86528 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.857489666612726e-05, | |
| "loss": 4.1823, | |
| "step": 87040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8566527097420475e-05, | |
| "loss": 4.1752, | |
| "step": 87552 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8558141149909955e-05, | |
| "loss": 4.1722, | |
| "step": 88064 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8549755202399435e-05, | |
| "loss": 4.1875, | |
| "step": 88576 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8541369254888915e-05, | |
| "loss": 4.164, | |
| "step": 89088 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8532983307378395e-05, | |
| "loss": 4.1628, | |
| "step": 89600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8524597359867875e-05, | |
| "loss": 4.1656, | |
| "step": 90112 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8516227791161084e-05, | |
| "loss": 4.1648, | |
| "step": 90624 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.8507841843650564e-05, | |
| "loss": 4.1593, | |
| "step": 91136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8499455896140044e-05, | |
| "loss": 4.1592, | |
| "step": 91648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8491069948629524e-05, | |
| "loss": 4.1623, | |
| "step": 92160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8482684001119e-05, | |
| "loss": 4.1653, | |
| "step": 92672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.847429805360848e-05, | |
| "loss": 4.1509, | |
| "step": 93184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8465912106097964e-05, | |
| "loss": 4.1609, | |
| "step": 93696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.845754253739117e-05, | |
| "loss": 4.1647, | |
| "step": 94208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.844915658988065e-05, | |
| "loss": 4.1592, | |
| "step": 94720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.844077064237013e-05, | |
| "loss": 4.134, | |
| "step": 95232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.843238469485961e-05, | |
| "loss": 4.1402, | |
| "step": 95744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.842399874734909e-05, | |
| "loss": 4.1391, | |
| "step": 96256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.841561279983857e-05, | |
| "loss": 4.1446, | |
| "step": 96768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.840722685232805e-05, | |
| "loss": 4.1387, | |
| "step": 97280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839884090481753e-05, | |
| "loss": 4.1453, | |
| "step": 97792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.839047133611074e-05, | |
| "loss": 4.1326, | |
| "step": 98304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.838210176740395e-05, | |
| "loss": 4.1426, | |
| "step": 98816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.837371581989343e-05, | |
| "loss": 4.141, | |
| "step": 99328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.836532987238292e-05, | |
| "loss": 4.1408, | |
| "step": 99840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.83569439248724e-05, | |
| "loss": 4.1368, | |
| "step": 100352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.834855797736188e-05, | |
| "loss": 4.1181, | |
| "step": 100864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8340188408655086e-05, | |
| "loss": 4.1325, | |
| "step": 101376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8331802461144566e-05, | |
| "loss": 4.1364, | |
| "step": 101888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8323416513634046e-05, | |
| "loss": 4.1296, | |
| "step": 102400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8315030566123526e-05, | |
| "loss": 4.1251, | |
| "step": 102912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8306644618613006e-05, | |
| "loss": 4.1146, | |
| "step": 103424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8298275049906215e-05, | |
| "loss": 4.1236, | |
| "step": 103936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8289889102395695e-05, | |
| "loss": 4.1071, | |
| "step": 104448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8281503154885175e-05, | |
| "loss": 4.1355, | |
| "step": 104960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8273117207374655e-05, | |
| "loss": 4.1132, | |
| "step": 105472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8264731259864135e-05, | |
| "loss": 4.1258, | |
| "step": 105984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.825636169115735e-05, | |
| "loss": 4.1239, | |
| "step": 106496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.824797574364683e-05, | |
| "loss": 4.097, | |
| "step": 107008 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823958979613631e-05, | |
| "loss": 4.1139, | |
| "step": 107520 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.823120384862579e-05, | |
| "loss": 4.1078, | |
| "step": 108032 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8222834279919e-05, | |
| "loss": 4.0975, | |
| "step": 108544 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.821444833240848e-05, | |
| "loss": 4.0948, | |
| "step": 109056 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.820606238489796e-05, | |
| "loss": 4.1122, | |
| "step": 109568 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.819767643738744e-05, | |
| "loss": 4.1014, | |
| "step": 110080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.818929048987692e-05, | |
| "loss": 4.0956, | |
| "step": 110592 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.818092092117013e-05, | |
| "loss": 4.0869, | |
| "step": 111104 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.817253497365961e-05, | |
| "loss": 4.1059, | |
| "step": 111616 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.816414902614909e-05, | |
| "loss": 4.1085, | |
| "step": 112128 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.815576307863857e-05, | |
| "loss": 4.1043, | |
| "step": 112640 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.814737713112805e-05, | |
| "loss": 4.099, | |
| "step": 113152 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8139007562421265e-05, | |
| "loss": 4.1096, | |
| "step": 113664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8130621614910745e-05, | |
| "loss": 4.1062, | |
| "step": 114176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8122235667400224e-05, | |
| "loss": 4.1037, | |
| "step": 114688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8113849719889704e-05, | |
| "loss": 4.0869, | |
| "step": 115200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8105480151182914e-05, | |
| "loss": 4.0996, | |
| "step": 115712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8097094203672393e-05, | |
| "loss": 4.092, | |
| "step": 116224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8088708256161873e-05, | |
| "loss": 4.0908, | |
| "step": 116736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.808032230865135e-05, | |
| "loss": 4.088, | |
| "step": 117248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8071936361140827e-05, | |
| "loss": 4.0895, | |
| "step": 117760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8063550413630307e-05, | |
| "loss": 4.102, | |
| "step": 118272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.805518084492352e-05, | |
| "loss": 4.0946, | |
| "step": 118784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.8046794897413e-05, | |
| "loss": 4.0706, | |
| "step": 119296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.803840894990248e-05, | |
| "loss": 4.0776, | |
| "step": 119808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.803002300239196e-05, | |
| "loss": 4.0856, | |
| "step": 120320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.802163705488144e-05, | |
| "loss": 4.082, | |
| "step": 120832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.801326748617466e-05, | |
| "loss": 4.0813, | |
| "step": 121344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.800488153866413e-05, | |
| "loss": 4.0783, | |
| "step": 121856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.799649559115361e-05, | |
| "loss": 4.078, | |
| "step": 122368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.798810964364309e-05, | |
| "loss": 4.0673, | |
| "step": 122880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.79797400749363e-05, | |
| "loss": 4.0764, | |
| "step": 123392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.797135412742578e-05, | |
| "loss": 4.0744, | |
| "step": 123904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.796296817991526e-05, | |
| "loss": 4.0746, | |
| "step": 124416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.795458223240474e-05, | |
| "loss": 4.0814, | |
| "step": 124928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7946212663697956e-05, | |
| "loss": 4.0723, | |
| "step": 125440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7937826716187436e-05, | |
| "loss": 4.0653, | |
| "step": 125952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7929440768676916e-05, | |
| "loss": 4.0692, | |
| "step": 126464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7921054821166396e-05, | |
| "loss": 4.0707, | |
| "step": 126976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7912685252459605e-05, | |
| "loss": 4.0736, | |
| "step": 127488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7904299304949085e-05, | |
| "loss": 4.0641, | |
| "step": 128000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7895913357438565e-05, | |
| "loss": 4.0608, | |
| "step": 128512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7887527409928045e-05, | |
| "loss": 4.0588, | |
| "step": 129024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7879141462417525e-05, | |
| "loss": 4.0672, | |
| "step": 129536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7870771893710734e-05, | |
| "loss": 4.0524, | |
| "step": 130048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.786240232500395e-05, | |
| "loss": 4.0419, | |
| "step": 130560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.785401637749343e-05, | |
| "loss": 4.0595, | |
| "step": 131072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.784563042998291e-05, | |
| "loss": 4.0604, | |
| "step": 131584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.783724448247239e-05, | |
| "loss": 4.0533, | |
| "step": 132096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782885853496187e-05, | |
| "loss": 4.0552, | |
| "step": 132608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.782047258745135e-05, | |
| "loss": 4.0496, | |
| "step": 133120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.781208663994083e-05, | |
| "loss": 4.0573, | |
| "step": 133632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.780370069243031e-05, | |
| "loss": 4.0451, | |
| "step": 134144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.779533112372352e-05, | |
| "loss": 4.0526, | |
| "step": 134656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7786945176213e-05, | |
| "loss": 4.0599, | |
| "step": 135168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.777855922870248e-05, | |
| "loss": 4.0596, | |
| "step": 135680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.777018965999569e-05, | |
| "loss": 4.047, | |
| "step": 136192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.776180371248517e-05, | |
| "loss": 4.0467, | |
| "step": 136704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.775341776497465e-05, | |
| "loss": 4.0513, | |
| "step": 137216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7745031817464134e-05, | |
| "loss": 4.0555, | |
| "step": 137728 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7736645869953614e-05, | |
| "loss": 4.0369, | |
| "step": 138240 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7728259922443094e-05, | |
| "loss": 4.0498, | |
| "step": 138752 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7719873974932574e-05, | |
| "loss": 4.0416, | |
| "step": 139264 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7711488027422054e-05, | |
| "loss": 4.053, | |
| "step": 139776 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.770311845871526e-05, | |
| "loss": 4.0325, | |
| "step": 140288 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.769473251120474e-05, | |
| "loss": 4.0343, | |
| "step": 140800 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.768634656369422e-05, | |
| "loss": 4.038, | |
| "step": 141312 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.76779606161837e-05, | |
| "loss": 4.0426, | |
| "step": 141824 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766959104747691e-05, | |
| "loss": 4.033, | |
| "step": 142336 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.766120509996639e-05, | |
| "loss": 4.0532, | |
| "step": 142848 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.765281915245587e-05, | |
| "loss": 4.0414, | |
| "step": 143360 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.764443320494535e-05, | |
| "loss": 4.0314, | |
| "step": 143872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.763606363623857e-05, | |
| "loss": 4.032, | |
| "step": 144384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.762767768872805e-05, | |
| "loss": 4.0489, | |
| "step": 144896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761929174121753e-05, | |
| "loss": 4.039, | |
| "step": 145408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.761090579370701e-05, | |
| "loss": 4.0369, | |
| "step": 145920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.760253622500022e-05, | |
| "loss": 4.0295, | |
| "step": 146432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.75941502774897e-05, | |
| "loss": 4.0375, | |
| "step": 146944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.758576432997918e-05, | |
| "loss": 4.0403, | |
| "step": 147456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.757737838246866e-05, | |
| "loss": 4.0317, | |
| "step": 147968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7569008813761866e-05, | |
| "loss": 4.0237, | |
| "step": 148480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7560622866251346e-05, | |
| "loss": 4.0284, | |
| "step": 148992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7552236918740826e-05, | |
| "loss": 4.025, | |
| "step": 149504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7543867350034035e-05, | |
| "loss": 4.0225, | |
| "step": 150016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.753548140252352e-05, | |
| "loss": 4.036, | |
| "step": 150528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7527095455013e-05, | |
| "loss": 4.0231, | |
| "step": 151040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.751870950750248e-05, | |
| "loss": 4.026, | |
| "step": 151552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.751032355999196e-05, | |
| "loss": 4.0245, | |
| "step": 152064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.7501937612481435e-05, | |
| "loss": 4.0183, | |
| "step": 152576 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 4.024468898773193, | |
| "eval_runtime": 306.7507, | |
| "eval_samples_per_second": 1243.978, | |
| "eval_steps_per_second": 38.875, | |
| "step": 152640 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7493551664970914e-05, | |
| "loss": 4.0077, | |
| "step": 153088 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7485165717460394e-05, | |
| "loss": 4.0176, | |
| "step": 153600 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7476779769949874e-05, | |
| "loss": 4.0298, | |
| "step": 154112 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7468393822439354e-05, | |
| "loss": 4.0185, | |
| "step": 154624 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7460007874928834e-05, | |
| "loss": 4.0228, | |
| "step": 155136 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7451621927418314e-05, | |
| "loss": 4.0139, | |
| "step": 155648 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7443235979907794e-05, | |
| "loss": 4.0162, | |
| "step": 156160 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7434850032397274e-05, | |
| "loss": 4.0019, | |
| "step": 156672 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.742648046369049e-05, | |
| "loss": 4.0235, | |
| "step": 157184 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.741809451617997e-05, | |
| "loss": 4.0205, | |
| "step": 157696 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.740970856866945e-05, | |
| "loss": 4.0164, | |
| "step": 158208 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.740132262115893e-05, | |
| "loss": 4.0214, | |
| "step": 158720 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.739293667364841e-05, | |
| "loss": 4.0041, | |
| "step": 159232 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.738455072613789e-05, | |
| "loss": 4.0037, | |
| "step": 159744 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.737616477862737e-05, | |
| "loss": 4.0119, | |
| "step": 160256 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.736777883111685e-05, | |
| "loss": 3.9931, | |
| "step": 160768 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.735940926241006e-05, | |
| "loss": 4.0028, | |
| "step": 161280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.735102331489954e-05, | |
| "loss": 3.9981, | |
| "step": 161792 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.734263736738902e-05, | |
| "loss": 3.9964, | |
| "step": 162304 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.73342514198785e-05, | |
| "loss": 4.0218, | |
| "step": 162816 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.732588185117171e-05, | |
| "loss": 4.0012, | |
| "step": 163328 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.731749590366119e-05, | |
| "loss": 4.0035, | |
| "step": 163840 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7309109956150674e-05, | |
| "loss": 3.9932, | |
| "step": 164352 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7300724008640154e-05, | |
| "loss": 4.0088, | |
| "step": 164864 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7292354439933364e-05, | |
| "loss": 3.985, | |
| "step": 165376 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7283968492422844e-05, | |
| "loss": 3.9911, | |
| "step": 165888 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.7275582544912323e-05, | |
| "loss": 3.9901, | |
| "step": 166400 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.72671965974018e-05, | |
| "loss": 3.9978, | |
| "step": 166912 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.725882702869501e-05, | |
| "loss": 3.9882, | |
| "step": 167424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.725044108118449e-05, | |
| "loss": 3.9879, | |
| "step": 167936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.724205513367397e-05, | |
| "loss": 3.9939, | |
| "step": 168448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7233669186163446e-05, | |
| "loss": 3.9988, | |
| "step": 168960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.722529961745666e-05, | |
| "loss": 3.9788, | |
| "step": 169472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.721691366994614e-05, | |
| "loss": 3.995, | |
| "step": 169984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.720852772243563e-05, | |
| "loss": 3.994, | |
| "step": 170496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.72001417749251e-05, | |
| "loss": 3.9902, | |
| "step": 171008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.719177220621832e-05, | |
| "loss": 3.9705, | |
| "step": 171520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.71833862587078e-05, | |
| "loss": 3.9802, | |
| "step": 172032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.717500031119727e-05, | |
| "loss": 3.9701, | |
| "step": 172544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.716661436368675e-05, | |
| "loss": 3.986, | |
| "step": 173056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7158244794979966e-05, | |
| "loss": 3.9789, | |
| "step": 173568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7149858847469446e-05, | |
| "loss": 3.9857, | |
| "step": 174080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.714147289995892e-05, | |
| "loss": 3.9747, | |
| "step": 174592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.71330869524484e-05, | |
| "loss": 3.9801, | |
| "step": 175104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7124717383741615e-05, | |
| "loss": 3.9845, | |
| "step": 175616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7116331436231095e-05, | |
| "loss": 3.986, | |
| "step": 176128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7107945488720575e-05, | |
| "loss": 3.9825, | |
| "step": 176640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7099559541210055e-05, | |
| "loss": 3.9604, | |
| "step": 177152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.709118997250327e-05, | |
| "loss": 3.9744, | |
| "step": 177664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7082804024992744e-05, | |
| "loss": 3.9857, | |
| "step": 178176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7074418077482224e-05, | |
| "loss": 3.9709, | |
| "step": 178688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.7066032129971704e-05, | |
| "loss": 3.9713, | |
| "step": 179200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.705766256126492e-05, | |
| "loss": 3.9667, | |
| "step": 179712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704929299255813e-05, | |
| "loss": 3.972, | |
| "step": 180224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.704090704504761e-05, | |
| "loss": 3.9578, | |
| "step": 180736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.703252109753709e-05, | |
| "loss": 3.9788, | |
| "step": 181248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.702413515002657e-05, | |
| "loss": 3.9676, | |
| "step": 181760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.701574920251605e-05, | |
| "loss": 3.9726, | |
| "step": 182272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.700736325500553e-05, | |
| "loss": 3.9803, | |
| "step": 182784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.699897730749501e-05, | |
| "loss": 3.9486, | |
| "step": 183296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.699059135998449e-05, | |
| "loss": 3.9669, | |
| "step": 183808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.69822217912777e-05, | |
| "loss": 3.9598, | |
| "step": 184320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.697383584376718e-05, | |
| "loss": 3.9557, | |
| "step": 184832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.696544989625666e-05, | |
| "loss": 3.9501, | |
| "step": 185344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.695706394874614e-05, | |
| "loss": 3.9673, | |
| "step": 185856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.694869438003935e-05, | |
| "loss": 3.9605, | |
| "step": 186368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.694030843252883e-05, | |
| "loss": 3.9512, | |
| "step": 186880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6931922485018307e-05, | |
| "loss": 3.9442, | |
| "step": 187392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6923536537507787e-05, | |
| "loss": 3.9634, | |
| "step": 187904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6915166968801e-05, | |
| "loss": 3.9703, | |
| "step": 188416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.690678102129048e-05, | |
| "loss": 3.9622, | |
| "step": 188928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.689839507377996e-05, | |
| "loss": 3.961, | |
| "step": 189440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.689000912626944e-05, | |
| "loss": 3.9707, | |
| "step": 189952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.688163955756265e-05, | |
| "loss": 3.9658, | |
| "step": 190464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.687325361005213e-05, | |
| "loss": 3.9683, | |
| "step": 190976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.686486766254161e-05, | |
| "loss": 3.9458, | |
| "step": 191488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.685648171503109e-05, | |
| "loss": 3.9625, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6848128525128036e-05, | |
| "loss": 3.9592, | |
| "step": 192512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.683974257761751e-05, | |
| "loss": 3.9511, | |
| "step": 193024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6831356630106996e-05, | |
| "loss": 3.9559, | |
| "step": 193536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6822970682596476e-05, | |
| "loss": 3.9554, | |
| "step": 194048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6814584735085956e-05, | |
| "loss": 3.9671, | |
| "step": 194560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6806198787575436e-05, | |
| "loss": 3.9575, | |
| "step": 195072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6797812840064916e-05, | |
| "loss": 3.936, | |
| "step": 195584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6789426892554396e-05, | |
| "loss": 3.9413, | |
| "step": 196096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6781057323847605e-05, | |
| "loss": 3.9516, | |
| "step": 196608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6772671376337085e-05, | |
| "loss": 3.9537, | |
| "step": 197120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6764285428826565e-05, | |
| "loss": 3.9511, | |
| "step": 197632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.6755899481316045e-05, | |
| "loss": 3.9441, | |
| "step": 198144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6747529912609254e-05, | |
| "loss": 3.9491, | |
| "step": 198656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6739143965098734e-05, | |
| "loss": 3.9368, | |
| "step": 199168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6730758017588214e-05, | |
| "loss": 3.9495, | |
| "step": 199680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6722372070077694e-05, | |
| "loss": 3.9423, | |
| "step": 200192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6713986122567174e-05, | |
| "loss": 3.9482, | |
| "step": 200704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.670561655386039e-05, | |
| "loss": 3.9551, | |
| "step": 201216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.669723060634987e-05, | |
| "loss": 3.9476, | |
| "step": 201728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.668884465883935e-05, | |
| "loss": 3.9387, | |
| "step": 202240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.668045871132883e-05, | |
| "loss": 3.9433, | |
| "step": 202752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.667207276381831e-05, | |
| "loss": 3.9438, | |
| "step": 203264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.666368681630779e-05, | |
| "loss": 3.9473, | |
| "step": 203776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.665530086879727e-05, | |
| "loss": 3.9409, | |
| "step": 204288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.664691492128675e-05, | |
| "loss": 3.9372, | |
| "step": 204800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663854535257996e-05, | |
| "loss": 3.9333, | |
| "step": 205312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.663015940506944e-05, | |
| "loss": 3.9437, | |
| "step": 205824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.662177345755892e-05, | |
| "loss": 3.9314, | |
| "step": 206336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.66133875100484e-05, | |
| "loss": 3.9211, | |
| "step": 206848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.660503432014534e-05, | |
| "loss": 3.9346, | |
| "step": 207360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.659664837263482e-05, | |
| "loss": 3.938, | |
| "step": 207872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.65882624251243e-05, | |
| "loss": 3.9287, | |
| "step": 208384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.657989285641751e-05, | |
| "loss": 3.9393, | |
| "step": 208896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.657150690890699e-05, | |
| "loss": 3.9246, | |
| "step": 209408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.656312096139647e-05, | |
| "loss": 3.9396, | |
| "step": 209920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.655473501388595e-05, | |
| "loss": 3.9262, | |
| "step": 210432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.654634906637543e-05, | |
| "loss": 3.93, | |
| "step": 210944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.653797949766864e-05, | |
| "loss": 3.9444, | |
| "step": 211456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.652959355015812e-05, | |
| "loss": 3.9405, | |
| "step": 211968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.65212076026476e-05, | |
| "loss": 3.9304, | |
| "step": 212480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.651282165513708e-05, | |
| "loss": 3.9324, | |
| "step": 212992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.650443570762657e-05, | |
| "loss": 3.9294, | |
| "step": 213504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.649604976011605e-05, | |
| "loss": 3.9356, | |
| "step": 214016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.648768019140926e-05, | |
| "loss": 3.9218, | |
| "step": 214528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.647929424389874e-05, | |
| "loss": 3.9353, | |
| "step": 215040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.647090829638822e-05, | |
| "loss": 3.9257, | |
| "step": 215552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.64625223488777e-05, | |
| "loss": 3.9366, | |
| "step": 216064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6454152780170906e-05, | |
| "loss": 3.9146, | |
| "step": 216576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6445766832660386e-05, | |
| "loss": 3.9255, | |
| "step": 217088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6437380885149866e-05, | |
| "loss": 3.9209, | |
| "step": 217600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6428994937639346e-05, | |
| "loss": 3.926, | |
| "step": 218112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.642060899012882e-05, | |
| "loss": 3.9195, | |
| "step": 218624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6412223042618306e-05, | |
| "loss": 3.9427, | |
| "step": 219136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.640385347391152e-05, | |
| "loss": 3.9287, | |
| "step": 219648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6395467526401e-05, | |
| "loss": 3.922, | |
| "step": 220160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6387081578890475e-05, | |
| "loss": 3.9128, | |
| "step": 220672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6378695631379955e-05, | |
| "loss": 3.9355, | |
| "step": 221184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.637032606267317e-05, | |
| "loss": 3.9287, | |
| "step": 221696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.636194011516265e-05, | |
| "loss": 3.9275, | |
| "step": 222208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6353554167652124e-05, | |
| "loss": 3.9209, | |
| "step": 222720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6345168220141604e-05, | |
| "loss": 3.9251, | |
| "step": 223232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.633679865143482e-05, | |
| "loss": 3.9301, | |
| "step": 223744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.632841270392429e-05, | |
| "loss": 3.9212, | |
| "step": 224256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.632002675641377e-05, | |
| "loss": 3.9171, | |
| "step": 224768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.631164080890326e-05, | |
| "loss": 3.921, | |
| "step": 225280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.6303271240196475e-05, | |
| "loss": 3.9181, | |
| "step": 225792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.629488529268595e-05, | |
| "loss": 3.9116, | |
| "step": 226304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.628649934517543e-05, | |
| "loss": 3.9242, | |
| "step": 226816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.627811339766491e-05, | |
| "loss": 3.9235, | |
| "step": 227328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.626974382895812e-05, | |
| "loss": 3.9168, | |
| "step": 227840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.62613578814476e-05, | |
| "loss": 3.9151, | |
| "step": 228352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.625297193393708e-05, | |
| "loss": 3.9082, | |
| "step": 228864 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.949995279312134, | |
| "eval_runtime": 303.7283, | |
| "eval_samples_per_second": 1256.357, | |
| "eval_steps_per_second": 39.262, | |
| "step": 228960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.624458598642656e-05, | |
| "loss": 3.9135, | |
| "step": 229376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6236216417719766e-05, | |
| "loss": 3.9105, | |
| "step": 229888 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6227830470209246e-05, | |
| "loss": 3.9229, | |
| "step": 230400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6219444522698726e-05, | |
| "loss": 3.912, | |
| "step": 230912 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621105857518821e-05, | |
| "loss": 3.9226, | |
| "step": 231424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.620267262767769e-05, | |
| "loss": 3.9082, | |
| "step": 231936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.61943030589709e-05, | |
| "loss": 3.9141, | |
| "step": 232448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.618591711146038e-05, | |
| "loss": 3.8987, | |
| "step": 232960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.617753116394986e-05, | |
| "loss": 3.9213, | |
| "step": 233472 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.616914521643934e-05, | |
| "loss": 3.9151, | |
| "step": 233984 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.616077564773255e-05, | |
| "loss": 3.9146, | |
| "step": 234496 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.615238970022203e-05, | |
| "loss": 3.9177, | |
| "step": 235008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.614402013151524e-05, | |
| "loss": 3.8994, | |
| "step": 235520 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.613563418400472e-05, | |
| "loss": 3.9042, | |
| "step": 236032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.61272482364942e-05, | |
| "loss": 3.9124, | |
| "step": 236544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.611886228898368e-05, | |
| "loss": 3.8972, | |
| "step": 237056 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.611047634147316e-05, | |
| "loss": 3.8995, | |
| "step": 237568 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.610209039396265e-05, | |
| "loss": 3.8963, | |
| "step": 238080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.609370444645213e-05, | |
| "loss": 3.8976, | |
| "step": 238592 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6085318498941607e-05, | |
| "loss": 3.924, | |
| "step": 239104 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6076948930234816e-05, | |
| "loss": 3.9012, | |
| "step": 239616 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6068562982724296e-05, | |
| "loss": 3.9014, | |
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6060177035213776e-05, | |
| "loss": 3.8984, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6051791087703256e-05, | |
| "loss": 3.9053, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6043421518996465e-05, | |
| "loss": 3.889, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6035035571485945e-05, | |
| "loss": 3.8919, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6026649623975425e-05, | |
| "loss": 3.8956, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6018263676464904e-05, | |
| "loss": 3.9006, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6009877728954384e-05, | |
| "loss": 3.8869, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.60015081602476e-05, | |
| "loss": 3.8936, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.599312221273708e-05, | |
| "loss": 3.8937, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.598473626522656e-05, | |
| "loss": 3.904, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.597635031771604e-05, | |
| "loss": 3.8906, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.596796437020552e-05, | |
| "loss": 3.8964, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595959480149873e-05, | |
| "loss": 3.8971, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.595120885398821e-05, | |
| "loss": 3.8972, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.594282290647769e-05, | |
| "loss": 3.8775, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.593443695896717e-05, | |
| "loss": 3.8905, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.592606739026038e-05, | |
| "loss": 3.8748, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.591768144274986e-05, | |
| "loss": 3.8934, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590929549523934e-05, | |
| "loss": 3.883, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590090954772882e-05, | |
| "loss": 3.8948, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5892539979022034e-05, | |
| "loss": 3.8853, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5884154031511514e-05, | |
| "loss": 3.8848, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5875768084000994e-05, | |
| "loss": 3.8936, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5867382136490474e-05, | |
| "loss": 3.8911, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585901256778368e-05, | |
| "loss": 3.8891, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585062662027316e-05, | |
| "loss": 3.8746, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.584224067276264e-05, | |
| "loss": 3.8794, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.583385472525212e-05, | |
| "loss": 3.895, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.582548515654533e-05, | |
| "loss": 3.8808, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.581709920903481e-05, | |
| "loss": 3.8786, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580871326152429e-05, | |
| "loss": 3.8778, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580032731401377e-05, | |
| "loss": 3.8846, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.579195774530699e-05, | |
| "loss": 3.867, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.578357179779647e-05, | |
| "loss": 3.8864, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.577518585028595e-05, | |
| "loss": 3.879, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.576679990277543e-05, | |
| "loss": 3.8851, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5758430334068637e-05, | |
| "loss": 3.8886, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5750044386558117e-05, | |
| "loss": 3.8673, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5741658439047596e-05, | |
| "loss": 3.8707, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5733272491537076e-05, | |
| "loss": 3.8785, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5724902922830286e-05, | |
| "loss": 3.8653, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5716516975319765e-05, | |
| "loss": 3.8647, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5708131027809245e-05, | |
| "loss": 3.8768, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5699745080298725e-05, | |
| "loss": 3.8763, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569137551159194e-05, | |
| "loss": 3.8635, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.568298956408142e-05, | |
| "loss": 3.8585, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.56746036165709e-05, | |
| "loss": 3.8752, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5666217669060374e-05, | |
| "loss": 3.8823, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.565784810035359e-05, | |
| "loss": 3.8727, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.564946215284307e-05, | |
| "loss": 3.8807, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.564107620533255e-05, | |
| "loss": 3.8826, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.563269025782202e-05, | |
| "loss": 3.8816, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.562432068911524e-05, | |
| "loss": 3.8843, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.561593474160472e-05, | |
| "loss": 3.8606, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.56075487940942e-05, | |
| "loss": 3.8776, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559916284658368e-05, | |
| "loss": 3.8793, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5590793277876895e-05, | |
| "loss": 3.8643, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5582407330366375e-05, | |
| "loss": 3.8765, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.557402138285585e-05, | |
| "loss": 3.8692, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556563543534533e-05, | |
| "loss": 3.8815, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5557265866638544e-05, | |
| "loss": 3.8758, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5548879919128024e-05, | |
| "loss": 3.8514, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.55404939716175e-05, | |
| "loss": 3.8624, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553210802410698e-05, | |
| "loss": 3.8664, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.552372207659646e-05, | |
| "loss": 3.8692, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5515352507889666e-05, | |
| "loss": 3.8717, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.550696656037915e-05, | |
| "loss": 3.8621, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549858061286863e-05, | |
| "loss": 3.8658, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549019466535811e-05, | |
| "loss": 3.8574, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548182509665132e-05, | |
| "loss": 3.8673, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.54734391491408e-05, | |
| "loss": 3.8596, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.546505320163028e-05, | |
| "loss": 3.8674, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.545666725411976e-05, | |
| "loss": 3.8769, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.544829768541297e-05, | |
| "loss": 3.8667, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.543991173790245e-05, | |
| "loss": 3.8571, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.543152579039193e-05, | |
| "loss": 3.8646, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.542313984288141e-05, | |
| "loss": 3.8627, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.541477027417462e-05, | |
| "loss": 3.8687, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5406384326664106e-05, | |
| "loss": 3.8636, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5397998379153586e-05, | |
| "loss": 3.8586, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5389612431643066e-05, | |
| "loss": 3.8501, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5381242862936275e-05, | |
| "loss": 3.8684, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5372856915425755e-05, | |
| "loss": 3.8479, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5364470967915235e-05, | |
| "loss": 3.8446, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5356085020404715e-05, | |
| "loss": 3.8532, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5347715451697924e-05, | |
| "loss": 3.8574, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5339329504187404e-05, | |
| "loss": 3.8517, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5330943556676884e-05, | |
| "loss": 3.8676, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5322557609166364e-05, | |
| "loss": 3.8502, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5314188040459573e-05, | |
| "loss": 3.854, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.530580209294906e-05, | |
| "loss": 3.8547, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.529741614543854e-05, | |
| "loss": 3.8466, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528904657673175e-05, | |
| "loss": 3.8704, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.528066062922123e-05, | |
| "loss": 3.8631, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.527227468171071e-05, | |
| "loss": 3.8571, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.526388873420019e-05, | |
| "loss": 3.857, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52555191654934e-05, | |
| "loss": 3.8477, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.524713321798288e-05, | |
| "loss": 3.8596, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523874727047236e-05, | |
| "loss": 3.8466, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523036132296184e-05, | |
| "loss": 3.864, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.522199175425505e-05, | |
| "loss": 3.8462, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.521360580674453e-05, | |
| "loss": 3.8574, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.520521985923401e-05, | |
| "loss": 3.8452, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5196833911723494e-05, | |
| "loss": 3.8491, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5188447964212974e-05, | |
| "loss": 3.8446, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.518007839550618e-05, | |
| "loss": 3.8513, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.517169244799566e-05, | |
| "loss": 3.8448, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.516330650048514e-05, | |
| "loss": 3.866, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.515492055297462e-05, | |
| "loss": 3.8569, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.514655098426783e-05, | |
| "loss": 3.8496, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513816503675731e-05, | |
| "loss": 3.836, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512977908924679e-05, | |
| "loss": 3.8601, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512139314173627e-05, | |
| "loss": 3.8543, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511302357302948e-05, | |
| "loss": 3.8575, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.510463762551896e-05, | |
| "loss": 3.8466, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.509625167800845e-05, | |
| "loss": 3.8492, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.508786573049793e-05, | |
| "loss": 3.8554, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5079496161791136e-05, | |
| "loss": 3.8532, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5071110214280616e-05, | |
| "loss": 3.8419, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5062724266770096e-05, | |
| "loss": 3.8475, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5054338319259576e-05, | |
| "loss": 3.8448, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5045968750552785e-05, | |
| "loss": 3.8398, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5037582803042265e-05, | |
| "loss": 3.8502, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5029196855531745e-05, | |
| "loss": 3.8547, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5020827286824954e-05, | |
| "loss": 3.8419, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5012441339314434e-05, | |
| "loss": 3.8388, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5004055391803914e-05, | |
| "loss": 3.8443, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.9103078842163086, | |
| "eval_runtime": 311.761, | |
| "eval_samples_per_second": 1223.986, | |
| "eval_steps_per_second": 38.25, | |
| "step": 305280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.49956694442934e-05, | |
| "loss": 3.8365, | |
| "step": 305664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.498728349678288e-05, | |
| "loss": 3.8401, | |
| "step": 306176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.497889754927236e-05, | |
| "loss": 3.8498, | |
| "step": 306688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.497051160176184e-05, | |
| "loss": 3.8413, | |
| "step": 307200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.496212565425132e-05, | |
| "loss": 3.8539, | |
| "step": 307712 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.49537397067408e-05, | |
| "loss": 3.8352, | |
| "step": 308224 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4945353759230274e-05, | |
| "loss": 3.8426, | |
| "step": 308736 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4936967811719754e-05, | |
| "loss": 3.8286, | |
| "step": 309248 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.492859824301297e-05, | |
| "loss": 3.8545, | |
| "step": 309760 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.492021229550245e-05, | |
| "loss": 3.8441, | |
| "step": 310272 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.491184272679566e-05, | |
| "loss": 3.8403, | |
| "step": 310784 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.490345677928514e-05, | |
| "loss": 3.8514, | |
| "step": 311296 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.489507083177462e-05, | |
| "loss": 3.8286, | |
| "step": 311808 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48866848842641e-05, | |
| "loss": 3.8395, | |
| "step": 312320 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.487829893675358e-05, | |
| "loss": 3.8419, | |
| "step": 312832 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486991298924306e-05, | |
| "loss": 3.8297, | |
| "step": 313344 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486152704173254e-05, | |
| "loss": 3.8291, | |
| "step": 313856 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.485314109422202e-05, | |
| "loss": 3.8274, | |
| "step": 314368 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.484477152551523e-05, | |
| "loss": 3.8324, | |
| "step": 314880 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4836401956808444e-05, | |
| "loss": 3.8515, | |
| "step": 315392 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4828016009297924e-05, | |
| "loss": 3.833, | |
| "step": 315904 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48196300617874e-05, | |
| "loss": 3.833, | |
| "step": 316416 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481124411427688e-05, | |
| "loss": 3.8308, | |
| "step": 316928 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.480285816676636e-05, | |
| "loss": 3.8386, | |
| "step": 317440 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4794472219255837e-05, | |
| "loss": 3.8254, | |
| "step": 317952 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.478608627174532e-05, | |
| "loss": 3.827, | |
| "step": 318464 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.47777003242348e-05, | |
| "loss": 3.8245, | |
| "step": 318976 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476933075552801e-05, | |
| "loss": 3.8348, | |
| "step": 319488 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476094480801749e-05, | |
| "loss": 3.8177, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.475255886050697e-05, | |
| "loss": 3.8294, | |
| "step": 320512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.474418929180018e-05, | |
| "loss": 3.8296, | |
| "step": 321024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.473580334428966e-05, | |
| "loss": 3.837, | |
| "step": 321536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.472741739677914e-05, | |
| "loss": 3.8249, | |
| "step": 322048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.471903144926862e-05, | |
| "loss": 3.8299, | |
| "step": 322560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.471066188056183e-05, | |
| "loss": 3.8296, | |
| "step": 323072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.470227593305131e-05, | |
| "loss": 3.8316, | |
| "step": 323584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.469388998554079e-05, | |
| "loss": 3.8153, | |
| "step": 324096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.468550403803028e-05, | |
| "loss": 3.8208, | |
| "step": 324608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4677134469323486e-05, | |
| "loss": 3.8093, | |
| "step": 325120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4668748521812966e-05, | |
| "loss": 3.8265, | |
| "step": 325632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4660362574302446e-05, | |
| "loss": 3.8176, | |
| "step": 326144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4651976626791926e-05, | |
| "loss": 3.828, | |
| "step": 326656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4643607058085135e-05, | |
| "loss": 3.8192, | |
| "step": 327168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4635221110574615e-05, | |
| "loss": 3.8237, | |
| "step": 327680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4626835163064095e-05, | |
| "loss": 3.8268, | |
| "step": 328192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4618449215553575e-05, | |
| "loss": 3.825, | |
| "step": 328704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4610079646846784e-05, | |
| "loss": 3.8271, | |
| "step": 329216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4601693699336264e-05, | |
| "loss": 3.8145, | |
| "step": 329728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4593307751825744e-05, | |
| "loss": 3.8084, | |
| "step": 330240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4584921804315224e-05, | |
| "loss": 3.8316, | |
| "step": 330752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.457655223560844e-05, | |
| "loss": 3.8186, | |
| "step": 331264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.456818266690165e-05, | |
| "loss": 3.8163, | |
| "step": 331776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455979671939113e-05, | |
| "loss": 3.8153, | |
| "step": 332288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455141077188061e-05, | |
| "loss": 3.8232, | |
| "step": 332800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.454302482437009e-05, | |
| "loss": 3.8025, | |
| "step": 333312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.453463887685957e-05, | |
| "loss": 3.8243, | |
| "step": 333824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.452625292934905e-05, | |
| "loss": 3.8168, | |
| "step": 334336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.451786698183853e-05, | |
| "loss": 3.8195, | |
| "step": 334848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450948103432801e-05, | |
| "loss": 3.8269, | |
| "step": 335360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450111146562122e-05, | |
| "loss": 3.8097, | |
| "step": 335872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.44927255181107e-05, | |
| "loss": 3.8049, | |
| "step": 336384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.448433957060018e-05, | |
| "loss": 3.8214, | |
| "step": 336896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4475953623089664e-05, | |
| "loss": 3.7992, | |
| "step": 337408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.446758405438287e-05, | |
| "loss": 3.803, | |
| "step": 337920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.445919810687235e-05, | |
| "loss": 3.8157, | |
| "step": 338432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.445081215936183e-05, | |
| "loss": 3.8149, | |
| "step": 338944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.444242621185131e-05, | |
| "loss": 3.8019, | |
| "step": 339456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.443405664314452e-05, | |
| "loss": 3.7984, | |
| "step": 339968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4425670695634e-05, | |
| "loss": 3.8116, | |
| "step": 340480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.441728474812348e-05, | |
| "loss": 3.8186, | |
| "step": 340992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440889880061296e-05, | |
| "loss": 3.8149, | |
| "step": 341504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440052923190617e-05, | |
| "loss": 3.8136, | |
| "step": 342016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.439214328439565e-05, | |
| "loss": 3.8234, | |
| "step": 342528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.438375733688513e-05, | |
| "loss": 3.8166, | |
| "step": 343040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.437537138937462e-05, | |
| "loss": 3.8272, | |
| "step": 343552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.436700182066783e-05, | |
| "loss": 3.7997, | |
| "step": 344064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.435861587315731e-05, | |
| "loss": 3.819, | |
| "step": 344576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4350246304450516e-05, | |
| "loss": 3.8165, | |
| "step": 345088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4341860356939996e-05, | |
| "loss": 3.8024, | |
| "step": 345600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4333474409429476e-05, | |
| "loss": 3.8199, | |
| "step": 346112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4325088461918956e-05, | |
| "loss": 3.8078, | |
| "step": 346624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4316702514408436e-05, | |
| "loss": 3.8181, | |
| "step": 347136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4308316566897916e-05, | |
| "loss": 3.8178, | |
| "step": 347648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4299930619387396e-05, | |
| "loss": 3.7925, | |
| "step": 348160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4291544671876876e-05, | |
| "loss": 3.804, | |
| "step": 348672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4283175103170085e-05, | |
| "loss": 3.8033, | |
| "step": 349184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.427478915565957e-05, | |
| "loss": 3.8123, | |
| "step": 349696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.426640320814905e-05, | |
| "loss": 3.8115, | |
| "step": 350208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.425801726063853e-05, | |
| "loss": 3.801, | |
| "step": 350720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.424964769193174e-05, | |
| "loss": 3.809, | |
| "step": 351232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.424126174442122e-05, | |
| "loss": 3.8002, | |
| "step": 351744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.42328757969107e-05, | |
| "loss": 3.8041, | |
| "step": 352256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.422448984940018e-05, | |
| "loss": 3.8014, | |
| "step": 352768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.421612028069339e-05, | |
| "loss": 3.8104, | |
| "step": 353280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.420773433318287e-05, | |
| "loss": 3.818, | |
| "step": 353792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.419934838567235e-05, | |
| "loss": 3.8083, | |
| "step": 354304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.419096243816182e-05, | |
| "loss": 3.7983, | |
| "step": 354816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.418259286945504e-05, | |
| "loss": 3.8027, | |
| "step": 355328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4174206921944525e-05, | |
| "loss": 3.807, | |
| "step": 355840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4165820974434005e-05, | |
| "loss": 3.813, | |
| "step": 356352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.415743502692348e-05, | |
| "loss": 3.8063, | |
| "step": 356864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4149065458216694e-05, | |
| "loss": 3.8017, | |
| "step": 357376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4140679510706174e-05, | |
| "loss": 3.7919, | |
| "step": 357888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4132293563195654e-05, | |
| "loss": 3.8081, | |
| "step": 358400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.412390761568513e-05, | |
| "loss": 3.7917, | |
| "step": 358912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.411555442578207e-05, | |
| "loss": 3.7915, | |
| "step": 359424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.410716847827155e-05, | |
| "loss": 3.7907, | |
| "step": 359936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409878253076103e-05, | |
| "loss": 3.8039, | |
| "step": 360448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409039658325051e-05, | |
| "loss": 3.7893, | |
| "step": 360960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.408201063573999e-05, | |
| "loss": 3.8143, | |
| "step": 361472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.407362468822948e-05, | |
| "loss": 3.7898, | |
| "step": 361984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.406523874071895e-05, | |
| "loss": 3.8015, | |
| "step": 362496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.405685279320843e-05, | |
| "loss": 3.7985, | |
| "step": 363008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.404848322450165e-05, | |
| "loss": 3.7912, | |
| "step": 363520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.404009727699113e-05, | |
| "loss": 3.813, | |
| "step": 364032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.40317113294806e-05, | |
| "loss": 3.8066, | |
| "step": 364544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.402334176077382e-05, | |
| "loss": 3.7978, | |
| "step": 365056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.40149558132633e-05, | |
| "loss": 3.7998, | |
| "step": 365568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.400656986575277e-05, | |
| "loss": 3.7943, | |
| "step": 366080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.399818391824225e-05, | |
| "loss": 3.8021, | |
| "step": 366592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398979797073173e-05, | |
| "loss": 3.7849, | |
| "step": 367104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398141202322122e-05, | |
| "loss": 3.8085, | |
| "step": 367616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.39730260757107e-05, | |
| "loss": 3.7947, | |
| "step": 368128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.396464012820018e-05, | |
| "loss": 3.7978, | |
| "step": 368640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3956270559493386e-05, | |
| "loss": 3.7909, | |
| "step": 369152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3947884611982866e-05, | |
| "loss": 3.7945, | |
| "step": 369664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3939515043276075e-05, | |
| "loss": 3.7853, | |
| "step": 370176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3931129095765555e-05, | |
| "loss": 3.7986, | |
| "step": 370688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3922743148255035e-05, | |
| "loss": 3.7899, | |
| "step": 371200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3914357200744515e-05, | |
| "loss": 3.8073, | |
| "step": 371712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3905971253233995e-05, | |
| "loss": 3.8006, | |
| "step": 372224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3897585305723475e-05, | |
| "loss": 3.798, | |
| "step": 372736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3889199358212955e-05, | |
| "loss": 3.7795, | |
| "step": 373248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3880813410702434e-05, | |
| "loss": 3.8036, | |
| "step": 373760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.387246022079938e-05, | |
| "loss": 3.7994, | |
| "step": 374272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.386407427328886e-05, | |
| "loss": 3.8004, | |
| "step": 374784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.385568832577834e-05, | |
| "loss": 3.7907, | |
| "step": 375296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.384730237826782e-05, | |
| "loss": 3.7938, | |
| "step": 375808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.38389164307573e-05, | |
| "loss": 3.797, | |
| "step": 376320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.383053048324678e-05, | |
| "loss": 3.8024, | |
| "step": 376832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.382214453573626e-05, | |
| "loss": 3.787, | |
| "step": 377344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.381375858822574e-05, | |
| "loss": 3.7955, | |
| "step": 377856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.380538901951895e-05, | |
| "loss": 3.7863, | |
| "step": 378368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.379700307200843e-05, | |
| "loss": 3.788, | |
| "step": 378880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.378863350330164e-05, | |
| "loss": 3.8005, | |
| "step": 379392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3780247555791124e-05, | |
| "loss": 3.7937, | |
| "step": 379904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3771861608280604e-05, | |
| "loss": 3.7918, | |
| "step": 380416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3763475660770084e-05, | |
| "loss": 3.7856, | |
| "step": 380928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3755089713259564e-05, | |
| "loss": 3.7908, | |
| "step": 381440 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.8839731216430664, | |
| "eval_runtime": 307.4054, | |
| "eval_samples_per_second": 1241.328, | |
| "eval_steps_per_second": 38.792, | |
| "step": 381600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3746703765749044e-05, | |
| "loss": 3.7879, | |
| "step": 381952 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3738317818238524e-05, | |
| "loss": 3.7843, | |
| "step": 382464 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3729931870728004e-05, | |
| "loss": 3.7993, | |
| "step": 382976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372156230202121e-05, | |
| "loss": 3.7915, | |
| "step": 383488 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.371317635451069e-05, | |
| "loss": 3.7959, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.370479040700017e-05, | |
| "loss": 3.788, | |
| "step": 384512 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.369640445948965e-05, | |
| "loss": 3.7845, | |
| "step": 385024 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.368803489078286e-05, | |
| "loss": 3.7766, | |
| "step": 385536 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367966532207607e-05, | |
| "loss": 3.7961, | |
| "step": 386048 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367127937456556e-05, | |
| "loss": 3.796, | |
| "step": 386560 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.366289342705504e-05, | |
| "loss": 3.789, | |
| "step": 387072 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.365450747954452e-05, | |
| "loss": 3.7962, | |
| "step": 387584 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.364613791083773e-05, | |
| "loss": 3.7753, | |
| "step": 388096 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.363775196332721e-05, | |
| "loss": 3.7859, | |
| "step": 388608 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3629366015816687e-05, | |
| "loss": 3.7911, | |
| "step": 389120 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3620980068306167e-05, | |
| "loss": 3.7745, | |
| "step": 389632 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3612594120795647e-05, | |
| "loss": 3.7783, | |
| "step": 390144 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3604224552088856e-05, | |
| "loss": 3.7765, | |
| "step": 390656 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3595838604578336e-05, | |
| "loss": 3.7805, | |
| "step": 391168 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3587452657067816e-05, | |
| "loss": 3.7971, | |
| "step": 391680 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3579066709557295e-05, | |
| "loss": 3.7821, | |
| "step": 392192 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3570680762046775e-05, | |
| "loss": 3.7853, | |
| "step": 392704 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3562294814536255e-05, | |
| "loss": 3.7774, | |
| "step": 393216 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3553908867025735e-05, | |
| "loss": 3.7888, | |
| "step": 393728 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3545522919515215e-05, | |
| "loss": 3.772, | |
| "step": 394240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.353715335080843e-05, | |
| "loss": 3.7773, | |
| "step": 394752 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3528767403297904e-05, | |
| "loss": 3.7755, | |
| "step": 395264 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3520381455787384e-05, | |
| "loss": 3.7813, | |
| "step": 395776 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.35120118870806e-05, | |
| "loss": 3.768, | |
| "step": 396288 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.350364231837381e-05, | |
| "loss": 3.7774, | |
| "step": 396800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.349525637086329e-05, | |
| "loss": 3.7813, | |
| "step": 397312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.348687042335277e-05, | |
| "loss": 3.7832, | |
| "step": 397824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.347848447584225e-05, | |
| "loss": 3.7762, | |
| "step": 398336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.347009852833173e-05, | |
| "loss": 3.7775, | |
| "step": 398848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.346171258082121e-05, | |
| "loss": 3.7798, | |
| "step": 399360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.345332663331069e-05, | |
| "loss": 3.7762, | |
| "step": 399872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.344494068580017e-05, | |
| "loss": 3.7701, | |
| "step": 400384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.343657111709338e-05, | |
| "loss": 3.7683, | |
| "step": 400896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3428201548386594e-05, | |
| "loss": 3.7624, | |
| "step": 401408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3419815600876074e-05, | |
| "loss": 3.7748, | |
| "step": 401920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3411429653365554e-05, | |
| "loss": 3.7705, | |
| "step": 402432 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.340304370585503e-05, | |
| "loss": 3.7791, | |
| "step": 402944 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.339465775834451e-05, | |
| "loss": 3.7701, | |
| "step": 403456 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.338627181083399e-05, | |
| "loss": 3.7724, | |
| "step": 403968 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.33779022421272e-05, | |
| "loss": 3.7808, | |
| "step": 404480 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.336951629461668e-05, | |
| "loss": 3.7725, | |
| "step": 404992 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.336113034710616e-05, | |
| "loss": 3.778, | |
| "step": 405504 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.335274439959564e-05, | |
| "loss": 3.7636, | |
| "step": 406016 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.334435845208512e-05, | |
| "loss": 3.759, | |
| "step": 406528 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.33359725045746e-05, | |
| "loss": 3.7855, | |
| "step": 407040 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.332758655706408e-05, | |
| "loss": 3.7691, | |
| "step": 407552 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331920060955356e-05, | |
| "loss": 3.7676, | |
| "step": 408064 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331083104084677e-05, | |
| "loss": 3.7654, | |
| "step": 408576 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.330244509333625e-05, | |
| "loss": 3.7735, | |
| "step": 409088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.329405914582573e-05, | |
| "loss": 3.753, | |
| "step": 409600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.328567319831521e-05, | |
| "loss": 3.7742, | |
| "step": 410112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.327728725080469e-05, | |
| "loss": 3.7711, | |
| "step": 410624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326890130329417e-05, | |
| "loss": 3.7651, | |
| "step": 411136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326051535578365e-05, | |
| "loss": 3.7846, | |
| "step": 411648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.325214578707687e-05, | |
| "loss": 3.7592, | |
| "step": 412160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.324375983956635e-05, | |
| "loss": 3.7599, | |
| "step": 412672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.323537389205583e-05, | |
| "loss": 3.773, | |
| "step": 413184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3227004323349036e-05, | |
| "loss": 3.7548, | |
| "step": 413696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3218618375838516e-05, | |
| "loss": 3.7575, | |
| "step": 414208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3210232428327996e-05, | |
| "loss": 3.7642, | |
| "step": 414720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3201846480817476e-05, | |
| "loss": 3.7663, | |
| "step": 415232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3193460533306956e-05, | |
| "loss": 3.7558, | |
| "step": 415744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3185090964600165e-05, | |
| "loss": 3.7512, | |
| "step": 416256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3176705017089645e-05, | |
| "loss": 3.7598, | |
| "step": 416768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3168319069579125e-05, | |
| "loss": 3.7691, | |
| "step": 417280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3159933122068605e-05, | |
| "loss": 3.7686, | |
| "step": 417792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3151547174558085e-05, | |
| "loss": 3.7709, | |
| "step": 418304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3143161227047565e-05, | |
| "loss": 3.7715, | |
| "step": 418816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3134775279537045e-05, | |
| "loss": 3.7723, | |
| "step": 419328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3126389332026525e-05, | |
| "loss": 3.7819, | |
| "step": 419840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.311801976331974e-05, | |
| "loss": 3.7506, | |
| "step": 420352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3109633815809214e-05, | |
| "loss": 3.7756, | |
| "step": 420864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310126424710243e-05, | |
| "loss": 3.7707, | |
| "step": 421376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.309287829959191e-05, | |
| "loss": 3.7497, | |
| "step": 421888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.308449235208139e-05, | |
| "loss": 3.7752, | |
| "step": 422400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.307610640457086e-05, | |
| "loss": 3.7611, | |
| "step": 422912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.306772045706034e-05, | |
| "loss": 3.7719, | |
| "step": 423424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.305935088835356e-05, | |
| "loss": 3.7728, | |
| "step": 423936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.305096494084304e-05, | |
| "loss": 3.7507, | |
| "step": 424448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.304257899333252e-05, | |
| "loss": 3.7536, | |
| "step": 424960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3034193045822e-05, | |
| "loss": 3.7561, | |
| "step": 425472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.302580709831148e-05, | |
| "loss": 3.7656, | |
| "step": 425984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.301743752960469e-05, | |
| "loss": 3.7622, | |
| "step": 426496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.300905158209417e-05, | |
| "loss": 3.7577, | |
| "step": 427008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.300066563458365e-05, | |
| "loss": 3.7583, | |
| "step": 427520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.299227968707313e-05, | |
| "loss": 3.7582, | |
| "step": 428032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2983910118366337e-05, | |
| "loss": 3.7585, | |
| "step": 428544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2975524170855817e-05, | |
| "loss": 3.7525, | |
| "step": 429056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2967138223345296e-05, | |
| "loss": 3.7691, | |
| "step": 429568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2958752275834776e-05, | |
| "loss": 3.7685, | |
| "step": 430080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2950366328324256e-05, | |
| "loss": 3.7678, | |
| "step": 430592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.294199675961747e-05, | |
| "loss": 3.7491, | |
| "step": 431104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.293361081210695e-05, | |
| "loss": 3.7588, | |
| "step": 431616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.292522486459643e-05, | |
| "loss": 3.7588, | |
| "step": 432128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.291683891708591e-05, | |
| "loss": 3.7631, | |
| "step": 432640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.290845296957539e-05, | |
| "loss": 3.7623, | |
| "step": 433152 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.29000834008686e-05, | |
| "loss": 3.7574, | |
| "step": 433664 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.289169745335808e-05, | |
| "loss": 3.7466, | |
| "step": 434176 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.288331150584756e-05, | |
| "loss": 3.7617, | |
| "step": 434688 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.287492555833704e-05, | |
| "loss": 3.7501, | |
| "step": 435200 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.286653961082652e-05, | |
| "loss": 3.7458, | |
| "step": 435712 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.285817004211973e-05, | |
| "loss": 3.742, | |
| "step": 436224 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284978409460921e-05, | |
| "loss": 3.7569, | |
| "step": 436736 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.28413981470987e-05, | |
| "loss": 3.75, | |
| "step": 437248 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.283301219958818e-05, | |
| "loss": 3.7657, | |
| "step": 437760 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2824642630881386e-05, | |
| "loss": 3.7473, | |
| "step": 438272 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2816256683370866e-05, | |
| "loss": 3.7575, | |
| "step": 438784 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2807870735860346e-05, | |
| "loss": 3.755, | |
| "step": 439296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2799484788349826e-05, | |
| "loss": 3.7449, | |
| "step": 439808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2791115219643035e-05, | |
| "loss": 3.7682, | |
| "step": 440320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2782729272132515e-05, | |
| "loss": 3.7616, | |
| "step": 440832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2774343324621995e-05, | |
| "loss": 3.7526, | |
| "step": 441344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2765957377111475e-05, | |
| "loss": 3.7537, | |
| "step": 441856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2757571429600955e-05, | |
| "loss": 3.7518, | |
| "step": 442368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2749185482090435e-05, | |
| "loss": 3.7565, | |
| "step": 442880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.274081591338365e-05, | |
| "loss": 3.7377, | |
| "step": 443392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.273242996587313e-05, | |
| "loss": 3.7647, | |
| "step": 443904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.272404401836261e-05, | |
| "loss": 3.7479, | |
| "step": 444416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.271565807085209e-05, | |
| "loss": 3.7531, | |
| "step": 444928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.27072885021453e-05, | |
| "loss": 3.7519, | |
| "step": 445440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269890255463478e-05, | |
| "loss": 3.7491, | |
| "step": 445952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269051660712426e-05, | |
| "loss": 3.747, | |
| "step": 446464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.268213065961374e-05, | |
| "loss": 3.7512, | |
| "step": 446976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.267374471210322e-05, | |
| "loss": 3.7484, | |
| "step": 447488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.26653587645927e-05, | |
| "loss": 3.7617, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.265698919588591e-05, | |
| "loss": 3.7545, | |
| "step": 448512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.264860324837539e-05, | |
| "loss": 3.7565, | |
| "step": 449024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.264021730086487e-05, | |
| "loss": 3.7356, | |
| "step": 449536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.263183135335435e-05, | |
| "loss": 3.762, | |
| "step": 450048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2623461784647564e-05, | |
| "loss": 3.7522, | |
| "step": 450560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2615075837137044e-05, | |
| "loss": 3.7565, | |
| "step": 451072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2606689889626524e-05, | |
| "loss": 3.7492, | |
| "step": 451584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2598303942116e-05, | |
| "loss": 3.7493, | |
| "step": 452096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258991799460548e-05, | |
| "loss": 3.7596, | |
| "step": 452608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.258153204709496e-05, | |
| "loss": 3.7594, | |
| "step": 453120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.257314609958444e-05, | |
| "loss": 3.743, | |
| "step": 453632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.256476015207392e-05, | |
| "loss": 3.75, | |
| "step": 454144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2556390583367126e-05, | |
| "loss": 3.7477, | |
| "step": 454656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2548004635856606e-05, | |
| "loss": 3.7441, | |
| "step": 455168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.253963506714982e-05, | |
| "loss": 3.7588, | |
| "step": 455680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.25312491196393e-05, | |
| "loss": 3.7505, | |
| "step": 456192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.252286317212878e-05, | |
| "loss": 3.7528, | |
| "step": 456704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.251447722461826e-05, | |
| "loss": 3.7378, | |
| "step": 457216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.250609127710774e-05, | |
| "loss": 3.7508, | |
| "step": 457728 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.8685173988342285, | |
| "eval_runtime": 308.0819, | |
| "eval_samples_per_second": 1238.602, | |
| "eval_steps_per_second": 38.707, | |
| "step": 457920 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 3.200394765351813e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
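
The object above is the standard Hugging Face `Trainer` checkpoint state (`trainer_state.json`): `log_history` interleaves training-loss entries (keyed by `loss`) with periodic evaluation entries (keyed by `eval_loss`), and `best_metric` / `best_model_checkpoint` record the best evaluation seen so far. Below is a minimal sketch of how one might load this file and summarize the run; the file path and the use of `math.exp` to read the cross-entropy loss as a perplexity are assumptions about the setup, not part of the log itself, and the snippet uses only the Python standard library.

```python
import json
import math

# Assumed path: the Trainer writes this file alongside each checkpoint,
# e.g. .../checkpoints/checkpoint-457920/trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Split the log history into training-step entries and evaluation entries.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_loss" in e]

print(f"best eval loss: {state['best_metric']:.4f} "
      f"(perplexity ~ {math.exp(state['best_metric']):.1f}, "
      f"assuming mean token-level cross-entropy in nats)")
print(f"best checkpoint: {state['best_model_checkpoint']}")
print(f"logged train steps: {len(train_log)}, eval points: {len(eval_log)}")

# Trace how the evaluation loss moved across the run.
for e in eval_log:
    print(f"step {e['step']:>7}: eval_loss={e['eval_loss']:.4f} "
          f"(ppl ~ {math.exp(e['eval_loss']):.1f})")
```

Under that (assumed) cross-entropy reading, the final `eval_loss` of 3.8685 at step 457920 corresponds to a perplexity of roughly 48, down from about 52 at the step-228960 evaluation.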