{
  "best_metric": 4.023805618286133,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-reconstruction/transformer/1/checkpoints/checkpoint-152640",
  "epoch": 1.0250006060157382,
  "eval_steps": 10,
  "global_step": 152640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.974,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 6.8153,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 6.1834,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 5.97,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 5.8132,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 5.7086,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 5.6021,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 5.5349,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 5.4463,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 5.3944,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 5.3494,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 5.3162,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989938500867749e-05,
      "loss": 5.2631,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989099906116697e-05,
      "loss": 5.2043,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988261311365645e-05,
      "loss": 5.1752,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.987422716614593e-05,
      "loss": 5.1243,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986584121863541e-05,
      "loss": 5.1035,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985745527112489e-05,
      "loss": 5.0788,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984906932361437e-05,
      "loss": 5.0407,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984068337610385e-05,
      "loss": 5.0161,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983229742859333e-05,
      "loss": 5.0012,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9823927859886547e-05,
      "loss": 4.9747,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9815541912376026e-05,
      "loss": 4.9468,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 4.9276,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 4.9187,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790400448648195e-05,
      "loss": 4.8818,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9782014501137675e-05,
      "loss": 4.8675,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773628553627155e-05,
      "loss": 4.8592,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 4.8292,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756873037409844e-05,
      "loss": 4.8116,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748487089899324e-05,
      "loss": 4.7994,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9740101142388804e-05,
      "loss": 4.7931,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 4.7811,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9723329247367764e-05,
      "loss": 4.7635,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.971495967866098e-05,
      "loss": 4.7539,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.970657373115046e-05,
      "loss": 4.7476,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.969820416244367e-05,
      "loss": 4.7295,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968981821493315e-05,
      "loss": 4.7017,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968143226742263e-05,
      "loss": 4.6987,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967304631991211e-05,
      "loss": 4.6724,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966466037240159e-05,
      "loss": 4.6824,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965627442489107e-05,
      "loss": 4.6688,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964788847738054e-05,
      "loss": 4.6683,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963950252987002e-05,
      "loss": 4.6457,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 4.6475,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962274701365272e-05,
      "loss": 4.6328,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 4.6266,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 4.6291,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 4.5892,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95892359812181e-05,
      "loss": 4.5974,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958085003370758e-05,
      "loss": 4.5991,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957246408619706e-05,
      "loss": 4.5925,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956407813868654e-05,
      "loss": 4.5698,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9555692191176016e-05,
      "loss": 4.5606,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9547306243665496e-05,
      "loss": 4.5526,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 4.547,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 4.5609,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522148401133936e-05,
      "loss": 4.5242,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951377883242715e-05,
      "loss": 4.5391,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.950539288491663e-05,
      "loss": 4.5323,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949700693740611e-05,
      "loss": 4.5116,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948862098989559e-05,
      "loss": 4.5196,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.94802514211888e-05,
      "loss": 4.4955,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947186547367828e-05,
      "loss": 4.492,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946347952616776e-05,
      "loss": 4.492,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945509357865724e-05,
      "loss": 4.501,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944672400995045e-05,
      "loss": 4.4713,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.943833806243993e-05,
      "loss": 4.4749,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942995211492941e-05,
      "loss": 4.4563,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942156616741889e-05,
      "loss": 4.4691,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9413196598712105e-05,
      "loss": 4.472,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.940482703000532e-05,
      "loss": 4.4693,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9396441082494794e-05,
      "loss": 4.4493,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388055134984274e-05,
      "loss": 4.4591,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379669187473754e-05,
      "loss": 4.4653,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371283239963234e-05,
      "loss": 4.4404,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.936291367125644e-05,
      "loss": 4.4347,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.935452772374592e-05,
      "loss": 4.4284,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.93461417762354e-05,
      "loss": 4.4209,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.933775582872488e-05,
      "loss": 4.4297,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932936988121436e-05,
      "loss": 4.4104,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932098393370384e-05,
      "loss": 4.4293,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.931259798619332e-05,
      "loss": 4.4215,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.930421203868281e-05,
      "loss": 4.4064,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.929584246997602e-05,
      "loss": 4.3896,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.92874565224655e-05,
      "loss": 4.3993,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927907057495498e-05,
      "loss": 4.3981,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927068462744446e-05,
      "loss": 4.4007,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.926231505873767e-05,
      "loss": 4.3863,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.925394549003088e-05,
      "loss": 4.3893,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.924555954252036e-05,
      "loss": 4.3806,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923717359500984e-05,
      "loss": 4.3693,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922878764749932e-05,
      "loss": 4.3668,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922041807879253e-05,
      "loss": 4.373,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921203213128201e-05,
      "loss": 4.3676,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920364618377149e-05,
      "loss": 4.3682,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919526023626097e-05,
      "loss": 4.3527,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918687428875045e-05,
      "loss": 4.3524,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917848834123993e-05,
      "loss": 4.3583,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917010239372941e-05,
      "loss": 4.357,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.916171644621889e-05,
      "loss": 4.349,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.91533468775121e-05,
      "loss": 4.3451,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914497730880531e-05,
      "loss": 4.3366,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.913659136129479e-05,
      "loss": 4.333,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.912820541378427e-05,
      "loss": 4.3366,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911981946627375e-05,
      "loss": 4.3121,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9111449897566966e-05,
      "loss": 4.3243,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9103063950056446e-05,
      "loss": 4.3271,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9094678002545926e-05,
      "loss": 4.3254,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9086292055035406e-05,
      "loss": 4.3133,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9077922486328615e-05,
      "loss": 4.3214,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9069536538818095e-05,
      "loss": 4.3009,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9061150591307575e-05,
      "loss": 4.3254,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9052764643797055e-05,
      "loss": 4.295,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9044378696286535e-05,
      "loss": 4.3072,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9036009127579744e-05,
      "loss": 4.3054,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9027623180069224e-05,
      "loss": 4.311,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901925361136244e-05,
      "loss": 4.3006,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901086766385192e-05,
      "loss": 4.2913,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90024817163414e-05,
      "loss": 4.2952,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.899409576883088e-05,
      "loss": 4.2969,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.898570982132036e-05,
      "loss": 4.2833,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.897732387380984e-05,
      "loss": 4.2829,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.896893792629932e-05,
      "loss": 4.2796,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.89605519787888e-05,
      "loss": 4.2949,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.895216603127827e-05,
      "loss": 4.2655,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.894378008376775e-05,
      "loss": 4.2705,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.893539413625723e-05,
      "loss": 4.2703,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.892702456755045e-05,
      "loss": 4.2727,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891863862003992e-05,
      "loss": 4.2633,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891025267252941e-05,
      "loss": 4.2803,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.890186672501889e-05,
      "loss": 4.2684,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.889348077750837e-05,
      "loss": 4.2632,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888509482999785e-05,
      "loss": 4.2545,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.887670888248733e-05,
      "loss": 4.2671,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.886833931378054e-05,
      "loss": 4.2617,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885995336627002e-05,
      "loss": 4.2648,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.88515674187595e-05,
      "loss": 4.2499,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.884318147124898e-05,
      "loss": 4.2623,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8834811902542186e-05,
      "loss": 4.2535,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8826425955031666e-05,
      "loss": 4.2572,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8818040007521146e-05,
      "loss": 4.2385,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8809654060010626e-05,
      "loss": 4.2581,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880128449130384e-05,
      "loss": 4.2381,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879289854379332e-05,
      "loss": 4.2221,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.87845125962828e-05,
      "loss": 4.2543,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877612664877228e-05,
      "loss": 4.2372,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.876774070126176e-05,
      "loss": 4.2377,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875935475375124e-05,
      "loss": 4.2374,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875098518504445e-05,
      "loss": 4.2239,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.193942070007324,
      "eval_runtime": 322.1087,
      "eval_samples_per_second": 1184.665,
      "eval_steps_per_second": 37.022,
      "step": 76320
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.874259923753393e-05,
      "loss": 4.2036,
      "step": 76800
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.873421329002341e-05,
      "loss": 4.2116,
      "step": 77312
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.872582734251289e-05,
      "loss": 4.2304,
      "step": 77824
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.87174577738061e-05,
      "loss": 4.2168,
      "step": 78336
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.870907182629558e-05,
      "loss": 4.2205,
      "step": 78848
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.870068587878506e-05,
      "loss": 4.2112,
      "step": 79360
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.869229993127454e-05,
      "loss": 4.2052,
      "step": 79872
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8683913983764027e-05,
      "loss": 4.1993,
      "step": 80384
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8675544415057236e-05,
      "loss": 4.205,
      "step": 80896
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8667158467546716e-05,
      "loss": 4.2029,
      "step": 81408
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8658772520036196e-05,
      "loss": 4.2163,
      "step": 81920
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8650386572525676e-05,
      "loss": 4.2105,
      "step": 82432
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8642033382622614e-05,
      "loss": 4.1943,
      "step": 82944
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8633647435112094e-05,
      "loss": 4.1863,
      "step": 83456
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8625261487601574e-05,
      "loss": 4.1803,
      "step": 83968
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8616875540091054e-05,
      "loss": 4.1793,
      "step": 84480
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8608489592580534e-05,
      "loss": 4.1916,
      "step": 84992
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8600103645070014e-05,
      "loss": 4.1807,
      "step": 85504
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8591717697559493e-05,
      "loss": 4.1762,
      "step": 86016
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.858333175004898e-05,
      "loss": 4.1956,
      "step": 86528
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.857496218134219e-05,
      "loss": 4.1822,
      "step": 87040
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.856657623383167e-05,
      "loss": 4.1777,
      "step": 87552
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.855819028632115e-05,
      "loss": 4.1804,
      "step": 88064
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854980433881063e-05,
      "loss": 4.1905,
      "step": 88576
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854143477010384e-05,
      "loss": 4.1627,
      "step": 89088
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.853304882259332e-05,
      "loss": 4.1674,
      "step": 89600
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.85246628750828e-05,
      "loss": 4.1768,
      "step": 90112
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.851627692757228e-05,
      "loss": 4.1657,
      "step": 90624
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.850790735886549e-05,
      "loss": 4.1553,
      "step": 91136
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849952141135497e-05,
      "loss": 4.1544,
      "step": 91648
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849115184264818e-05,
      "loss": 4.1685,
      "step": 92160
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.848276589513766e-05,
      "loss": 4.1648,
      "step": 92672
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.847437994762714e-05,
      "loss": 4.16,
      "step": 93184
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.846599400011662e-05,
      "loss": 4.1605,
      "step": 93696
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.84576080526061e-05,
      "loss": 4.1617,
      "step": 94208
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.844922210509558e-05,
      "loss": 4.1602,
      "step": 94720
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8440836157585056e-05,
      "loss": 4.1439,
      "step": 95232
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8432450210074536e-05,
      "loss": 4.1464,
      "step": 95744
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.842408064136775e-05,
      "loss": 4.1332,
      "step": 96256
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8415694693857225e-05,
      "loss": 4.1448,
      "step": 96768
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8407308746346705e-05,
      "loss": 4.1412,
      "step": 97280
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8398922798836185e-05,
      "loss": 4.1523,
      "step": 97792
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.83905532301294e-05,
      "loss": 4.1376,
      "step": 98304
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.838216728261888e-05,
      "loss": 4.1437,
      "step": 98816
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.837378133510836e-05,
      "loss": 4.1385,
      "step": 99328
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.836539538759784e-05,
      "loss": 4.1411,
      "step": 99840
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8357025818891057e-05,
      "loss": 4.1469,
      "step": 100352
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834863987138053e-05,
      "loss": 4.1161,
      "step": 100864
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8340270302673746e-05,
      "loss": 4.1265,
      "step": 101376
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8331884355163226e-05,
      "loss": 4.1414,
      "step": 101888
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.83234984076527e-05,
      "loss": 4.1349,
      "step": 102400
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.831511246014218e-05,
      "loss": 4.1233,
      "step": 102912
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8306742891435395e-05,
      "loss": 4.1172,
      "step": 103424
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8298356943924875e-05,
      "loss": 4.1177,
      "step": 103936
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8289970996414354e-05,
      "loss": 4.1075,
      "step": 104448
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8281585048903834e-05,
      "loss": 4.1304,
      "step": 104960
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8273199101393314e-05,
      "loss": 4.1068,
      "step": 105472
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8264813153882794e-05,
      "loss": 4.125,
      "step": 105984
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8256427206372274e-05,
      "loss": 4.1177,
      "step": 106496
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8248041258861754e-05,
      "loss": 4.1059,
      "step": 107008
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823967169015496e-05,
      "loss": 4.1131,
      "step": 107520
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823128574264444e-05,
      "loss": 4.1031,
      "step": 108032
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.822289979513392e-05,
      "loss": 4.1001,
      "step": 108544
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.82145138476234e-05,
      "loss": 4.1044,
      "step": 109056
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.820614427891661e-05,
      "loss": 4.1208,
      "step": 109568
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.819775833140609e-05,
      "loss": 4.0917,
      "step": 110080
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818937238389558e-05,
      "loss": 4.1001,
      "step": 110592
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818098643638506e-05,
      "loss": 4.0832,
      "step": 111104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.817261686767827e-05,
      "loss": 4.0996,
      "step": 111616
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.816423092016775e-05,
      "loss": 4.1138,
      "step": 112128
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.815584497265723e-05,
      "loss": 4.106,
      "step": 112640
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.814745902514671e-05,
      "loss": 4.0971,
      "step": 113152
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.813908945643992e-05,
      "loss": 4.1046,
      "step": 113664
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.81307035089294e-05,
      "loss": 4.1107,
      "step": 114176
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.812231756141888e-05,
      "loss": 4.0926,
      "step": 114688
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.811393161390836e-05,
      "loss": 4.091,
      "step": 115200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8105562045201566e-05,
      "loss": 4.0918,
      "step": 115712
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8097176097691046e-05,
      "loss": 4.0881,
      "step": 116224
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808879015018053e-05,
      "loss": 4.0911,
      "step": 116736
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808040420267001e-05,
      "loss": 4.0838,
      "step": 117248
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.807203463396322e-05,
      "loss": 4.0974,
      "step": 117760
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.80636486864527e-05,
      "loss": 4.0922,
      "step": 118272
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.805526273894218e-05,
      "loss": 4.0875,
      "step": 118784
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.804687679143166e-05,
      "loss": 4.0692,
      "step": 119296
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803850722272487e-05,
      "loss": 4.0781,
      "step": 119808
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803012127521435e-05,
      "loss": 4.0863,
      "step": 120320
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.802173532770383e-05,
      "loss": 4.0863,
      "step": 120832
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.801334938019331e-05,
      "loss": 4.0828,
      "step": 121344
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.800497981148652e-05,
      "loss": 4.0799,
      "step": 121856
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7996593863976e-05,
      "loss": 4.0813,
      "step": 122368
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7988207916465486e-05,
      "loss": 4.0694,
      "step": 122880
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7979821968954966e-05,
      "loss": 4.0655,
      "step": 123392
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7971452400248175e-05,
      "loss": 4.0809,
      "step": 123904
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7963082831541385e-05,
      "loss": 4.0717,
      "step": 124416
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7954696884030864e-05,
      "loss": 4.0773,
      "step": 124928
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7946310936520344e-05,
      "loss": 4.0691,
      "step": 125440
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7937924989009824e-05,
      "loss": 4.0613,
      "step": 125952
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7929539041499304e-05,
      "loss": 4.0743,
      "step": 126464
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7921153093988784e-05,
      "loss": 4.0789,
      "step": 126976
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7912767146478264e-05,
      "loss": 4.0695,
      "step": 127488
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.790439757777147e-05,
      "loss": 4.0663,
      "step": 128000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.789601163026095e-05,
      "loss": 4.0576,
      "step": 128512
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.788762568275043e-05,
      "loss": 4.0619,
      "step": 129024
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.787923973523992e-05,
      "loss": 4.0696,
      "step": 129536
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.787087016653313e-05,
      "loss": 4.0477,
      "step": 130048
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.786248421902261e-05,
      "loss": 4.055,
      "step": 130560
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.785409827151209e-05,
      "loss": 4.0607,
      "step": 131072
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.784571232400157e-05,
      "loss": 4.065,
      "step": 131584
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.783734275529478e-05,
      "loss": 4.0502,
      "step": 132096
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782897318658799e-05,
      "loss": 4.0578,
      "step": 132608
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782058723907747e-05,
      "loss": 4.0455,
      "step": 133120
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.781220129156695e-05,
      "loss": 4.0666,
      "step": 133632
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.780381534405643e-05,
      "loss": 4.0448,
      "step": 134144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.779542939654591e-05,
      "loss": 4.0532,
      "step": 134656
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.778704344903539e-05,
      "loss": 4.0594,
      "step": 135168
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7778657501524874e-05,
      "loss": 4.0584,
      "step": 135680
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.777028793281808e-05,
      "loss": 4.0552,
      "step": 136192
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.776190198530756e-05,
      "loss": 4.0413,
      "step": 136704
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.775351603779704e-05,
      "loss": 4.0501,
      "step": 137216
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.774513009028652e-05,
      "loss": 4.0539,
      "step": 137728
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7736744142776e-05,
      "loss": 4.04,
      "step": 138240
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.772835819526548e-05,
      "loss": 4.0447,
      "step": 138752
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7719972247754956e-05,
      "loss": 4.0388,
      "step": 139264
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7711586300244436e-05,
      "loss": 4.0563,
      "step": 139776
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.770321673153765e-05,
      "loss": 4.0295,
      "step": 140288
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.769483078402713e-05,
      "loss": 4.0401,
      "step": 140800
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.768644483651661e-05,
      "loss": 4.0371,
      "step": 141312
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.767805888900609e-05,
      "loss": 4.041,
      "step": 141824
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766968932029931e-05,
      "loss": 4.0332,
      "step": 142336
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766130337278878e-05,
      "loss": 4.0515,
      "step": 142848
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.765291742527826e-05,
      "loss": 4.0445,
      "step": 143360
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7644547856571476e-05,
      "loss": 4.0355,
      "step": 143872
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7636161909060956e-05,
      "loss": 4.0293,
      "step": 144384
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.762777596155043e-05,
      "loss": 4.0439,
      "step": 144896
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761939001403991e-05,
      "loss": 4.0399,
      "step": 145408
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761100406652939e-05,
      "loss": 4.0494,
      "step": 145920
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.760261811901887e-05,
      "loss": 4.0236,
      "step": 146432
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.759423217150835e-05,
      "loss": 4.045,
      "step": 146944
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.758584622399783e-05,
      "loss": 4.0364,
      "step": 147456
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7577476655291045e-05,
      "loss": 4.0456,
      "step": 147968
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7569107086584254e-05,
      "loss": 4.025,
      "step": 148480
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7560721139073734e-05,
      "loss": 4.039,
      "step": 148992
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7552335191563214e-05,
      "loss": 4.0294,
      "step": 149504
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7543949244052694e-05,
      "loss": 4.0173,
      "step": 150016
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7535563296542174e-05,
      "loss": 4.0402,
      "step": 150528
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7527177349031654e-05,
      "loss": 4.0333,
      "step": 151040
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7518791401521134e-05,
      "loss": 4.0291,
      "step": 151552
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7510405454010614e-05,
      "loss": 4.0246,
      "step": 152064
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.750203588530382e-05,
      "loss": 4.0217,
      "step": 152576
    },
    {
      "epoch": 1.03,
      "eval_loss": 4.023805618286133,
      "eval_runtime": 322.2873,
      "eval_samples_per_second": 1184.009,
      "eval_steps_per_second": 37.001,
      "step": 152640
    }
  ],
  "logging_steps": 512,
  "max_steps": 3052726,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 10,
  "total_flos": 1.0596222688872038e+17,
  "trial_name": null,
  "trial_params": null
}