{
  "best_metric": 3.878344774246216,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/transformer/2/checkpoints/checkpoint-534240",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 534240,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.9166,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 6.8283,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 6.2048,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 5.9964,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 5.8327,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 5.7348,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 5.6227,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 5.5631,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 5.4813,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 5.4183,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 5.3724,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 5.3348,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989936862987376e-05,
      "loss": 5.2877,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989099906116697e-05,
      "loss": 5.2345,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988261311365645e-05,
      "loss": 5.1989,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.987422716614593e-05,
      "loss": 5.1625,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986584121863541e-05,
      "loss": 5.1257,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985745527112489e-05,
      "loss": 5.1041,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984906932361437e-05,
      "loss": 5.0766,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984068337610385e-05,
      "loss": 5.0445,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983229742859333e-05,
      "loss": 5.0345,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9823927859886547e-05,
      "loss": 5.0011,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9815541912376026e-05,
      "loss": 4.9832,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 4.956,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 4.9435,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790400448648195e-05,
      "loss": 4.9146,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9782014501137675e-05,
      "loss": 4.8953,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773628553627155e-05,
      "loss": 4.8822,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 4.8622,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756873037409844e-05,
      "loss": 4.8349,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748487089899324e-05,
      "loss": 4.8325,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9740101142388804e-05,
      "loss": 4.8213,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 4.8074,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.97233456261715e-05,
      "loss": 4.7883,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.971495967866098e-05,
      "loss": 4.7844,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.970657373115046e-05,
      "loss": 4.7602,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.969818778363994e-05,
      "loss": 4.7506,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968981821493315e-05,
      "loss": 4.7242,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968143226742263e-05,
      "loss": 4.725,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967304631991211e-05,
      "loss": 4.7118,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966466037240159e-05,
      "loss": 4.7087,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965627442489107e-05,
      "loss": 4.6969,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964788847738054e-05,
      "loss": 4.6931,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963950252987002e-05,
      "loss": 4.6733,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 4.6683,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962273063484898e-05,
      "loss": 4.661,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 4.6458,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 4.6523,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 4.6296,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958920322361064e-05,
      "loss": 4.6138,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958081727610012e-05,
      "loss": 4.6249,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957244770739333e-05,
      "loss": 4.6042,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956406175988281e-05,
      "loss": 4.5984,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.955567581237229e-05,
      "loss": 4.5776,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9547306243665496e-05,
      "loss": 4.5866,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 4.5636,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 4.5956,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522148401133936e-05,
      "loss": 4.5581,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951376245362342e-05,
      "loss": 4.5709,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95053765061129e-05,
      "loss": 4.5619,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949699055860238e-05,
      "loss": 4.5366,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948860461109186e-05,
      "loss": 4.5299,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948023504238507e-05,
      "loss": 4.535,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947184909487455e-05,
      "loss": 4.518,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946346314736403e-05,
      "loss": 4.5114,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945507719985351e-05,
      "loss": 4.5302,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944670763114672e-05,
      "loss": 4.5107,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.94383216836362e-05,
      "loss": 4.4995,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942993573612568e-05,
      "loss": 4.4835,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942156616741889e-05,
      "loss": 4.4833,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.941318021990837e-05,
      "loss": 4.4932,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9404794272397856e-05,
      "loss": 4.494,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9396408324887336e-05,
      "loss": 4.4816,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388022377376816e-05,
      "loss": 4.4806,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379636429866296e-05,
      "loss": 4.4788,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371250482355776e-05,
      "loss": 4.4716,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9362880913648985e-05,
      "loss": 4.4609,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9354494966138465e-05,
      "loss": 4.4556,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9346109018627945e-05,
      "loss": 4.4518,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9337723071117425e-05,
      "loss": 4.4405,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9329337123606905e-05,
      "loss": 4.4415,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932095117609638e-05,
      "loss": 4.4445,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.931256522858586e-05,
      "loss": 4.4426,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.930417928107534e-05,
      "loss": 4.4273,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9295809712368554e-05,
      "loss": 4.4064,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9287423764858034e-05,
      "loss": 4.4256,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9279037817347514e-05,
      "loss": 4.4207,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9270651869836994e-05,
      "loss": 4.4195,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.92622823011302e-05,
      "loss": 4.4117,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.925391273242342e-05,
      "loss": 4.4124,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.92455267849129e-05,
      "loss": 4.4016,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923714083740238e-05,
      "loss": 4.3898,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922875488989185e-05,
      "loss": 4.3966,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922036894238133e-05,
      "loss": 4.3928,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921198299487081e-05,
      "loss": 4.4008,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920359704736029e-05,
      "loss": 4.3973,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919521109984978e-05,
      "loss": 4.3903,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918684153114299e-05,
      "loss": 4.3711,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917845558363247e-05,
      "loss": 4.3732,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917006963612195e-05,
      "loss": 4.376,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9161700067415156e-05,
      "loss": 4.3736,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9153314119904636e-05,
      "loss": 4.3631,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9144928172394116e-05,
      "loss": 4.3643,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9136542224883596e-05,
      "loss": 4.3523,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9128156277373076e-05,
      "loss": 4.3675,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9119786708666285e-05,
      "loss": 4.3378,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9111400761155765e-05,
      "loss": 4.3497,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9103014813645245e-05,
      "loss": 4.35,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.909462886613473e-05,
      "loss": 4.3402,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.908624291862421e-05,
      "loss": 4.3381,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.907785697111369e-05,
      "loss": 4.3407,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906947102360317e-05,
      "loss": 4.3324,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906108507609265e-05,
      "loss": 4.3383,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.905271550738586e-05,
      "loss": 4.3341,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.904432955987534e-05,
      "loss": 4.3254,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.903594361236482e-05,
      "loss": 4.3291,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.902757404365803e-05,
      "loss": 4.3337,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901920447495124e-05,
      "loss": 4.3227,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901081852744072e-05,
      "loss": 4.3126,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90024325799302e-05,
      "loss": 4.3157,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8994046632419686e-05,
      "loss": 4.3178,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8985660684909166e-05,
      "loss": 4.3104,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8977274737398646e-05,
      "loss": 4.2997,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8968888789888125e-05,
      "loss": 4.3077,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8960519221181335e-05,
      "loss": 4.3121,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8952133273670815e-05,
      "loss": 4.3025,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8943747326160294e-05,
      "loss": 4.2989,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8935377757453504e-05,
      "loss": 4.2894,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8926991809942984e-05,
      "loss": 4.2931,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8918605862432463e-05,
      "loss": 4.2871,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8910219914921943e-05,
      "loss": 4.3012,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8901833967411423e-05,
      "loss": 4.2928,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.88934480199009e-05,
      "loss": 4.2736,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888506207239038e-05,
      "loss": 4.2811,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.887667612487986e-05,
      "loss": 4.2871,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.886830655617308e-05,
      "loss": 4.28,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885993698746629e-05,
      "loss": 4.2854,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885155103995577e-05,
      "loss": 4.2714,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.884316509244525e-05,
      "loss": 4.279,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.883477914493473e-05,
      "loss": 4.2809,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.882639319742421e-05,
      "loss": 4.2697,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.881800724991369e-05,
      "loss": 4.2672,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880962130240316e-05,
      "loss": 4.268,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880123535489264e-05,
      "loss": 4.2566,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879286578618586e-05,
      "loss": 4.2551,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.878447983867534e-05,
      "loss": 4.2694,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877609389116482e-05,
      "loss": 4.2469,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.87677079436543e-05,
      "loss": 4.253,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875933837494751e-05,
      "loss": 4.2581,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8750952427436986e-05,
      "loss": 4.2526,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.215147495269775,
      "eval_runtime": 304.8841,
      "eval_samples_per_second": 1251.593,
      "eval_steps_per_second": 39.113,
      "step": 76320
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8742566479926466e-05,
      "loss": 4.2344,
      "step": 76800
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8734180532415946e-05,
      "loss": 4.236,
      "step": 77312
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.872581096370916e-05,
      "loss": 4.255,
      "step": 77824
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8717425016198635e-05,
      "loss": 4.2389,
      "step": 78336
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8709039068688115e-05,
      "loss": 4.2493,
      "step": 78848
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8700653121177595e-05,
      "loss": 4.2222,
      "step": 79360
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.869228355247081e-05,
      "loss": 4.2406,
      "step": 79872
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.868389760496029e-05,
      "loss": 4.2264,
      "step": 80384
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8675528036253507e-05,
      "loss": 4.2254,
      "step": 80896
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8667142088742986e-05,
      "loss": 4.2283,
      "step": 81408
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.865875614123246e-05,
      "loss": 4.2277,
      "step": 81920
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.865037019372194e-05,
      "loss": 4.2317,
      "step": 82432
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8642017003818885e-05,
      "loss": 4.2154,
      "step": 82944
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8633631056308365e-05,
      "loss": 4.2123,
      "step": 83456
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8625245108797845e-05,
      "loss": 4.2101,
      "step": 83968
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8616859161287324e-05,
      "loss": 4.2067,
      "step": 84480
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8608473213776804e-05,
      "loss": 4.2051,
      "step": 84992
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.860008726626628e-05,
      "loss": 4.2084,
      "step": 85504
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8591701318755764e-05,
      "loss": 4.2063,
      "step": 86016
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8583315371245244e-05,
      "loss": 4.229,
      "step": 86528
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8574929423734724e-05,
      "loss": 4.2041,
      "step": 87040
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8566543476224204e-05,
      "loss": 4.2086,
      "step": 87552
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8558157528713684e-05,
      "loss": 4.2011,
      "step": 88064
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.8549771581203164e-05,
      "loss": 4.2066,
      "step": 88576
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.854140201249637e-05,
      "loss": 4.1972,
      "step": 89088
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.853301606498585e-05,
      "loss": 4.1946,
      "step": 89600
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.852463011747533e-05,
      "loss": 4.1939,
      "step": 90112
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.851624416996481e-05,
      "loss": 4.1859,
      "step": 90624
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.850785822245429e-05,
      "loss": 4.1722,
      "step": 91136
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849947227494377e-05,
      "loss": 4.1868,
      "step": 91648
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.849108632743325e-05,
      "loss": 4.1856,
      "step": 92160
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.848271675872646e-05,
      "loss": 4.1907,
      "step": 92672
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.847433081121595e-05,
      "loss": 4.1807,
      "step": 93184
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.846594486370543e-05,
      "loss": 4.1859,
      "step": 93696
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.845755891619491e-05,
      "loss": 4.175,
      "step": 94208
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.844918934748812e-05,
      "loss": 4.1747,
      "step": 94720
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.84408033999776e-05,
      "loss": 4.1618,
      "step": 95232
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.843241745246708e-05,
      "loss": 4.1688,
      "step": 95744
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.842403150495656e-05,
      "loss": 4.166,
      "step": 96256
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.841564555744604e-05,
      "loss": 4.1645,
      "step": 96768
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.840725960993552e-05,
      "loss": 4.1661,
      "step": 97280
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8398873662425e-05,
      "loss": 4.173,
      "step": 97792
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.839048771491447e-05,
      "loss": 4.1625,
      "step": 98304
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8382118146207687e-05,
      "loss": 4.1639,
      "step": 98816
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8373732198697167e-05,
      "loss": 4.1662,
      "step": 99328
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8365346251186646e-05,
      "loss": 4.1513,
      "step": 99840
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8356960303676126e-05,
      "loss": 4.1621,
      "step": 100352
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834859073496934e-05,
      "loss": 4.1495,
      "step": 100864
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.834020478745882e-05,
      "loss": 4.1445,
      "step": 101376
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.833183521875203e-05,
      "loss": 4.1601,
      "step": 101888
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.832344927124151e-05,
      "loss": 4.1463,
      "step": 102400
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.831506332373099e-05,
      "loss": 4.1454,
      "step": 102912
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.830667737622047e-05,
      "loss": 4.1358,
      "step": 103424
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.829830780751368e-05,
      "loss": 4.1425,
      "step": 103936
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.828992186000316e-05,
      "loss": 4.1296,
      "step": 104448
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.828153591249264e-05,
      "loss": 4.1575,
      "step": 104960
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.827314996498212e-05,
      "loss": 4.1355,
      "step": 105472
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8264780396275336e-05,
      "loss": 4.1476,
      "step": 105984
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8256394448764816e-05,
      "loss": 4.1466,
      "step": 106496
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8248008501254296e-05,
      "loss": 4.1275,
      "step": 107008
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823962255374377e-05,
      "loss": 4.1215,
      "step": 107520
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.823123660623325e-05,
      "loss": 4.1332,
      "step": 108032
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.822285065872273e-05,
      "loss": 4.1233,
      "step": 108544
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8214481090015945e-05,
      "loss": 4.1178,
      "step": 109056
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.820609514250542e-05,
      "loss": 4.1365,
      "step": 109568
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.81977091949949e-05,
      "loss": 4.1314,
      "step": 110080
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818932324748438e-05,
      "loss": 4.1191,
      "step": 110592
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.818093729997386e-05,
      "loss": 4.1123,
      "step": 111104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.817255135246334e-05,
      "loss": 4.1137,
      "step": 111616
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8164181783756554e-05,
      "loss": 4.1262,
      "step": 112128
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8155795836246034e-05,
      "loss": 4.1256,
      "step": 112640
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8147409888735514e-05,
      "loss": 4.123,
      "step": 113152
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8139023941224994e-05,
      "loss": 4.1258,
      "step": 113664
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8130637993714474e-05,
      "loss": 4.125,
      "step": 114176
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8122252046203954e-05,
      "loss": 4.1249,
      "step": 114688
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.8113866098693434e-05,
      "loss": 4.1091,
      "step": 115200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.810549652998664e-05,
      "loss": 4.118,
      "step": 115712
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.809711058247612e-05,
      "loss": 4.1149,
      "step": 116224
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.80887246349656e-05,
      "loss": 4.1022,
      "step": 116736
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.808033868745508e-05,
      "loss": 4.1144,
      "step": 117248
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.807195273994456e-05,
      "loss": 4.1136,
      "step": 117760
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.806358317123777e-05,
      "loss": 4.1179,
      "step": 118272
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.805519722372726e-05,
      "loss": 4.099,
      "step": 118784
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.804681127621674e-05,
      "loss": 4.088,
      "step": 119296
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.803842532870622e-05,
      "loss": 4.1055,
      "step": 119808
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.80300393811957e-05,
      "loss": 4.105,
      "step": 120320
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.802165343368518e-05,
      "loss": 4.1073,
      "step": 120832
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.801326748617466e-05,
      "loss": 4.1008,
      "step": 121344
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.800489791746787e-05,
      "loss": 4.1016,
      "step": 121856
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.799651196995735e-05,
      "loss": 4.0987,
      "step": 122368
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.798812602244683e-05,
      "loss": 4.0874,
      "step": 122880
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.79797400749363e-05,
      "loss": 4.0959,
      "step": 123392
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7971370506229516e-05,
      "loss": 4.0942,
      "step": 123904
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7962984558718996e-05,
      "loss": 4.1015,
      "step": 124416
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7954598611208476e-05,
      "loss": 4.1071,
      "step": 124928
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7946212663697956e-05,
      "loss": 4.0982,
      "step": 125440
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.793784309499117e-05,
      "loss": 4.086,
      "step": 125952
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.792945714748065e-05,
      "loss": 4.088,
      "step": 126464
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.792107119997013e-05,
      "loss": 4.0892,
      "step": 126976
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7912685252459605e-05,
      "loss": 4.0973,
      "step": 127488
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7904299304949085e-05,
      "loss": 4.0829,
      "step": 128000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.78959297362423e-05,
      "loss": 4.0843,
      "step": 128512
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7887543788731774e-05,
      "loss": 4.077,
      "step": 129024
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7879157841221254e-05,
      "loss": 4.0957,
      "step": 129536
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7870771893710734e-05,
      "loss": 4.0656,
      "step": 130048
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7862385946200214e-05,
      "loss": 4.0803,
      "step": 130560
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.785401637749343e-05,
      "loss": 4.0852,
      "step": 131072
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.784563042998291e-05,
      "loss": 4.0736,
      "step": 131584
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.783724448247239e-05,
      "loss": 4.0738,
      "step": 132096
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782885853496187e-05,
      "loss": 4.083,
      "step": 132608
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.782048896625508e-05,
      "loss": 4.073,
      "step": 133120
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.781210301874456e-05,
      "loss": 4.0778,
      "step": 133632
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.780371707123404e-05,
      "loss": 4.0805,
      "step": 134144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.779533112372352e-05,
      "loss": 4.0713,
      "step": 134656
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.778696155501673e-05,
      "loss": 4.079,
      "step": 135168
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.777857560750621e-05,
      "loss": 4.0847,
      "step": 135680
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7770206038799423e-05,
      "loss": 4.0737,
      "step": 136192
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7761820091288903e-05,
      "loss": 4.0665,
      "step": 136704
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7753434143778383e-05,
      "loss": 4.0716,
      "step": 137216
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.774504819626786e-05,
      "loss": 4.0716,
      "step": 137728
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.773666224875734e-05,
      "loss": 4.0648,
      "step": 138240
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.772827630124682e-05,
      "loss": 4.0603,
      "step": 138752
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.77198903537363e-05,
      "loss": 4.063,
      "step": 139264
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.771150440622578e-05,
      "loss": 4.0758,
      "step": 139776
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.770313483751899e-05,
      "loss": 4.0663,
      "step": 140288
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.769474889000847e-05,
      "loss": 4.0652,
      "step": 140800
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.768636294249795e-05,
      "loss": 4.0562,
      "step": 141312
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.767797699498743e-05,
      "loss": 4.0592,
      "step": 141824
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766960742628064e-05,
      "loss": 4.056,
      "step": 142336
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.766122147877012e-05,
      "loss": 4.073,
      "step": 142848
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.76528355312596e-05,
      "loss": 4.0694,
      "step": 143360
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.764444958374909e-05,
      "loss": 4.0453,
      "step": 143872
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.76360800150423e-05,
      "loss": 4.0531,
      "step": 144384
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.762769406753178e-05,
      "loss": 4.0651,
      "step": 144896
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761930812002126e-05,
      "loss": 4.0597,
      "step": 145408
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.761092217251074e-05,
      "loss": 4.0652,
      "step": 145920
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7602552603803946e-05,
      "loss": 4.0512,
      "step": 146432
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7594166656293426e-05,
      "loss": 4.0585,
      "step": 146944
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7585780708782906e-05,
      "loss": 4.0613,
      "step": 147456
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7577394761272386e-05,
      "loss": 4.0534,
      "step": 147968
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7569008813761866e-05,
      "loss": 4.0516,
      "step": 148480
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7560639245055075e-05,
      "loss": 4.0562,
      "step": 148992
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7552253297544555e-05,
      "loss": 4.0462,
      "step": 149504
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7543867350034035e-05,
      "loss": 4.0457,
      "step": 150016
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.753548140252352e-05,
      "loss": 4.06,
      "step": 150528
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.752711183381673e-05,
      "loss": 4.0409,
      "step": 151040
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.751874226510994e-05,
      "loss": 4.0463,
      "step": 151552
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.751035631759942e-05,
      "loss": 4.0508,
      "step": 152064
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.75019703700889e-05,
      "loss": 4.0455,
      "step": 152576
    },
    {
      "epoch": 1.03,
      "eval_loss": 4.04611349105835,
      "eval_runtime": 306.1519,
      "eval_samples_per_second": 1246.411,
      "eval_steps_per_second": 38.951,
      "step": 152640
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.749358442257838e-05,
      "loss": 4.0283,
      "step": 153088
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.748519847506786e-05,
      "loss": 4.0347,
      "step": 153600
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.747681252755734e-05,
      "loss": 4.0529,
      "step": 154112
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.746842658004682e-05,
      "loss": 4.0345,
      "step": 154624
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.74600406325363e-05,
      "loss": 4.0531,
      "step": 155136
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.745165468502578e-05,
      "loss": 4.0278,
      "step": 155648
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.744326873751526e-05,
      "loss": 4.0451,
      "step": 156160
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.743488279000474e-05,
      "loss": 4.0256,
      "step": 156672
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.742649684249422e-05,
      "loss": 4.0388,
      "step": 157184
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.74181108949837e-05,
      "loss": 4.0354,
      "step": 157696
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.740972494747318e-05,
      "loss": 4.0332,
      "step": 158208
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.740135537876639e-05,
      "loss": 4.0432,
      "step": 158720
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.739300218886333e-05,
      "loss": 4.0248,
      "step": 159232
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.738461624135281e-05,
      "loss": 4.0242,
      "step": 159744
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.737623029384229e-05,
      "loss": 4.0223,
      "step": 160256
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.736784434633177e-05,
      "loss": 4.0183,
      "step": 160768
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.735945839882125e-05,
      "loss": 4.021,
      "step": 161280
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.735107245131073e-05,
      "loss": 4.0269,
      "step": 161792
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.734268650380021e-05,
      "loss": 4.0204,
      "step": 162304
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.733430055628969e-05,
      "loss": 4.0457,
      "step": 162816
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.732591460877917e-05,
      "loss": 4.0241,
      "step": 163328
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.731752866126865e-05,
      "loss": 4.0303,
      "step": 163840
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.730914271375813e-05,
      "loss": 4.0238,
      "step": 164352
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.730077314505134e-05,
      "loss": 4.0295,
      "step": 164864
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.729238719754082e-05,
      "loss": 4.0212,
      "step": 165376
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.72840012500303e-05,
      "loss": 4.0171,
      "step": 165888
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.727561530251978e-05,
      "loss": 4.0213,
      "step": 166400
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.726722935500926e-05,
      "loss": 4.0112,
      "step": 166912
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.725885978630247e-05,
      "loss": 3.9993,
      "step": 167424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.725047383879195e-05,
      "loss": 4.0125,
      "step": 167936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.724208789128143e-05,
      "loss": 4.0211,
      "step": 168448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7233718322574647e-05,
      "loss": 4.0172,
      "step": 168960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7225332375064127e-05,
      "loss": 4.0157,
      "step": 169472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7216946427553606e-05,
      "loss": 4.0187,
      "step": 169984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7208560480043086e-05,
      "loss": 4.0067,
      "step": 170496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7200190911336296e-05,
      "loss": 4.006,
      "step": 171008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7191804963825775e-05,
      "loss": 3.9989,
      "step": 171520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7183419016315255e-05,
      "loss": 4.0015,
      "step": 172032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7175033068804735e-05,
      "loss": 3.998,
      "step": 172544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7166663500097945e-05,
      "loss": 4.0005,
      "step": 173056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7158277552587424e-05,
      "loss": 4.0049,
      "step": 173568
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7149891605076904e-05,
      "loss": 4.0124,
      "step": 174080
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7141505657566384e-05,
      "loss": 4.0008,
      "step": 174592
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.71331360888596e-05,
      "loss": 4.0052,
      "step": 175104
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.712475014134908e-05,
      "loss": 4.0087,
      "step": 175616
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.711636419383856e-05,
      "loss": 3.9909,
      "step": 176128
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.710797824632804e-05,
      "loss": 4.0026,
      "step": 176640
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709960867762125e-05,
      "loss": 3.9968,
      "step": 177152
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.709122273011073e-05,
      "loss": 3.9862,
      "step": 177664
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.708283678260021e-05,
      "loss": 4.0021,
      "step": 178176
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.707445083508969e-05,
      "loss": 3.9938,
      "step": 178688
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.706609764518663e-05,
      "loss": 3.9906,
      "step": 179200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.705771169767611e-05,
      "loss": 3.9861,
      "step": 179712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.704932575016559e-05,
      "loss": 3.9891,
      "step": 180224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7040939802655074e-05,
      "loss": 3.9741,
      "step": 180736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7032553855144554e-05,
      "loss": 4.0014,
      "step": 181248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7024167907634034e-05,
      "loss": 3.9855,
      "step": 181760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7015781960123514e-05,
      "loss": 3.9965,
      "step": 182272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.7007396012612994e-05,
      "loss": 3.9991,
      "step": 182784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.69990264439062e-05,
      "loss": 3.9802,
      "step": 183296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.699064049639568e-05,
      "loss": 3.9723,
      "step": 183808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.698225454888516e-05,
      "loss": 3.9901,
      "step": 184320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.697386860137464e-05,
      "loss": 3.9767,
      "step": 184832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.696549903266785e-05,
      "loss": 3.9713,
      "step": 185344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.695711308515733e-05,
      "loss": 3.9921,
      "step": 185856
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.694872713764681e-05,
      "loss": 3.9876,
      "step": 186368
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.694034119013629e-05,
      "loss": 3.9735,
      "step": 186880
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.693197162142951e-05,
      "loss": 3.9691,
      "step": 187392
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.692358567391899e-05,
      "loss": 3.9721,
      "step": 187904
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.691519972640847e-05,
      "loss": 3.979,
      "step": 188416
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.690681377889795e-05,
      "loss": 3.9864,
      "step": 188928
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6898444210191157e-05,
      "loss": 3.9797,
      "step": 189440
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6890058262680636e-05,
      "loss": 3.988,
      "step": 189952
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6881672315170116e-05,
      "loss": 3.9815,
      "step": 190464
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6873286367659596e-05,
      "loss": 3.9879,
      "step": 190976
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6864916798952806e-05,
      "loss": 3.9702,
      "step": 191488
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6856530851442285e-05,
      "loss": 3.9785,
      "step": 192000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6848144903931765e-05,
      "loss": 3.9801,
      "step": 192512
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.6839758956421245e-05,
      "loss": 3.9659,
      "step": 193024
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.683138938771446e-05,
      "loss": 3.9765,
      "step": 193536
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.682300344020394e-05,
      "loss": 3.9825,
      "step": 194048
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.681461749269342e-05,
      "loss": 3.9821,
      "step": 194560
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.68062315451829e-05,
      "loss": 3.9655,
      "step": 195072
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.679786197647611e-05,
      "loss": 3.9529,
      "step": 195584
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.678947602896559e-05,
      "loss": 3.9726,
      "step": 196096
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.678109008145507e-05,
      "loss": 3.9654,
      "step": 196608
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.677270413394455e-05,
      "loss": 3.9769,
      "step": 197120
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.676433456523776e-05,
      "loss": 3.9685,
      "step": 197632
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.675596499653097e-05,
      "loss": 3.9687,
      "step": 198144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.674757904902045e-05,
      "loss": 3.9671,
      "step": 198656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6739193101509935e-05,
      "loss": 3.956,
      "step": 199168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6730807153999415e-05,
      "loss": 3.9638,
      "step": 199680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6722421206488895e-05,
      "loss": 3.9624,
      "step": 200192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6714035258978375e-05,
      "loss": 3.9756,
      "step": 200704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6705649311467855e-05,
      "loss": 3.9805,
      "step": 201216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6697263363957335e-05,
      "loss": 3.9688,
      "step": 201728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6688893795250544e-05,
      "loss": 3.9541,
      "step": 202240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6680507847740024e-05,
      "loss": 3.963,
      "step": 202752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6672121900229504e-05,
      "loss": 3.9609,
      "step": 203264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6663735952718984e-05,
      "loss": 3.9698,
      "step": 203776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.665536638401219e-05,
      "loss": 3.9569,
      "step": 204288
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.664698043650167e-05,
      "loss": 3.9606,
      "step": 204800
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.663859448899115e-05,
      "loss": 3.9511,
      "step": 205312
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.663020854148063e-05,
      "loss": 3.971,
      "step": 205824
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.662183897277385e-05,
      "loss": 3.9422,
      "step": 206336
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.661345302526333e-05,
      "loss": 3.9585,
      "step": 206848
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.660508345655654e-05,
      "loss": 3.9545,
      "step": 207360
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.659669750904602e-05,
      "loss": 3.9569,
      "step": 207872
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.65883115615355e-05,
      "loss": 3.9498,
      "step": 208384
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657992561402498e-05,
      "loss": 3.9613,
      "step": 208896
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.657153966651446e-05,
      "loss": 3.9568,
      "step": 209408
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.656315371900393e-05,
      "loss": 3.9521,
      "step": 209920
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.655476777149341e-05,
      "loss": 3.9605,
      "step": 210432
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6546398202786626e-05,
      "loss": 3.9479,
      "step": 210944
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6538012255276106e-05,
      "loss": 3.9643,
      "step": 211456
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6529626307765586e-05,
      "loss": 3.9694,
      "step": 211968
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.65212567390588e-05,
      "loss": 3.9522,
      "step": 212480
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.651287079154828e-05,
      "loss": 3.9504,
      "step": 212992
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.650448484403776e-05,
      "loss": 3.9506,
      "step": 213504
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6496098896527235e-05,
      "loss": 3.9528,
      "step": 214016
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6487712949016715e-05,
      "loss": 3.9464,
      "step": 214528
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6479327001506195e-05,
      "loss": 3.9465,
      "step": 215040
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6470941053995675e-05,
      "loss": 3.943,
      "step": 215552
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6462555106485155e-05,
      "loss": 3.9605,
      "step": 216064
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6454185537778364e-05,
      "loss": 3.9515,
      "step": 216576
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6445799590267844e-05,
      "loss": 3.9489,
      "step": 217088
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6437413642757324e-05,
      "loss": 3.941,
      "step": 217600
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6429027695246804e-05,
      "loss": 3.9455,
      "step": 218112
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.642065812654002e-05,
      "loss": 3.9401,
      "step": 218624
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.64122721790295e-05,
      "loss": 3.9599,
      "step": 219136
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.640390261032271e-05,
      "loss": 3.9559,
      "step": 219648
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.639551666281219e-05,
      "loss": 3.9361,
      "step": 220160
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.638713071530167e-05,
      "loss": 3.9362,
      "step": 220672
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637874476779115e-05,
      "loss": 3.9511,
      "step": 221184
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637035882028063e-05,
      "loss": 3.9445,
      "step": 221696
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.636197287277011e-05,
      "loss": 3.9571,
      "step": 222208
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.635358692525959e-05,
      "loss": 3.9359,
      "step": 222720
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.634520097774907e-05,
      "loss": 3.9516,
      "step": 223232
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.633683140904228e-05,
      "loss": 3.945,
      "step": 223744
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.632844546153176e-05,
      "loss": 3.9465,
      "step": 224256
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6320059514021244e-05,
      "loss": 3.9431,
      "step": 224768
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6311673566510724e-05,
      "loss": 3.9447,
      "step": 225280
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6303303997803934e-05,
      "loss": 3.9362,
      "step": 225792
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6294918050293413e-05,
      "loss": 3.9393,
      "step": 226304
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6286532102782893e-05,
      "loss": 3.9497,
      "step": 226816
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6278146155272373e-05,
      "loss": 3.9389,
      "step": 227328
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.626977658656558e-05,
      "loss": 3.9349,
      "step": 227840
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.626139063905506e-05,
      "loss": 3.9408,
      "step": 228352
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.625300469154454e-05,
      "loss": 3.9381,
      "step": 228864
    },
    {
      "epoch": 0.03,
      "eval_loss": 3.9720582962036133,
      "eval_runtime": 303.5227,
      "eval_samples_per_second": 1257.207,
      "eval_steps_per_second": 39.289,
      "step": 228960
    },
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.624461874403402e-05, | |
| "loss": 3.9291, | |
| "step": 229376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.623624917532723e-05, | |
| "loss": 3.9265, | |
| "step": 229888 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.622786322781671e-05, | |
| "loss": 3.947, | |
| "step": 230400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.62194772803062e-05, | |
| "loss": 3.9296, | |
| "step": 230912 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.621109133279568e-05, | |
| "loss": 3.9511, | |
| "step": 231424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.620272176408889e-05, | |
| "loss": 3.9187, | |
| "step": 231936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.619433581657837e-05, | |
| "loss": 3.9413, | |
| "step": 232448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.618594986906785e-05, | |
| "loss": 3.9247, | |
| "step": 232960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.617756392155733e-05, | |
| "loss": 3.9298, | |
| "step": 233472 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6169194352850536e-05, | |
| "loss": 3.9343, | |
| "step": 233984 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6160808405340016e-05, | |
| "loss": 3.9274, | |
| "step": 234496 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6152422457829496e-05, | |
| "loss": 3.9388, | |
| "step": 235008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6144069267926434e-05, | |
| "loss": 3.9238, | |
| "step": 235520 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.613568332041592e-05, | |
| "loss": 3.9195, | |
| "step": 236032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.61272973729054e-05, | |
| "loss": 3.9241, | |
| "step": 236544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.611891142539488e-05, | |
| "loss": 3.9122, | |
| "step": 237056 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.611052547788436e-05, | |
| "loss": 3.922, | |
| "step": 237568 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.610213953037384e-05, | |
| "loss": 3.9251, | |
| "step": 238080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.609375358286332e-05, | |
| "loss": 3.9186, | |
| "step": 238592 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.60853676353528e-05, | |
| "loss": 3.9435, | |
| "step": 239104 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.607698168784228e-05, | |
| "loss": 3.9275, | |
| "step": 239616 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.606859574033176e-05, | |
| "loss": 3.9341, | |
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.606020979282124e-05, | |
| "loss": 3.9226, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6051823845310714e-05, | |
| "loss": 3.9275, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.604345427660393e-05, | |
| "loss": 3.9205, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.603506832909341e-05, | |
| "loss": 3.9201, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.602668238158289e-05, | |
| "loss": 3.9226, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.60183128128761e-05, | |
| "loss": 3.9122, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.6009926865365585e-05, | |
| "loss": 3.9083, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.6001540917855065e-05, | |
| "loss": 3.9116, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.599315497034454e-05, | |
| "loss": 3.9243, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5984785401637754e-05, | |
| "loss": 3.9214, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5976399454127234e-05, | |
| "loss": 3.922, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5968013506616714e-05, | |
| "loss": 3.9253, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5959643937909923e-05, | |
| "loss": 3.9054, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5951257990399403e-05, | |
| "loss": 3.9136, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.594287204288888e-05, | |
| "loss": 3.9089, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.593448609537836e-05, | |
| "loss": 3.9012, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5926100147867836e-05, | |
| "loss": 3.9058, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.591771420035732e-05, | |
| "loss": 3.9048, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590934463165054e-05, | |
| "loss": 3.9135, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.590095868414001e-05, | |
| "loss": 3.9151, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.589257273662949e-05, | |
| "loss": 3.9127, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.588418678911897e-05, | |
| "loss": 3.9061, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.587580084160845e-05, | |
| "loss": 3.9179, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.586741489409793e-05, | |
| "loss": 3.8995, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585904532539114e-05, | |
| "loss": 3.9107, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.585065937788062e-05, | |
| "loss": 3.9055, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.58422734303701e-05, | |
| "loss": 3.8928, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.583388748285958e-05, | |
| "loss": 3.9142, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.582550153534906e-05, | |
| "loss": 3.902, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.581711558783854e-05, | |
| "loss": 3.9021, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580874601913176e-05, | |
| "loss": 3.893, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.580036007162124e-05, | |
| "loss": 3.8982, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.579197412411072e-05, | |
| "loss": 3.8843, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.57835881766002e-05, | |
| "loss": 3.9066, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.577520222908968e-05, | |
| "loss": 3.8999, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.576681628157916e-05, | |
| "loss": 3.9031, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5758430334068637e-05, | |
| "loss": 3.9092, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5750060765361846e-05, | |
| "loss": 3.8931, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5741691196655055e-05, | |
| "loss": 3.882, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5733305249144535e-05, | |
| "loss": 3.904, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5724919301634015e-05, | |
| "loss": 3.8882, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5716533354123495e-05, | |
| "loss": 3.8844, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5708147406612975e-05, | |
| "loss": 3.9049, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569976145910246e-05, | |
| "loss": 3.8985, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.569137551159194e-05, | |
| "loss": 3.8881, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.568298956408142e-05, | |
| "loss": 3.8861, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.567461999537463e-05, | |
| "loss": 3.8798, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.566623404786411e-05, | |
| "loss": 3.8905, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.565784810035359e-05, | |
| "loss": 3.9019, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.56494785316468e-05, | |
| "loss": 3.8974, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.564109258413628e-05, | |
| "loss": 3.9009, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.563270663662576e-05, | |
| "loss": 3.8929, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.562432068911524e-05, | |
| "loss": 3.9086, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.561593474160472e-05, | |
| "loss": 3.8838, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.56075487940942e-05, | |
| "loss": 3.8955, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.559916284658368e-05, | |
| "loss": 3.8933, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5590793277876895e-05, | |
| "loss": 3.8775, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.5582407330366375e-05, | |
| "loss": 3.8993, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.557402138285585e-05, | |
| "loss": 3.8966, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.556563543534533e-05, | |
| "loss": 3.8931, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.555724948783481e-05, | |
| "loss": 3.8829, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.554886354032429e-05, | |
| "loss": 3.8743, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.55404939716175e-05, | |
| "loss": 3.8874, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.553210802410698e-05, | |
| "loss": 3.888, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.552372207659646e-05, | |
| "loss": 3.8909, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.551533612908594e-05, | |
| "loss": 3.8807, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.550695018157542e-05, | |
| "loss": 3.8897, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549858061286863e-05, | |
| "loss": 3.8825, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.549019466535811e-05, | |
| "loss": 3.8793, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.548180871784759e-05, | |
| "loss": 3.8804, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.547342277033707e-05, | |
| "loss": 3.8775, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.546505320163028e-05, | |
| "loss": 3.8967, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.545666725411976e-05, | |
| "loss": 3.8953, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.544828130660924e-05, | |
| "loss": 3.8939, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.543989535909872e-05, | |
| "loss": 3.8717, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.54315094115882e-05, | |
| "loss": 3.8851, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.542313984288141e-05, | |
| "loss": 3.8799, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.541475389537089e-05, | |
| "loss": 3.8857, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.540636794786037e-05, | |
| "loss": 3.8774, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.539798200034985e-05, | |
| "loss": 3.8769, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5389612431643066e-05, | |
| "loss": 3.8699, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5381226484132546e-05, | |
| "loss": 3.8907, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5372840536622026e-05, | |
| "loss": 3.8632, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5364454589111506e-05, | |
| "loss": 3.879, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5356068641600986e-05, | |
| "loss": 3.8781, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5347699072894195e-05, | |
| "loss": 3.8771, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5339313125383675e-05, | |
| "loss": 3.8704, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5330927177873155e-05, | |
| "loss": 3.887, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5322541230362635e-05, | |
| "loss": 3.8765, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5314171661655844e-05, | |
| "loss": 3.8749, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5305785714145324e-05, | |
| "loss": 3.8868, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5297399766634804e-05, | |
| "loss": 3.8723, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5289013819124284e-05, | |
| "loss": 3.8844, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.52806442504175e-05, | |
| "loss": 3.891, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.527225830290698e-05, | |
| "loss": 3.878, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.526387235539646e-05, | |
| "loss": 3.8731, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.525548640788594e-05, | |
| "loss": 3.8714, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.524711683917915e-05, | |
| "loss": 3.8734, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523873089166863e-05, | |
| "loss": 3.8704, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.523034494415811e-05, | |
| "loss": 3.8772, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.522195899664759e-05, | |
| "loss": 3.8629, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.521357304913707e-05, | |
| "loss": 3.8834, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.520520348043028e-05, | |
| "loss": 3.8785, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.519681753291976e-05, | |
| "loss": 3.8753, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.518843158540924e-05, | |
| "loss": 3.8618, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5180045637898725e-05, | |
| "loss": 3.8704, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5171676069191934e-05, | |
| "loss": 3.8687, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5163290121681414e-05, | |
| "loss": 3.88, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5154904174170894e-05, | |
| "loss": 3.8827, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5146518226660373e-05, | |
| "loss": 3.8678, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.513814865795358e-05, | |
| "loss": 3.8562, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512976271044306e-05, | |
| "loss": 3.8768, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.512137676293254e-05, | |
| "loss": 3.868, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.511299081542202e-05, | |
| "loss": 3.8832, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.510462124671523e-05, | |
| "loss": 3.8621, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.509623529920471e-05, | |
| "loss": 3.8745, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.508784935169419e-05, | |
| "loss": 3.8729, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507946340418368e-05, | |
| "loss": 3.8761, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.507107745667316e-05, | |
| "loss": 3.8702, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.506270788796637e-05, | |
| "loss": 3.8704, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.505432194045585e-05, | |
| "loss": 3.8622, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.504593599294533e-05, | |
| "loss": 3.8658, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.50375500454348e-05, | |
| "loss": 3.8732, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5029180476728016e-05, | |
| "loss": 3.8697, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5020794529217496e-05, | |
| "loss": 3.8607, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.5012408581706976e-05, | |
| "loss": 3.87, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.500402263419645e-05, | |
| "loss": 3.8628, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.9311435222625732, | |
| "eval_runtime": 301.8823, | |
| "eval_samples_per_second": 1264.039, | |
| "eval_steps_per_second": 39.502, | |
| "step": 305280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.499563668668593e-05, | |
| "loss": 3.8512, | |
| "step": 305664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4987250739175416e-05, | |
| "loss": 3.8564, | |
| "step": 306176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4978864791664896e-05, | |
| "loss": 3.874, | |
| "step": 306688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4970478844154376e-05, | |
| "loss": 3.8584, | |
| "step": 307200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4962092896643856e-05, | |
| "loss": 3.8741, | |
| "step": 307712 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4953706949133336e-05, | |
| "loss": 3.857, | |
| "step": 308224 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4945321001622816e-05, | |
| "loss": 3.8632, | |
| "step": 308736 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4936935054112296e-05, | |
| "loss": 3.8569, | |
| "step": 309248 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4928565485405505e-05, | |
| "loss": 3.86, | |
| "step": 309760 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4920179537894985e-05, | |
| "loss": 3.862, | |
| "step": 310272 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4911793590384465e-05, | |
| "loss": 3.8576, | |
| "step": 310784 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.4903407642873945e-05, | |
| "loss": 3.8679, | |
| "step": 311296 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.489505445297088e-05, | |
| "loss": 3.8551, | |
| "step": 311808 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.488666850546037e-05, | |
| "loss": 3.8523, | |
| "step": 312320 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.487828255794985e-05, | |
| "loss": 3.8532, | |
| "step": 312832 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486989661043933e-05, | |
| "loss": 3.8438, | |
| "step": 313344 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.486151066292881e-05, | |
| "loss": 3.8535, | |
| "step": 313856 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.485312471541829e-05, | |
| "loss": 3.8562, | |
| "step": 314368 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.484473876790777e-05, | |
| "loss": 3.8485, | |
| "step": 314880 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.483635282039725e-05, | |
| "loss": 3.8751, | |
| "step": 315392 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.482798325169046e-05, | |
| "loss": 3.8593, | |
| "step": 315904 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481959730417994e-05, | |
| "loss": 3.8642, | |
| "step": 316416 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.481121135666942e-05, | |
| "loss": 3.8544, | |
| "step": 316928 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.48028254091589e-05, | |
| "loss": 3.8584, | |
| "step": 317440 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.479445584045211e-05, | |
| "loss": 3.8493, | |
| "step": 317952 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.478606989294159e-05, | |
| "loss": 3.8598, | |
| "step": 318464 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.477768394543107e-05, | |
| "loss": 3.8531, | |
| "step": 318976 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476929799792055e-05, | |
| "loss": 3.8464, | |
| "step": 319488 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.476092842921376e-05, | |
| "loss": 3.8427, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.475254248170324e-05, | |
| "loss": 3.8438, | |
| "step": 320512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.474415653419272e-05, | |
| "loss": 3.8537, | |
| "step": 321024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.47357705866822e-05, | |
| "loss": 3.8549, | |
| "step": 321536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.472740101797541e-05, | |
| "loss": 3.8548, | |
| "step": 322048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.471901507046489e-05, | |
| "loss": 3.8567, | |
| "step": 322560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.471062912295437e-05, | |
| "loss": 3.8415, | |
| "step": 323072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.470224317544385e-05, | |
| "loss": 3.848, | |
| "step": 323584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.469387360673706e-05, | |
| "loss": 3.8464, | |
| "step": 324096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.468548765922654e-05, | |
| "loss": 3.8352, | |
| "step": 324608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.467710171171602e-05, | |
| "loss": 3.8431, | |
| "step": 325120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.46687157642055e-05, | |
| "loss": 3.8381, | |
| "step": 325632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.466034619549872e-05, | |
| "loss": 3.8492, | |
| "step": 326144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.46519602479882e-05, | |
| "loss": 3.8496, | |
| "step": 326656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.464357430047768e-05, | |
| "loss": 3.8476, | |
| "step": 327168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.463518835296716e-05, | |
| "loss": 3.8382, | |
| "step": 327680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4626818784260366e-05, | |
| "loss": 3.8532, | |
| "step": 328192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4618432836749846e-05, | |
| "loss": 3.8346, | |
| "step": 328704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4610046889239326e-05, | |
| "loss": 3.847, | |
| "step": 329216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4601660941728806e-05, | |
| "loss": 3.8396, | |
| "step": 329728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4593291373022015e-05, | |
| "loss": 3.8259, | |
| "step": 330240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4584905425511495e-05, | |
| "loss": 3.8515, | |
| "step": 330752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4576519478000975e-05, | |
| "loss": 3.8398, | |
| "step": 331264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4568133530490455e-05, | |
| "loss": 3.8377, | |
| "step": 331776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455976396178367e-05, | |
| "loss": 3.8331, | |
| "step": 332288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.455137801427315e-05, | |
| "loss": 3.832, | |
| "step": 332800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.454299206676263e-05, | |
| "loss": 3.8247, | |
| "step": 333312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.453460611925211e-05, | |
| "loss": 3.8374, | |
| "step": 333824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.452623655054532e-05, | |
| "loss": 3.8395, | |
| "step": 334336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.45178506030348e-05, | |
| "loss": 3.8399, | |
| "step": 334848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450946465552428e-05, | |
| "loss": 3.845, | |
| "step": 335360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.450107870801376e-05, | |
| "loss": 3.8333, | |
| "step": 335872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.449270913930697e-05, | |
| "loss": 3.8178, | |
| "step": 336384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.448432319179645e-05, | |
| "loss": 3.8442, | |
| "step": 336896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.447593724428593e-05, | |
| "loss": 3.8243, | |
| "step": 337408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.446755129677541e-05, | |
| "loss": 3.8185, | |
| "step": 337920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4459181728068624e-05, | |
| "loss": 3.8449, | |
| "step": 338432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4450795780558104e-05, | |
| "loss": 3.8346, | |
| "step": 338944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4442409833047584e-05, | |
| "loss": 3.8271, | |
| "step": 339456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.443402388553706e-05, | |
| "loss": 3.8253, | |
| "step": 339968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.442565431683027e-05, | |
| "loss": 3.817, | |
| "step": 340480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.441726836931975e-05, | |
| "loss": 3.8277, | |
| "step": 340992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.440888242180923e-05, | |
| "loss": 3.8424, | |
| "step": 341504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4400496474298706e-05, | |
| "loss": 3.8328, | |
| "step": 342016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.439212690559192e-05, | |
| "loss": 3.8383, | |
| "step": 342528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.43837409580814e-05, | |
| "loss": 3.8307, | |
| "step": 343040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.437535501057088e-05, | |
| "loss": 3.8485, | |
| "step": 343552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.436696906306036e-05, | |
| "loss": 3.8243, | |
| "step": 344064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.435859949435358e-05, | |
| "loss": 3.8353, | |
| "step": 344576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.435021354684306e-05, | |
| "loss": 3.8333, | |
| "step": 345088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.434182759933253e-05, | |
| "loss": 3.8125, | |
| "step": 345600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.433344165182201e-05, | |
| "loss": 3.8432, | |
| "step": 346112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.432507208311523e-05, | |
| "loss": 3.8358, | |
| "step": 346624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.431668613560471e-05, | |
| "loss": 3.8315, | |
| "step": 347136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.430830018809418e-05, | |
| "loss": 3.8246, | |
| "step": 347648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.429991424058366e-05, | |
| "loss": 3.8127, | |
| "step": 348160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4291544671876876e-05, | |
| "loss": 3.8271, | |
| "step": 348672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4283158724366356e-05, | |
| "loss": 3.8252, | |
| "step": 349184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4274772776855836e-05, | |
| "loss": 3.8319, | |
| "step": 349696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.4266386829345316e-05, | |
| "loss": 3.8222, | |
| "step": 350208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.425801726063853e-05, | |
| "loss": 3.8269, | |
| "step": 350720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4249631313128005e-05, | |
| "loss": 3.8205, | |
| "step": 351232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4241245365617485e-05, | |
| "loss": 3.8242, | |
| "step": 351744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4232859418106965e-05, | |
| "loss": 3.8199, | |
| "step": 352256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4224473470596445e-05, | |
| "loss": 3.8198, | |
| "step": 352768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4216103901889654e-05, | |
| "loss": 3.8365, | |
| "step": 353280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4207717954379134e-05, | |
| "loss": 3.8362, | |
| "step": 353792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4199332006868614e-05, | |
| "loss": 3.8369, | |
| "step": 354304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4190946059358094e-05, | |
| "loss": 3.812, | |
| "step": 354816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.418257649065131e-05, | |
| "loss": 3.8257, | |
| "step": 355328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.417419054314079e-05, | |
| "loss": 3.8235, | |
| "step": 355840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.416580459563027e-05, | |
| "loss": 3.8266, | |
| "step": 356352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.415741864811975e-05, | |
| "loss": 3.8222, | |
| "step": 356864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414904907941296e-05, | |
| "loss": 3.8153, | |
| "step": 357376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.414066313190244e-05, | |
| "loss": 3.8175, | |
| "step": 357888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.413227718439192e-05, | |
| "loss": 3.8309, | |
| "step": 358400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.412390761568513e-05, | |
| "loss": 3.8066, | |
| "step": 358912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.411552166817461e-05, | |
| "loss": 3.8206, | |
| "step": 359424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.410713572066409e-05, | |
| "loss": 3.816, | |
| "step": 359936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.409874977315357e-05, | |
| "loss": 3.8249, | |
| "step": 360448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.4090380204446776e-05, | |
| "loss": 3.8135, | |
| "step": 360960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.408199425693626e-05, | |
| "loss": 3.8269, | |
| "step": 361472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.407360830942574e-05, | |
| "loss": 3.8226, | |
| "step": 361984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.406522236191522e-05, | |
| "loss": 3.8158, | |
| "step": 362496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.405685279320843e-05, | |
| "loss": 3.8292, | |
| "step": 363008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.404846684569791e-05, | |
| "loss": 3.8173, | |
| "step": 363520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.404008089818739e-05, | |
| "loss": 3.8279, | |
| "step": 364032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.403169495067687e-05, | |
| "loss": 3.8308, | |
| "step": 364544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.402332538197008e-05, | |
| "loss": 3.8189, | |
| "step": 365056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.401493943445956e-05, | |
| "loss": 3.8157, | |
| "step": 365568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.400655348694904e-05, | |
| "loss": 3.8162, | |
| "step": 366080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.399816753943852e-05, | |
| "loss": 3.8145, | |
| "step": 366592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3989781591928e-05, | |
| "loss": 3.8159, | |
| "step": 367104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.398141202322122e-05, | |
| "loss": 3.819, | |
| "step": 367616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.39730260757107e-05, | |
| "loss": 3.8075, | |
| "step": 368128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.396464012820018e-05, | |
| "loss": 3.8266, | |
| "step": 368640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3956254180689657e-05, | |
| "loss": 3.8214, | |
| "step": 369152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3947884611982866e-05, | |
| "loss": 3.8225, | |
| "step": 369664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3939498664472346e-05, | |
| "loss": 3.8074, | |
| "step": 370176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3931112716961826e-05, | |
| "loss": 3.8136, | |
| "step": 370688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3922726769451306e-05, | |
| "loss": 3.8136, | |
| "step": 371200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3914357200744515e-05, | |
| "loss": 3.8221, | |
| "step": 371712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3905971253233995e-05, | |
| "loss": 3.8232, | |
| "step": 372224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3897585305723475e-05, | |
| "loss": 3.8162, | |
| "step": 372736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3889199358212955e-05, | |
| "loss": 3.802, | |
| "step": 373248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.388082978950617e-05, | |
| "loss": 3.8224, | |
| "step": 373760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.387244384199565e-05, | |
| "loss": 3.8133, | |
| "step": 374272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.386405789448513e-05, | |
| "loss": 3.8251, | |
| "step": 374784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.385567194697461e-05, | |
| "loss": 3.806, | |
| "step": 375296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.384730237826782e-05, | |
| "loss": 3.8232, | |
| "step": 375808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.38389164307573e-05, | |
| "loss": 3.8184, | |
| "step": 376320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.383053048324678e-05, | |
| "loss": 3.8239, | |
| "step": 376832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.382216091453999e-05, | |
| "loss": 3.8137, | |
| "step": 377344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.381377496702947e-05, | |
| "loss": 3.8142, | |
| "step": 377856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.380538901951895e-05, | |
| "loss": 3.8075, | |
| "step": 378368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.379700307200843e-05, | |
| "loss": 3.8127, | |
| "step": 378880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.378863350330164e-05, | |
| "loss": 3.8181, | |
| "step": 379392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3780247555791124e-05, | |
| "loss": 3.8171, | |
| "step": 379904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3771861608280604e-05, | |
| "loss": 3.8073, | |
| "step": 380416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3763475660770084e-05, | |
| "loss": 3.8124, | |
| "step": 380928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.375510609206329e-05, | |
| "loss": 3.8124, | |
| "step": 381440 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.906532049179077, | |
| "eval_runtime": 304.2477, | |
| "eval_samples_per_second": 1254.211, | |
| "eval_steps_per_second": 39.195, | |
| "step": 381600 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.374672014455277e-05, | |
| "loss": 3.8086, | |
| "step": 381952 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.373833419704225e-05, | |
| "loss": 3.7994, | |
| "step": 382464 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372994824953173e-05, | |
| "loss": 3.8196, | |
| "step": 382976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.372156230202121e-05, | |
| "loss": 3.8058, | |
| "step": 383488 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.371319273331442e-05, | |
| "loss": 3.819, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.37048067858039e-05, | |
| "loss": 3.8062, | |
| "step": 384512 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.369642083829338e-05, | |
| "loss": 3.8083, | |
| "step": 385024 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.368805126958659e-05, | |
| "loss": 3.8078, | |
| "step": 385536 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367966532207607e-05, | |
| "loss": 3.8056, | |
| "step": 386048 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.367127937456556e-05, | |
| "loss": 3.8076, | |
| "step": 386560 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.366289342705504e-05, | |
| "loss": 3.8088, | |
| "step": 387072 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.365452385834825e-05, | |
| "loss": 3.8117, | |
| "step": 387584 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3646154289641456e-05, | |
| "loss": 3.8042, | |
| "step": 388096 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3637768342130936e-05, | |
| "loss": 3.7979, | |
| "step": 388608 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3629382394620416e-05, | |
| "loss": 3.803, | |
| "step": 389120 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3620996447109896e-05, | |
| "loss": 3.7891, | |
| "step": 389632 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3612610499599376e-05, | |
| "loss": 3.7991, | |
| "step": 390144 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3604224552088856e-05, | |
| "loss": 3.803, | |
| "step": 390656 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3595838604578336e-05, | |
| "loss": 3.7999, | |
| "step": 391168 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3587452657067816e-05, | |
| "loss": 3.8224, | |
| "step": 391680 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.3579083088361025e-05, | |
| "loss": 3.8097, | |
| "step": 392192 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.357069714085051e-05, | |
| "loss": 3.8116, | |
| "step": 392704 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.356231119333999e-05, | |
| "loss": 3.8027, | |
| "step": 393216 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.355392524582947e-05, | |
| "loss": 3.805, | |
| "step": 393728 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.354555567712268e-05, | |
| "loss": 3.7993, | |
| "step": 394240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.353716972961216e-05, | |
| "loss": 3.8069, | |
| "step": 394752 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352878378210164e-05, | |
| "loss": 3.7988, | |
| "step": 395264 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.352039783459112e-05, | |
| "loss": 3.7972, | |
| "step": 395776 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.351202826588433e-05, | |
| "loss": 3.7922, | |
| "step": 396288 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.350364231837381e-05, | |
| "loss": 3.7927, | |
| "step": 396800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.349525637086329e-05, | |
| "loss": 3.8003, | |
| "step": 397312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.348687042335277e-05, | |
| "loss": 3.8056, | |
| "step": 397824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.347850085464598e-05, | |
| "loss": 3.8008, | |
| "step": 398336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3470114907135465e-05, | |
| "loss": 3.8078, | |
| "step": 398848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3461728959624945e-05, | |
| "loss": 3.7898, | |
| "step": 399360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3453359390918154e-05, | |
| "loss": 3.7983, | |
| "step": 399872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3444973443407634e-05, | |
| "loss": 3.7946, | |
| "step": 400384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3436587495897114e-05, | |
| "loss": 3.7857, | |
| "step": 400896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3428201548386594e-05, | |
| "loss": 3.7961, | |
| "step": 401408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.34198319796798e-05, | |
| "loss": 3.7817, | |
| "step": 401920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.341144603216928e-05, | |
| "loss": 3.7979, | |
| "step": 402432 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.340306008465876e-05, | |
| "loss": 3.8001, | |
| "step": 402944 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.339467413714824e-05, | |
| "loss": 3.8008, | |
| "step": 403456 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.338630456844145e-05, | |
| "loss": 3.7902, | |
| "step": 403968 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.337791862093093e-05, | |
| "loss": 3.8034, | |
| "step": 404480 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.336953267342042e-05, | |
| "loss": 3.7812, | |
| "step": 404992 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.33611467259099e-05, | |
| "loss": 3.8009, | |
| "step": 405504 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.335277715720311e-05, | |
| "loss": 3.7887, | |
| "step": 406016 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.334439120969259e-05, | |
| "loss": 3.7712, | |
| "step": 406528 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.333600526218207e-05, | |
| "loss": 3.807, | |
| "step": 407040 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.332761931467155e-05, | |
| "loss": 3.7884, | |
| "step": 407552 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331924974596476e-05, | |
| "loss": 3.7939, | |
| "step": 408064 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.331086379845424e-05, | |
| "loss": 3.7802, | |
| "step": 408576 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3302477850943717e-05, | |
| "loss": 3.7815, | |
| "step": 409088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3294091903433197e-05, | |
| "loss": 3.7807, | |
| "step": 409600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3285722334726406e-05, | |
| "loss": 3.7848, | |
| "step": 410112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3277336387215886e-05, | |
| "loss": 3.7934, | |
| "step": 410624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326895043970537e-05, | |
| "loss": 3.7889, | |
| "step": 411136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.326056449219485e-05, | |
| "loss": 3.8014, | |
| "step": 411648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.325219492348806e-05, | |
| "loss": 3.7835, | |
| "step": 412160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.324380897597754e-05, | |
| "loss": 3.7724, | |
| "step": 412672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.323542302846702e-05, | |
| "loss": 3.795, | |
| "step": 413184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.32270370809565e-05, | |
| "loss": 3.774, | |
| "step": 413696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.321866751224971e-05, | |
| "loss": 3.774, | |
| "step": 414208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.321028156473919e-05, | |
| "loss": 3.7947, | |
| "step": 414720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.320189561722867e-05, | |
| "loss": 3.789, | |
| "step": 415232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.319350966971815e-05, | |
| "loss": 3.776, | |
| "step": 415744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.318514010101136e-05, | |
| "loss": 3.7795, | |
| "step": 416256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.317675415350084e-05, | |
| "loss": 3.769, | |
| "step": 416768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3168368205990326e-05, | |
| "loss": 3.7787, | |
| "step": 417280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.31599822584798e-05, | |
| "loss": 3.7935, | |
| "step": 417792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3151612689773015e-05, | |
| "loss": 3.7865, | |
| "step": 418304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3143226742262495e-05, | |
| "loss": 3.7895, | |
| "step": 418816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3134840794751975e-05, | |
| "loss": 3.7858, | |
| "step": 419328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.312645484724145e-05, | |
| "loss": 3.7998, | |
| "step": 419840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3118085278534664e-05, | |
| "loss": 3.7775, | |
| "step": 420352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.3109699331024144e-05, | |
| "loss": 3.7909, | |
| "step": 420864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.310131338351362e-05, | |
| "loss": 3.7835, | |
| "step": 421376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.30929274360031e-05, | |
| "loss": 3.766, | |
| "step": 421888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.308455786729631e-05, | |
| "loss": 3.7933, | |
| "step": 422400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.307617191978579e-05, | |
| "loss": 3.7882, | |
| "step": 422912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.306778597227527e-05, | |
| "loss": 3.785, | |
| "step": 423424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.305940002476475e-05, | |
| "loss": 3.7804, | |
| "step": 423936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.305103045605797e-05, | |
| "loss": 3.7701, | |
| "step": 424448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.304264450854745e-05, | |
| "loss": 3.7727, | |
| "step": 424960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.303427493984066e-05, | |
| "loss": 3.7792, | |
| "step": 425472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.302588899233014e-05, | |
| "loss": 3.7861, | |
| "step": 425984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.301750304481962e-05, | |
| "loss": 3.7753, | |
| "step": 426496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.300911709730909e-05, | |
| "loss": 3.7821, | |
| "step": 427008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.300073114979857e-05, | |
| "loss": 3.774, | |
| "step": 427520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.299234520228805e-05, | |
| "loss": 3.7781, | |
| "step": 428032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.298395925477753e-05, | |
| "loss": 3.7728, | |
| "step": 428544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.297557330726702e-05, | |
| "loss": 3.7764, | |
| "step": 429056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2967203738560227e-05, | |
| "loss": 3.7874, | |
| "step": 429568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2958817791049707e-05, | |
| "loss": 3.7899, | |
| "step": 430080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2950431843539186e-05, | |
| "loss": 3.7918, | |
| "step": 430592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2942045896028666e-05, | |
| "loss": 3.7659, | |
| "step": 431104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2933676327321876e-05, | |
| "loss": 3.7823, | |
| "step": 431616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2925290379811355e-05, | |
| "loss": 3.7719, | |
| "step": 432128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2916904432300835e-05, | |
| "loss": 3.785, | |
| "step": 432640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2908518484790315e-05, | |
| "loss": 3.7736, | |
| "step": 433152 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2900148916083525e-05, | |
| "loss": 3.7747, | |
| "step": 433664 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2891762968573004e-05, | |
| "loss": 3.7699, | |
| "step": 434176 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2883377021062484e-05, | |
| "loss": 3.7844, | |
| "step": 434688 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.287499107355197e-05, | |
| "loss": 3.7604, | |
| "step": 435200 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.286662150484518e-05, | |
| "loss": 3.78, | |
| "step": 435712 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.285823555733466e-05, | |
| "loss": 3.7664, | |
| "step": 436224 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284984960982414e-05, | |
| "loss": 3.7817, | |
| "step": 436736 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.284146366231362e-05, | |
| "loss": 3.7712, | |
| "step": 437248 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.283309409360683e-05, | |
| "loss": 3.775, | |
| "step": 437760 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.282470814609631e-05, | |
| "loss": 3.7768, | |
| "step": 438272 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.281632219858579e-05, | |
| "loss": 3.7771, | |
| "step": 438784 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2807952629879e-05, | |
| "loss": 3.7788, | |
| "step": 439296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.279956668236848e-05, | |
| "loss": 3.7736, | |
| "step": 439808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.279118073485796e-05, | |
| "loss": 3.7776, | |
| "step": 440320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.278279478734744e-05, | |
| "loss": 3.7923, | |
| "step": 440832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2774425218640654e-05, | |
| "loss": 3.7736, | |
| "step": 441344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2766039271130134e-05, | |
| "loss": 3.7709, | |
| "step": 441856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2757653323619614e-05, | |
| "loss": 3.7739, | |
| "step": 442368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2749267376109094e-05, | |
| "loss": 3.7728, | |
| "step": 442880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.27408978074023e-05, | |
| "loss": 3.7637, | |
| "step": 443392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.273251185989178e-05, | |
| "loss": 3.777, | |
| "step": 443904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.272412591238126e-05, | |
| "loss": 3.7625, | |
| "step": 444416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.271573996487074e-05, | |
| "loss": 3.7804, | |
| "step": 444928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.270737039616395e-05, | |
| "loss": 3.7795, | |
| "step": 445440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269898444865343e-05, | |
| "loss": 3.7752, | |
| "step": 445952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.269059850114291e-05, | |
| "loss": 3.7654, | |
| "step": 446464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.268221255363239e-05, | |
| "loss": 3.7665, | |
| "step": 446976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.267384298492561e-05, | |
| "loss": 3.7722, | |
| "step": 447488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.266545703741509e-05, | |
| "loss": 3.7797, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.265707108990457e-05, | |
| "loss": 3.7749, | |
| "step": 448512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.264868514239405e-05, | |
| "loss": 3.7784, | |
| "step": 449024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2640315573687257e-05, | |
| "loss": 3.7578, | |
| "step": 449536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2631929626176737e-05, | |
| "loss": 3.7778, | |
| "step": 450048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2623543678666216e-05, | |
| "loss": 3.77, | |
| "step": 450560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2615157731155696e-05, | |
| "loss": 3.7815, | |
| "step": 451072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2606788162448906e-05, | |
| "loss": 3.762, | |
| "step": 451584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2598402214938386e-05, | |
| "loss": 3.782, | |
| "step": 452096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2590016267427865e-05, | |
| "loss": 3.7723, | |
| "step": 452608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2581630319917345e-05, | |
| "loss": 3.7819, | |
| "step": 453120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.257326075121056e-05, | |
| "loss": 3.7644, | |
| "step": 453632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.256487480370004e-05, | |
| "loss": 3.7797, | |
| "step": 454144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.255648885618952e-05, | |
| "loss": 3.7587, | |
| "step": 454656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.2548102908679e-05, | |
| "loss": 3.7717, | |
| "step": 455168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.253973333997221e-05, | |
| "loss": 3.7758, | |
| "step": 455680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.253134739246169e-05, | |
| "loss": 3.768, | |
| "step": 456192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.252296144495117e-05, | |
| "loss": 3.7699, | |
| "step": 456704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.251457549744065e-05, | |
| "loss": 3.7633, | |
| "step": 457216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.250620592873386e-05, | |
| "loss": 3.7715, | |
| "step": 457728 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.89036226272583, | |
| "eval_runtime": 303.6875, | |
| "eval_samples_per_second": 1256.525, | |
| "eval_steps_per_second": 39.267, | |
| "step": 457920 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.249781998122334e-05, | |
| "loss": 3.7661, | |
| "step": 458240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.248943403371282e-05, | |
| "loss": 3.7565, | |
| "step": 458752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.24810480862023e-05, | |
| "loss": 3.7719, | |
| "step": 459264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.247266213869178e-05, | |
| "loss": 3.7629, | |
| "step": 459776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2464276191181266e-05, | |
| "loss": 3.7766, | |
| "step": 460288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2455890243670746e-05, | |
| "loss": 3.7613, | |
| "step": 460800 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2447504296160226e-05, | |
| "loss": 3.7666, | |
| "step": 461312 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.24391183486497e-05, | |
| "loss": 3.767, | |
| "step": 461824 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2430748779942915e-05, | |
| "loss": 3.7627, | |
| "step": 462336 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2422362832432395e-05, | |
| "loss": 3.7655, | |
| "step": 462848 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2413976884921875e-05, | |
| "loss": 3.7662, | |
| "step": 463360 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.240559093741135e-05, | |
| "loss": 3.7694, | |
| "step": 463872 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.239723774750829e-05, | |
| "loss": 3.7615, | |
| "step": 464384 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.238885179999777e-05, | |
| "loss": 3.7562, | |
| "step": 464896 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.238046585248725e-05, | |
| "loss": 3.7639, | |
| "step": 465408 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.237207990497673e-05, | |
| "loss": 3.7461, | |
| "step": 465920 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.236369395746622e-05, | |
| "loss": 3.7573, | |
| "step": 466432 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.23553080099557e-05, | |
| "loss": 3.7595, | |
| "step": 466944 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.234692206244517e-05, | |
| "loss": 3.7555, | |
| "step": 467456 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.233853611493465e-05, | |
| "loss": 3.7823, | |
| "step": 467968 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.233016654622787e-05, | |
| "loss": 3.7707, | |
| "step": 468480 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.232178059871735e-05, | |
| "loss": 3.7702, | |
| "step": 468992 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.231339465120682e-05, | |
| "loss": 3.7578, | |
| "step": 469504 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.23050087036963e-05, | |
| "loss": 3.7635, | |
| "step": 470016 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.229663913498952e-05, | |
| "loss": 3.7575, | |
| "step": 470528 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2288253187479e-05, | |
| "loss": 3.7639, | |
| "step": 471040 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.227986723996847e-05, | |
| "loss": 3.7588, | |
| "step": 471552 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.227148129245796e-05, | |
| "loss": 3.7593, | |
| "step": 472064 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.226311172375117e-05, | |
| "loss": 3.749, | |
| "step": 472576 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.2254725776240646e-05, | |
| "loss": 3.7546, | |
| "step": 473088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2246339828730126e-05, | |
| "loss": 3.7548, | |
| "step": 473600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2237953881219606e-05, | |
| "loss": 3.766, | |
| "step": 474112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.222958431251282e-05, | |
| "loss": 3.7629, | |
| "step": 474624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2221198365002295e-05, | |
| "loss": 3.7678, | |
| "step": 475136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2212812417491775e-05, | |
| "loss": 3.7502, | |
| "step": 475648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.220444284878499e-05, | |
| "loss": 3.7539, | |
| "step": 476160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.219605690127447e-05, | |
| "loss": 3.7575, | |
| "step": 476672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2187670953763944e-05, | |
| "loss": 3.7451, | |
| "step": 477184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2179285006253424e-05, | |
| "loss": 3.7529, | |
| "step": 477696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.217091543754664e-05, | |
| "loss": 3.7474, | |
| "step": 478208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.216252949003612e-05, | |
| "loss": 3.7551, | |
| "step": 478720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.21541435425256e-05, | |
| "loss": 3.76, | |
| "step": 479232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.214575759501508e-05, | |
| "loss": 3.7587, | |
| "step": 479744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2137388026308296e-05, | |
| "loss": 3.7524, | |
| "step": 480256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.212900207879777e-05, | |
| "loss": 3.7579, | |
| "step": 480768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.212061613128725e-05, | |
| "loss": 3.7477, | |
| "step": 481280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.211223018377673e-05, | |
| "loss": 3.7582, | |
| "step": 481792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.210384423626621e-05, | |
| "loss": 3.7504, | |
| "step": 482304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.209545828875569e-05, | |
| "loss": 3.7304, | |
| "step": 482816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.208707234124517e-05, | |
| "loss": 3.7733, | |
| "step": 483328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.207870277253838e-05, | |
| "loss": 3.7459, | |
| "step": 483840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2070316825027865e-05, | |
| "loss": 3.755, | |
| "step": 484352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2061947256321074e-05, | |
| "loss": 3.7384, | |
| "step": 484864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2053561308810554e-05, | |
| "loss": 3.7465, | |
| "step": 485376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2045175361300034e-05, | |
| "loss": 3.7335, | |
| "step": 485888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2036789413789514e-05, | |
| "loss": 3.7483, | |
| "step": 486400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.2028403466278993e-05, | |
| "loss": 3.7509, | |
| "step": 486912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.20200338975722e-05, | |
| "loss": 3.7513, | |
| "step": 487424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.201164795006168e-05, | |
| "loss": 3.7591, | |
| "step": 487936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.200326200255116e-05, | |
| "loss": 3.7446, | |
| "step": 488448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.199487605504064e-05, | |
| "loss": 3.7336, | |
| "step": 488960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.198649010753012e-05, | |
| "loss": 3.7587, | |
| "step": 489472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.19781041600196e-05, | |
| "loss": 3.7337, | |
| "step": 489984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.196971821250908e-05, | |
| "loss": 3.7317, | |
| "step": 490496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.196133226499856e-05, | |
| "loss": 3.7553, | |
| "step": 491008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.195296269629178e-05, | |
| "loss": 3.7507, | |
| "step": 491520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.194457674878126e-05, | |
| "loss": 3.7361, | |
| "step": 492032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.193619080127074e-05, | |
| "loss": 3.7424, | |
| "step": 492544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.192780485376022e-05, | |
| "loss": 3.7318, | |
| "step": 493056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.191943528505343e-05, | |
| "loss": 3.7416, | |
| "step": 493568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.191104933754291e-05, | |
| "loss": 3.7503, | |
| "step": 494080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.190266339003239e-05, | |
| "loss": 3.7479, | |
| "step": 494592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.189427744252187e-05, | |
| "loss": 3.7542, | |
| "step": 495104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1885924252618805e-05, | |
| "loss": 3.749, | |
| "step": 495616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1877538305108285e-05, | |
| "loss": 3.757, | |
| "step": 496128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1869152357597765e-05, | |
| "loss": 3.7363, | |
| "step": 496640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.186076641008725e-05, | |
| "loss": 3.7545, | |
| "step": 497152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.185238046257673e-05, | |
| "loss": 3.74, | |
| "step": 497664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.184399451506621e-05, | |
| "loss": 3.7331, | |
| "step": 498176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.183560856755569e-05, | |
| "loss": 3.7514, | |
| "step": 498688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.182722262004517e-05, | |
| "loss": 3.7531, | |
| "step": 499200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.181885305133838e-05, | |
| "loss": 3.7471, | |
| "step": 499712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.181046710382786e-05, | |
| "loss": 3.7411, | |
| "step": 500224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.180208115631734e-05, | |
| "loss": 3.7326, | |
| "step": 500736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.179371158761055e-05, | |
| "loss": 3.733, | |
| "step": 501248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.178534201890376e-05, | |
| "loss": 3.7429, | |
| "step": 501760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.177695607139324e-05, | |
| "loss": 3.7503, | |
| "step": 502272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.176857012388272e-05, | |
| "loss": 3.7359, | |
| "step": 502784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.1760184176372206e-05, | |
| "loss": 3.7451, | |
| "step": 503296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1751798228861685e-05, | |
| "loss": 3.7287, | |
| "step": 503808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1743428660154895e-05, | |
| "loss": 3.7435, | |
| "step": 504320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1735042712644375e-05, | |
| "loss": 3.7348, | |
| "step": 504832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1726656765133854e-05, | |
| "loss": 3.739, | |
| "step": 505344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1718270817623334e-05, | |
| "loss": 3.7494, | |
| "step": 505856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1709884870112814e-05, | |
| "loss": 3.7523, | |
| "step": 506368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1701498922602294e-05, | |
| "loss": 3.752, | |
| "step": 506880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1693129353895503e-05, | |
| "loss": 3.7274, | |
| "step": 507392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1684743406384983e-05, | |
| "loss": 3.7453, | |
| "step": 507904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.167635745887446e-05, | |
| "loss": 3.733, | |
| "step": 508416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.166797151136394e-05, | |
| "loss": 3.7485, | |
| "step": 508928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.165960194265716e-05, | |
| "loss": 3.7368, | |
| "step": 509440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.165121599514664e-05, | |
| "loss": 3.7411, | |
| "step": 509952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.164283004763612e-05, | |
| "loss": 3.7323, | |
| "step": 510464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.16344441001256e-05, | |
| "loss": 3.7434, | |
| "step": 510976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.162607453141881e-05, | |
| "loss": 3.7268, | |
| "step": 511488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.161768858390829e-05, | |
| "loss": 3.7417, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.160930263639777e-05, | |
| "loss": 3.7274, | |
| "step": 512512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.160091668888725e-05, | |
| "loss": 3.7472, | |
| "step": 513024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.159253074137672e-05, | |
| "loss": 3.7324, | |
| "step": 513536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.158416117266994e-05, | |
| "loss": 3.7378, | |
| "step": 514048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.157577522515942e-05, | |
| "loss": 3.7369, | |
| "step": 514560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.15673892776489e-05, | |
| "loss": 3.7418, | |
| "step": 515072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.155900333013838e-05, | |
| "loss": 3.7408, | |
| "step": 515584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.155061738262786e-05, | |
| "loss": 3.7382, | |
| "step": 516096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.154223143511734e-05, | |
| "loss": 3.7437, | |
| "step": 516608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.153384548760682e-05, | |
| "loss": 3.7497, | |
| "step": 517120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.15254595400963e-05, | |
| "loss": 3.7417, | |
| "step": 517632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1517089971389506e-05, | |
| "loss": 3.7358, | |
| "step": 518144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1508704023878986e-05, | |
| "loss": 3.7324, | |
| "step": 518656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1500318076368466e-05, | |
| "loss": 3.738, | |
| "step": 519168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1491932128857946e-05, | |
| "loss": 3.7274, | |
| "step": 519680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1483562560151155e-05, | |
| "loss": 3.7402, | |
| "step": 520192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1475176612640635e-05, | |
| "loss": 3.7237, | |
| "step": 520704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.146680704393385e-05, | |
| "loss": 3.7458, | |
| "step": 521216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.145842109642333e-05, | |
| "loss": 3.7402, | |
| "step": 521728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.145003514891281e-05, | |
| "loss": 3.7392, | |
| "step": 522240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.144164920140229e-05, | |
| "loss": 3.7286, | |
| "step": 522752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.143326325389177e-05, | |
| "loss": 3.7296, | |
| "step": 523264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.142489368518498e-05, | |
| "loss": 3.7354, | |
| "step": 523776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.141650773767446e-05, | |
| "loss": 3.7449, | |
| "step": 524288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.140812179016394e-05, | |
| "loss": 3.7365, | |
| "step": 524800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.139973584265342e-05, | |
| "loss": 3.7394, | |
| "step": 525312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.13913498951429e-05, | |
| "loss": 3.7246, | |
| "step": 525824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.138296394763238e-05, | |
| "loss": 3.7434, | |
| "step": 526336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.137457800012186e-05, | |
| "loss": 3.7322, | |
| "step": 526848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.136620843141507e-05, | |
| "loss": 3.7476, | |
| "step": 527360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.135782248390455e-05, | |
| "loss": 3.726, | |
| "step": 527872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1349436536394035e-05, | |
| "loss": 3.7429, | |
| "step": 528384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1341050588883515e-05, | |
| "loss": 3.7354, | |
| "step": 528896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1332681020176724e-05, | |
| "loss": 3.7506, | |
| "step": 529408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1324295072666204e-05, | |
| "loss": 3.7279, | |
| "step": 529920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.1315909125155684e-05, | |
| "loss": 3.7394, | |
| "step": 530432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.130753955644889e-05, | |
| "loss": 3.7247, | |
| "step": 530944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.129915360893837e-05, | |
| "loss": 3.7362, | |
| "step": 531456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.129076766142785e-05, | |
| "loss": 3.7392, | |
| "step": 531968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.128238171391733e-05, | |
| "loss": 3.7304, | |
| "step": 532480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.127399576640681e-05, | |
| "loss": 3.7378, | |
| "step": 532992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.126562619770002e-05, | |
| "loss": 3.7223, | |
| "step": 533504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.12572402501895e-05, | |
| "loss": 3.7381, | |
| "step": 534016 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.878344774246216, | |
| "eval_runtime": 305.0899, | |
| "eval_samples_per_second": 1250.75, | |
| "eval_steps_per_second": 39.087, | |
| "step": 534240 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 3.6979707936697344e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
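
For reference, a minimal sketch (Python; the file name `trainer_state.json` is an assumption, and the field handling is inferred from the structure of the log above) of how the `log_history` array can be split into the regular training-loss points and the periodic evaluation records:

```python
import json

# Load the trainer state shown above (the path is illustrative).
with open("trainer_state.json") as f:
    state = json.load(f)

train_points = []  # (step, loss) from the regular logging entries
eval_points = []   # (step, eval_loss) from the evaluation entries

for entry in state["log_history"]:
    if "eval_loss" in entry:
        # Evaluation records carry eval_loss/eval_runtime instead of loss.
        eval_points.append((entry["step"], entry["eval_loss"]))
    elif "loss" in entry:
        train_points.append((entry["step"], entry["loss"]))

# Find the evaluation with the lowest loss recorded in this log.
best_step, best_eval = min(eval_points, key=lambda p: p[1])
print(f"lowest eval_loss {best_eval:.6f} at step {best_step}")
```

Run against this file, the minimum should be the step-534240 record (eval_loss ≈ 3.8783) shown at the end of the log, since the periodic evaluation loss decreases over the course of training.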