| { |
| "best_metric": 3.890727996826172, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/pp-mod-subj/transformer/4/checkpoints/checkpoint-915830", |
| "epoch": 1.0250006060157382, |
| "eval_steps": 10, |
| "global_step": 915830, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 11.0005, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.849, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.1928, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 5.969, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.818, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.6938, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.6036, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.528, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4669, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.3968, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3531, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.3092, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989938500867749e-05, |
| "loss": 5.276, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.2132, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.1737, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.1452, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.1033, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.0758, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0546, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0218, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983231380739706e-05, |
| "loss": 5.0072, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 4.9812, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9481, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9428, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.913, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 4.8886, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.8751, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.866, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.8306, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756873037409844e-05, |
| "loss": 4.8223, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.7984, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.7886, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.79, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.97233456261715e-05, |
| "loss": 4.7668, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.971495967866098e-05, |
| "loss": 4.7559, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970657373115046e-05, |
| "loss": 4.7431, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969818778363994e-05, |
| "loss": 4.7353, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.723, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.708, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 4.6985, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.6794, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965627442489107e-05, |
| "loss": 4.6781, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964790485618428e-05, |
| "loss": 4.6498, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963951890867376e-05, |
| "loss": 4.6591, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 4.6542, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 4.6418, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9614377444945934e-05, |
| "loss": 4.6241, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9605991497435414e-05, |
| "loss": 4.6233, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9597605549924894e-05, |
| "loss": 4.618, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9589219602414374e-05, |
| "loss": 4.6003, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958085003370758e-05, |
| "loss": 4.588, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957248046500079e-05, |
| "loss": 4.5828, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956409451749027e-05, |
| "loss": 4.5931, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955570856997975e-05, |
| "loss": 4.5609, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.5819, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.5458, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.5538, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5441, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951377883242715e-05, |
| "loss": 4.5404, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.5321, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949702331620985e-05, |
| "loss": 4.526, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948863736869932e-05, |
| "loss": 4.5087, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94802514211888e-05, |
| "loss": 4.5206, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 4.5004, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946349590497149e-05, |
| "loss": 4.5066, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945510995746097e-05, |
| "loss": 4.498, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 4.4885, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 4.4713, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9429968493733145e-05, |
| "loss": 4.4851, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421598925026354e-05, |
| "loss": 4.475, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941321297751584e-05, |
| "loss": 4.4599, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.940482703000532e-05, |
| "loss": 4.466, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396441082494794e-05, |
| "loss": 4.4575, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388055134984274e-05, |
| "loss": 4.4561, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379669187473754e-05, |
| "loss": 4.4584, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371283239963234e-05, |
| "loss": 4.4502, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.936291367125644e-05, |
| "loss": 4.4416, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935452772374592e-05, |
| "loss": 4.4269, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93461417762354e-05, |
| "loss": 4.4338, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.933775582872488e-05, |
| "loss": 4.4308, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932936988121436e-05, |
| "loss": 4.4331, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932098393370384e-05, |
| "loss": 4.4258, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931259798619332e-05, |
| "loss": 4.4131, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930421203868281e-05, |
| "loss": 4.3969, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929584246997602e-05, |
| "loss": 4.4015, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92874565224655e-05, |
| "loss": 4.4024, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927907057495498e-05, |
| "loss": 4.4031, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927068462744446e-05, |
| "loss": 4.3985, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926231505873767e-05, |
| "loss": 4.3939, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925392911122715e-05, |
| "loss": 4.3819, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924555954252036e-05, |
| "loss": 4.3787, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923717359500984e-05, |
| "loss": 4.3765, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922878764749932e-05, |
| "loss": 4.3835, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922041807879253e-05, |
| "loss": 4.3755, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921203213128201e-05, |
| "loss": 4.352, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920364618377149e-05, |
| "loss": 4.3571, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919526023626097e-05, |
| "loss": 4.3629, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918687428875045e-05, |
| "loss": 4.3642, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917848834123993e-05, |
| "loss": 4.357, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917010239372941e-05, |
| "loss": 4.3497, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916171644621889e-05, |
| "loss": 4.3493, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.915333049870837e-05, |
| "loss": 4.3437, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914494455119785e-05, |
| "loss": 4.3472, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913657498249106e-05, |
| "loss": 4.3456, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.912818903498054e-05, |
| "loss": 4.3269, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911980308747002e-05, |
| "loss": 4.3363, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91114171399595e-05, |
| "loss": 4.314, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910303119244898e-05, |
| "loss": 4.3246, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909464524493846e-05, |
| "loss": 4.3239, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908625929742794e-05, |
| "loss": 4.3164, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907788972872115e-05, |
| "loss": 4.3199, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906950378121063e-05, |
| "loss": 4.3052, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906111783370011e-05, |
| "loss": 4.3023, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905273188618959e-05, |
| "loss": 4.3077, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90443623174828e-05, |
| "loss": 4.3207, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903597636997228e-05, |
| "loss": 4.2925, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902759042246176e-05, |
| "loss": 4.2872, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901920447495124e-05, |
| "loss": 4.2898, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9010834906244455e-05, |
| "loss": 4.3038, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9002448958733935e-05, |
| "loss": 4.2917, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899407939002715e-05, |
| "loss": 4.2946, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985693442516624e-05, |
| "loss": 4.288, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977307495006104e-05, |
| "loss": 4.2907, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968921547495584e-05, |
| "loss": 4.281, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960535599985064e-05, |
| "loss": 4.2814, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952149652474544e-05, |
| "loss": 4.2757, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943763704964024e-05, |
| "loss": 4.286, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935377757453504e-05, |
| "loss": 4.2639, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892700818874671e-05, |
| "loss": 4.2767, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891862224123619e-05, |
| "loss": 4.2724, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891023629372567e-05, |
| "loss": 4.2598, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890185034621515e-05, |
| "loss": 4.2711, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889348077750837e-05, |
| "loss": 4.2657, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888509482999785e-05, |
| "loss": 4.2699, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887670888248733e-05, |
| "loss": 4.2659, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886832293497681e-05, |
| "loss": 4.2481, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885993698746629e-05, |
| "loss": 4.2447, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885155103995577e-05, |
| "loss": 4.2488, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884318147124898e-05, |
| "loss": 4.2588, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883479552373846e-05, |
| "loss": 4.2597, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882640957622794e-05, |
| "loss": 4.2515, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881802362871742e-05, |
| "loss": 4.2403, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88096376812069e-05, |
| "loss": 4.2414, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8801268112500106e-05, |
| "loss": 4.2444, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8792882164989586e-05, |
| "loss": 4.2404, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878449621747907e-05, |
| "loss": 4.2354, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877611026996855e-05, |
| "loss": 4.2504, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876774070126176e-05, |
| "loss": 4.2379, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875935475375124e-05, |
| "loss": 4.2296, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875096880624072e-05, |
| "loss": 4.2271, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.243750095367432, |
| "eval_runtime": 568.7385, |
| "eval_samples_per_second": 670.943, |
| "eval_steps_per_second": 20.967, |
| "step": 76319 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.87425828587302e-05, |
| "loss": 4.2337, |
| "step": 76800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.873419691121968e-05, |
| "loss": 4.2297, |
| "step": 77312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.872581096370916e-05, |
| "loss": 4.2167, |
| "step": 77824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8717425016198635e-05, |
| "loss": 4.2169, |
| "step": 78336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8709039068688115e-05, |
| "loss": 4.2103, |
| "step": 78848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8700653121177595e-05, |
| "loss": 4.2082, |
| "step": 79360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8692267173667075e-05, |
| "loss": 4.2012, |
| "step": 79872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8683881226156555e-05, |
| "loss": 4.2031, |
| "step": 80384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.867549527864604e-05, |
| "loss": 4.2089, |
| "step": 80896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.866710933113552e-05, |
| "loss": 4.2071, |
| "step": 81408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8658723383625e-05, |
| "loss": 4.2035, |
| "step": 81920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.865033743611448e-05, |
| "loss": 4.2064, |
| "step": 82432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.864196786740769e-05, |
| "loss": 4.1982, |
| "step": 82944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.863358191989717e-05, |
| "loss": 4.1931, |
| "step": 83456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.862519597238665e-05, |
| "loss": 4.1904, |
| "step": 83968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.861681002487613e-05, |
| "loss": 4.1642, |
| "step": 84480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.860844045616934e-05, |
| "loss": 4.1869, |
| "step": 84992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.860005450865882e-05, |
| "loss": 4.1843, |
| "step": 85504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.85916685611483e-05, |
| "loss": 4.1838, |
| "step": 86016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.858328261363778e-05, |
| "loss": 4.1949, |
| "step": 86528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8574913044930995e-05, |
| "loss": 4.1788, |
| "step": 87040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8566527097420475e-05, |
| "loss": 4.1812, |
| "step": 87552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8558141149909955e-05, |
| "loss": 4.1798, |
| "step": 88064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8549755202399435e-05, |
| "loss": 4.179, |
| "step": 88576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8541385633692644e-05, |
| "loss": 4.1698, |
| "step": 89088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8532999686182124e-05, |
| "loss": 4.1698, |
| "step": 89600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8524613738671604e-05, |
| "loss": 4.1761, |
| "step": 90112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8516227791161084e-05, |
| "loss": 4.1589, |
| "step": 90624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.850785822245429e-05, |
| "loss": 4.164, |
| "step": 91136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849947227494377e-05, |
| "loss": 4.1519, |
| "step": 91648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849108632743325e-05, |
| "loss": 4.1558, |
| "step": 92160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.848270037992273e-05, |
| "loss": 4.1671, |
| "step": 92672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.847433081121595e-05, |
| "loss": 4.1595, |
| "step": 93184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.846594486370543e-05, |
| "loss": 4.1597, |
| "step": 93696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.845755891619491e-05, |
| "loss": 4.158, |
| "step": 94208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.844917296868439e-05, |
| "loss": 4.1576, |
| "step": 94720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.84408033999776e-05, |
| "loss": 4.1567, |
| "step": 95232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.843241745246708e-05, |
| "loss": 4.1539, |
| "step": 95744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.842403150495656e-05, |
| "loss": 4.1513, |
| "step": 96256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.841564555744604e-05, |
| "loss": 4.1398, |
| "step": 96768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.840727598873925e-05, |
| "loss": 4.1477, |
| "step": 97280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839889004122873e-05, |
| "loss": 4.1316, |
| "step": 97792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839050409371821e-05, |
| "loss": 4.1432, |
| "step": 98304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8382118146207687e-05, |
| "loss": 4.1481, |
| "step": 98816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.83737485775009e-05, |
| "loss": 4.1433, |
| "step": 99328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.836536262999038e-05, |
| "loss": 4.1356, |
| "step": 99840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.835697668247986e-05, |
| "loss": 4.1377, |
| "step": 100352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834859073496934e-05, |
| "loss": 4.1425, |
| "step": 100864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834022116626255e-05, |
| "loss": 4.1222, |
| "step": 101376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.833183521875203e-05, |
| "loss": 4.1254, |
| "step": 101888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.832344927124151e-05, |
| "loss": 4.1251, |
| "step": 102400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.831506332373099e-05, |
| "loss": 4.1396, |
| "step": 102912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.83066937550242e-05, |
| "loss": 4.1173, |
| "step": 103424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.829830780751368e-05, |
| "loss": 4.1383, |
| "step": 103936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.828992186000316e-05, |
| "loss": 4.1073, |
| "step": 104448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.828153591249264e-05, |
| "loss": 4.1229, |
| "step": 104960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8273166343785856e-05, |
| "loss": 4.1215, |
| "step": 105472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8264780396275336e-05, |
| "loss": 4.1249, |
| "step": 105984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8256394448764816e-05, |
| "loss": 4.1156, |
| "step": 106496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8248008501254296e-05, |
| "loss": 4.1191, |
| "step": 107008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8239638932547505e-05, |
| "loss": 4.1065, |
| "step": 107520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8231252985036985e-05, |
| "loss": 4.1168, |
| "step": 108032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8222867037526465e-05, |
| "loss": 4.1083, |
| "step": 108544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8214481090015945e-05, |
| "loss": 4.1133, |
| "step": 109056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8206111521309154e-05, |
| "loss": 4.1083, |
| "step": 109568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8197725573798634e-05, |
| "loss": 4.1046, |
| "step": 110080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8189339626288114e-05, |
| "loss": 4.0879, |
| "step": 110592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8180953678777594e-05, |
| "loss": 4.1127, |
| "step": 111104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.817258411007081e-05, |
| "loss": 4.1025, |
| "step": 111616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.816419816256029e-05, |
| "loss": 4.0941, |
| "step": 112128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.815581221504977e-05, |
| "loss": 4.1016, |
| "step": 112640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.814742626753924e-05, |
| "loss": 4.0982, |
| "step": 113152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.813905669883246e-05, |
| "loss": 4.0995, |
| "step": 113664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.813067075132194e-05, |
| "loss": 4.1085, |
| "step": 114176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.812228480381141e-05, |
| "loss": 4.1035, |
| "step": 114688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.811389885630089e-05, |
| "loss": 4.0942, |
| "step": 115200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.810552928759411e-05, |
| "loss": 4.0869, |
| "step": 115712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.809714334008359e-05, |
| "loss": 4.0926, |
| "step": 116224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808875739257306e-05, |
| "loss": 4.095, |
| "step": 116736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808037144506255e-05, |
| "loss": 4.1045, |
| "step": 117248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.807200187635576e-05, |
| "loss": 4.0934, |
| "step": 117760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8063615928845243e-05, |
| "loss": 4.0882, |
| "step": 118272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8055229981334717e-05, |
| "loss": 4.0731, |
| "step": 118784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.804686041262793e-05, |
| "loss": 4.0785, |
| "step": 119296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.803847446511741e-05, |
| "loss": 4.0815, |
| "step": 119808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8030088517606886e-05, |
| "loss": 4.0834, |
| "step": 120320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8021702570096366e-05, |
| "loss": 4.0908, |
| "step": 120832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.801333300138958e-05, |
| "loss": 4.0829, |
| "step": 121344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.800494705387906e-05, |
| "loss": 4.0749, |
| "step": 121856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7996561106368535e-05, |
| "loss": 4.0793, |
| "step": 122368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7988175158858015e-05, |
| "loss": 4.071, |
| "step": 122880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797980559015123e-05, |
| "loss": 4.0805, |
| "step": 123392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797141964264071e-05, |
| "loss": 4.0798, |
| "step": 123904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.796303369513019e-05, |
| "loss": 4.0608, |
| "step": 124416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.795464774761967e-05, |
| "loss": 4.0623, |
| "step": 124928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7946278178912886e-05, |
| "loss": 4.0697, |
| "step": 125440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.793789223140236e-05, |
| "loss": 4.0802, |
| "step": 125952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7929522662695575e-05, |
| "loss": 4.0656, |
| "step": 126464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7921136715185055e-05, |
| "loss": 4.0741, |
| "step": 126976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7912750767674535e-05, |
| "loss": 4.0662, |
| "step": 127488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.790436482016401e-05, |
| "loss": 4.0645, |
| "step": 128000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.789597887265349e-05, |
| "loss": 4.0701, |
| "step": 128512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.788759292514297e-05, |
| "loss": 4.069, |
| "step": 129024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7879223356436184e-05, |
| "loss": 4.0562, |
| "step": 129536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7870837408925664e-05, |
| "loss": 4.0683, |
| "step": 130048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7862451461415144e-05, |
| "loss": 4.0479, |
| "step": 130560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7854065513904624e-05, |
| "loss": 4.055, |
| "step": 131072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7845679566394104e-05, |
| "loss": 4.0617, |
| "step": 131584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7837293618883584e-05, |
| "loss": 4.0502, |
| "step": 132096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7828907671373064e-05, |
| "loss": 4.0558, |
| "step": 132608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7820521723862544e-05, |
| "loss": 4.0497, |
| "step": 133120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.781215215515575e-05, |
| "loss": 4.0453, |
| "step": 133632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.780378258644896e-05, |
| "loss": 4.0511, |
| "step": 134144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.779539663893844e-05, |
| "loss": 4.0662, |
| "step": 134656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.778701069142792e-05, |
| "loss": 4.0424, |
| "step": 135168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.77786247439174e-05, |
| "loss": 4.0417, |
| "step": 135680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777023879640689e-05, |
| "loss": 4.0404, |
| "step": 136192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.776185284889637e-05, |
| "loss": 4.0507, |
| "step": 136704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.775346690138585e-05, |
| "loss": 4.0486, |
| "step": 137216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.774509733267906e-05, |
| "loss": 4.0463, |
| "step": 137728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.773671138516854e-05, |
| "loss": 4.0474, |
| "step": 138240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.772832543765802e-05, |
| "loss": 4.0481, |
| "step": 138752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.77199394901475e-05, |
| "loss": 4.0442, |
| "step": 139264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.771155354263698e-05, |
| "loss": 4.0392, |
| "step": 139776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.770316759512646e-05, |
| "loss": 4.0412, |
| "step": 140288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.769478164761594e-05, |
| "loss": 4.0511, |
| "step": 140800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.768639570010542e-05, |
| "loss": 4.0341, |
| "step": 141312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7678026131398626e-05, |
| "loss": 4.0374, |
| "step": 141824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7669640183888106e-05, |
| "loss": 4.0487, |
| "step": 142336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7661254236377586e-05, |
| "loss": 4.0249, |
| "step": 142848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.765286828886707e-05, |
| "loss": 4.0421, |
| "step": 143360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.764451509896401e-05, |
| "loss": 4.0354, |
| "step": 143872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.763612915145349e-05, |
| "loss": 4.047, |
| "step": 144384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.762774320394297e-05, |
| "loss": 4.0394, |
| "step": 144896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761935725643245e-05, |
| "loss": 4.0266, |
| "step": 145408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761097130892193e-05, |
| "loss": 4.0248, |
| "step": 145920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.760258536141141e-05, |
| "loss": 4.0304, |
| "step": 146432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.759421579270462e-05, |
| "loss": 4.0399, |
| "step": 146944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.75858298451941e-05, |
| "loss": 4.0414, |
| "step": 147456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.757744389768358e-05, |
| "loss": 4.0377, |
| "step": 147968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.756905795017306e-05, |
| "loss": 4.0228, |
| "step": 148480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.756067200266254e-05, |
| "loss": 4.0271, |
| "step": 148992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.755228605515202e-05, |
| "loss": 4.0346, |
| "step": 149504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.75439001076415e-05, |
| "loss": 4.0287, |
| "step": 150016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7535530538934716e-05, |
| "loss": 4.0246, |
| "step": 150528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7527144591424196e-05, |
| "loss": 4.0425, |
| "step": 151040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.751875864391367e-05, |
| "loss": 4.0232, |
| "step": 151552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.751037269640315e-05, |
| "loss": 4.0267, |
| "step": 152064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.750198674889263e-05, |
| "loss": 4.0211, |
| "step": 152576 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.073619365692139, |
| "eval_runtime": 572.2917, |
| "eval_samples_per_second": 666.777, |
| "eval_steps_per_second": 20.837, |
| "step": 152638 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493617180185845e-05, |
| "loss": 4.0314, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748523123267532e-05, |
| "loss": 4.027, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74768452851648e-05, |
| "loss": 4.0185, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746845933765428e-05, |
| "loss": 4.016, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7460073390143764e-05, |
| "loss": 4.0105, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451687442633244e-05, |
| "loss": 4.0113, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443301495122724e-05, |
| "loss": 4.0091, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434915547612204e-05, |
| "loss": 4.0082, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426545978905413e-05, |
| "loss": 4.0131, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741816003139489e-05, |
| "loss": 4.0143, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740977408388437e-05, |
| "loss": 4.0136, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740138813637385e-05, |
| "loss": 4.014, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739301856766706e-05, |
| "loss": 4.0086, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738463262015654e-05, |
| "loss": 4.0035, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.737624667264602e-05, |
| "loss": 4.0048, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73678607251355e-05, |
| "loss": 3.9782, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735947477762498e-05, |
| "loss": 4.0035, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735108883011446e-05, |
| "loss": 4.0003, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734270288260394e-05, |
| "loss": 3.9989, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733433331389716e-05, |
| "loss": 4.0165, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732594736638664e-05, |
| "loss": 3.9985, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731756141887612e-05, |
| "loss": 4.004, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73091754713656e-05, |
| "loss": 4.0009, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730078952385508e-05, |
| "loss": 3.9989, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729241995514829e-05, |
| "loss": 3.9932, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728403400763777e-05, |
| "loss": 3.9925, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727564806012725e-05, |
| "loss": 4.0005, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726726211261673e-05, |
| "loss": 3.9855, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725887616510621e-05, |
| "loss": 3.9913, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725049021759568e-05, |
| "loss": 3.9783, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7242120648888896e-05, |
| "loss": 3.9875, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723373470137838e-05, |
| "loss": 3.9964, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7225348753867856e-05, |
| "loss": 3.9858, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7216962806357336e-05, |
| "loss": 3.9933, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720859323765055e-05, |
| "loss": 3.9882, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720020729014003e-05, |
| "loss": 3.9907, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7191821342629505e-05, |
| "loss": 3.9938, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7183435395118985e-05, |
| "loss": 3.9892, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7175049447608465e-05, |
| "loss": 3.9855, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716667987890168e-05, |
| "loss": 3.9746, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7158293931391154e-05, |
| "loss": 3.9852, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7149907983880634e-05, |
| "loss": 3.9711, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714152203637012e-05, |
| "loss": 3.9873, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71331360888596e-05, |
| "loss": 3.9839, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712475014134908e-05, |
| "loss": 3.9855, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711638057264229e-05, |
| "loss": 3.9764, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710799462513177e-05, |
| "loss": 3.9774, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709960867762125e-05, |
| "loss": 3.9849, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709122273011073e-05, |
| "loss": 3.9654, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708285316140394e-05, |
| "loss": 3.9712, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707446721389342e-05, |
| "loss": 3.9664, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70660812663829e-05, |
| "loss": 3.9847, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705769531887238e-05, |
| "loss": 3.9653, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704930937136186e-05, |
| "loss": 3.9848, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7040939802655074e-05, |
| "loss": 3.9562, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7032553855144554e-05, |
| "loss": 3.9683, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7024167907634034e-05, |
| "loss": 3.9729, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015781960123514e-05, |
| "loss": 3.9714, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700741239141672e-05, |
| "loss": 3.9675, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69990264439062e-05, |
| "loss": 3.9809, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699064049639568e-05, |
| "loss": 3.95, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698225454888516e-05, |
| "loss": 3.9665, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697386860137464e-05, |
| "loss": 3.9617, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696549903266785e-05, |
| "loss": 3.9703, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695711308515733e-05, |
| "loss": 3.9581, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694872713764681e-05, |
| "loss": 3.9615, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694034119013629e-05, |
| "loss": 3.9449, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693197162142951e-05, |
| "loss": 3.9678, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692358567391899e-05, |
| "loss": 3.9583, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691519972640847e-05, |
| "loss": 3.9551, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690681377889795e-05, |
| "loss": 3.96, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6898444210191157e-05, |
| "loss": 3.9585, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6890058262680636e-05, |
| "loss": 3.9563, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6881672315170116e-05, |
| "loss": 3.971, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6873286367659596e-05, |
| "loss": 3.9653, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6864916798952806e-05, |
| "loss": 3.9575, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856530851442285e-05, |
| "loss": 3.9497, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6848144903931765e-05, |
| "loss": 3.9519, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6839758956421245e-05, |
| "loss": 3.9594, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6831373008910725e-05, |
| "loss": 3.9669, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682300344020394e-05, |
| "loss": 3.9544, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681461749269342e-05, |
| "loss": 3.9572, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68062315451829e-05, |
| "loss": 3.9387, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679784559767238e-05, |
| "loss": 3.9484, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678945965016186e-05, |
| "loss": 3.9406, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678109008145507e-05, |
| "loss": 3.9499, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677270413394455e-05, |
| "loss": 3.9578, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676431818643403e-05, |
| "loss": 3.9499, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675593223892351e-05, |
| "loss": 3.9422, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674756267021672e-05, |
| "loss": 3.948, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.67391767227062e-05, |
| "loss": 3.9404, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673079077519568e-05, |
| "loss": 3.9511, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.672240482768516e-05, |
| "loss": 3.9478, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6714035258978375e-05, |
| "loss": 3.9334, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705649311467855e-05, |
| "loss": 3.9339, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697263363957335e-05, |
| "loss": 3.9408, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688877416446815e-05, |
| "loss": 3.9506, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668049146893629e-05, |
| "loss": 3.9399, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6672121900229504e-05, |
| "loss": 3.9491, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663735952718984e-05, |
| "loss": 3.9375, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665535000520846e-05, |
| "loss": 3.9383, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664696405769794e-05, |
| "loss": 3.9432, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663859448899115e-05, |
| "loss": 3.9436, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663020854148063e-05, |
| "loss": 3.9319, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662182259397011e-05, |
| "loss": 3.9479, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661343664645959e-05, |
| "loss": 3.9241, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660506707775281e-05, |
| "loss": 3.9281, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659668113024229e-05, |
| "loss": 3.9403, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658829518273176e-05, |
| "loss": 3.9251, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657990923522124e-05, |
| "loss": 3.9378, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657153966651446e-05, |
| "loss": 3.9273, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656315371900393e-05, |
| "loss": 3.9222, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655476777149341e-05, |
| "loss": 3.927, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654638182398289e-05, |
| "loss": 3.9477, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6538012255276106e-05, |
| "loss": 3.9236, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529626307765586e-05, |
| "loss": 3.9264, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6521240360255066e-05, |
| "loss": 3.9186, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6512854412744546e-05, |
| "loss": 3.9278, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650448484403776e-05, |
| "loss": 3.9354, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6496098896527235e-05, |
| "loss": 3.9288, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6487712949016715e-05, |
| "loss": 3.9266, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479327001506195e-05, |
| "loss": 3.928, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6470957432799404e-05, |
| "loss": 3.9269, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462571485288884e-05, |
| "loss": 3.9262, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6454185537778364e-05, |
| "loss": 3.9242, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6445799590267844e-05, |
| "loss": 3.9338, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6437413642757324e-05, |
| "loss": 3.9209, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642904407405054e-05, |
| "loss": 3.9204, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642065812654002e-05, |
| "loss": 3.9352, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64122721790295e-05, |
| "loss": 3.912, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640388623151898e-05, |
| "loss": 3.9253, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639551666281219e-05, |
| "loss": 3.9236, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638713071530167e-05, |
| "loss": 3.934, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637874476779115e-05, |
| "loss": 3.9249, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637035882028063e-05, |
| "loss": 3.9197, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636198925157384e-05, |
| "loss": 3.9108, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635360330406332e-05, |
| "loss": 3.9162, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.63452173565528e-05, |
| "loss": 3.9275, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633683140904228e-05, |
| "loss": 3.9305, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6328461840335494e-05, |
| "loss": 3.9262, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6320075892824974e-05, |
| "loss": 3.915, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6311689945314454e-05, |
| "loss": 3.9144, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6303303997803934e-05, |
| "loss": 3.9286, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629493442909714e-05, |
| "loss": 3.9156, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628654848158662e-05, |
| "loss": 3.9166, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.62781625340761e-05, |
| "loss": 3.9303, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626977658656558e-05, |
| "loss": 3.9154, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626140701785879e-05, |
| "loss": 3.9199, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625302107034827e-05, |
| "loss": 3.9159, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.0001678466796875, |
| "eval_runtime": 585.2038, |
| "eval_samples_per_second": 652.065, |
| "eval_steps_per_second": 20.378, |
| "step": 228957 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.624463512283775e-05, |
| "loss": 3.9246, |
| "step": 229376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.623624917532723e-05, |
| "loss": 3.9203, |
| "step": 229888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.622786322781671e-05, |
| "loss": 3.9119, |
| "step": 230400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.62194772803062e-05, |
| "loss": 3.9105, |
| "step": 230912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.621109133279568e-05, |
| "loss": 3.9081, |
| "step": 231424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.620270538528516e-05, |
| "loss": 3.9044, |
| "step": 231936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.619431943777464e-05, |
| "loss": 3.9074, |
| "step": 232448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.618593349026412e-05, |
| "loss": 3.9021, |
| "step": 232960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.617756392155733e-05, |
| "loss": 3.9096, |
| "step": 233472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.616917797404681e-05, |
| "loss": 3.9161, |
| "step": 233984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.616079202653629e-05, |
| "loss": 3.9076, |
| "step": 234496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.615240607902577e-05, |
| "loss": 3.9071, |
| "step": 235008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6144036510318976e-05, |
| "loss": 3.9075, |
| "step": 235520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6135650562808456e-05, |
| "loss": 3.9042, |
| "step": 236032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6127264615297936e-05, |
| "loss": 3.901, |
| "step": 236544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6118878667787416e-05, |
| "loss": 3.8802, |
| "step": 237056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.611050909908063e-05, |
| "loss": 3.8977, |
| "step": 237568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.610212315157011e-05, |
| "loss": 3.899, |
| "step": 238080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.609373720405959e-05, |
| "loss": 3.9005, |
| "step": 238592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6085351256549065e-05, |
| "loss": 3.9142, |
| "step": 239104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.607698168784228e-05, |
| "loss": 3.8995, |
| "step": 239616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.606859574033176e-05, |
| "loss": 3.9076, |
| "step": 240128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.606020979282124e-05, |
| "loss": 3.8965, |
| "step": 240640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.605184022411445e-05, |
| "loss": 3.8974, |
| "step": 241152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.604345427660393e-05, |
| "loss": 3.896, |
| "step": 241664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.603506832909341e-05, |
| "loss": 3.8981, |
| "step": 242176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.602668238158289e-05, |
| "loss": 3.8992, |
| "step": 242688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.60183128128761e-05, |
| "loss": 3.8885, |
| "step": 243200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6009926865365585e-05, |
| "loss": 3.8909, |
| "step": 243712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6001540917855065e-05, |
| "loss": 3.8819, |
| "step": 244224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.599315497034454e-05, |
| "loss": 3.8913, |
| "step": 244736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.598476902283402e-05, |
| "loss": 3.899, |
| "step": 245248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5976399454127234e-05, |
| "loss": 3.8937, |
| "step": 245760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5968013506616714e-05, |
| "loss": 3.8955, |
| "step": 246272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.595962755910619e-05, |
| "loss": 3.8936, |
| "step": 246784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5951257990399403e-05, |
| "loss": 3.8949, |
| "step": 247296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.594287204288888e-05, |
| "loss": 3.8959, |
| "step": 247808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.593448609537836e-05, |
| "loss": 3.8968, |
| "step": 248320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5926100147867836e-05, |
| "loss": 3.8883, |
| "step": 248832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.591773057916105e-05, |
| "loss": 3.8805, |
| "step": 249344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.590934463165054e-05, |
| "loss": 3.8909, |
| "step": 249856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.590095868414001e-05, |
| "loss": 3.8738, |
| "step": 250368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.589257273662949e-05, |
| "loss": 3.8977, |
| "step": 250880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.588420316792271e-05, |
| "loss": 3.8916, |
| "step": 251392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.587581722041219e-05, |
| "loss": 3.8904, |
| "step": 251904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.586743127290166e-05, |
| "loss": 3.8835, |
| "step": 252416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585904532539114e-05, |
| "loss": 3.8835, |
| "step": 252928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585067575668436e-05, |
| "loss": 3.8957, |
| "step": 253440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.584228980917384e-05, |
| "loss": 3.8722, |
| "step": 253952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.583390386166331e-05, |
| "loss": 3.8779, |
| "step": 254464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.582551791415279e-05, |
| "loss": 3.8791, |
| "step": 254976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5817148345446006e-05, |
| "loss": 3.8902, |
| "step": 255488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5808762397935486e-05, |
| "loss": 3.8723, |
| "step": 256000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5800376450424966e-05, |
| "loss": 3.8931, |
| "step": 256512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5791990502914446e-05, |
| "loss": 3.8707, |
| "step": 257024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.578362093420766e-05, |
| "loss": 3.8716, |
| "step": 257536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5775234986697135e-05, |
| "loss": 3.8867, |
| "step": 258048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5766849039186615e-05, |
| "loss": 3.874, |
| "step": 258560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5758463091676095e-05, |
| "loss": 3.8863, |
| "step": 259072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.575009352296931e-05, |
| "loss": 3.8918, |
| "step": 259584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5741707575458784e-05, |
| "loss": 3.8653, |
| "step": 260096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5733321627948264e-05, |
| "loss": 3.8725, |
| "step": 260608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5724935680437744e-05, |
| "loss": 3.8754, |
| "step": 261120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.571656611173096e-05, |
| "loss": 3.8805, |
| "step": 261632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.570818016422044e-05, |
| "loss": 3.8697, |
| "step": 262144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.569979421670992e-05, |
| "loss": 3.8735, |
| "step": 262656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.56914082691994e-05, |
| "loss": 3.8568, |
| "step": 263168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.568303870049261e-05, |
| "loss": 3.8805, |
| "step": 263680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.567465275298209e-05, |
| "loss": 3.8711, |
| "step": 264192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.566626680547157e-05, |
| "loss": 3.8672, |
| "step": 264704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.565788085796105e-05, |
| "loss": 3.8717, |
| "step": 265216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564951128925426e-05, |
| "loss": 3.8724, |
| "step": 265728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564112534174374e-05, |
| "loss": 3.8714, |
| "step": 266240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.563273939423322e-05, |
| "loss": 3.8829, |
| "step": 266752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.56243534467227e-05, |
| "loss": 3.8802, |
| "step": 267264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.561598387801591e-05, |
| "loss": 3.874, |
| "step": 267776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.560759793050539e-05, |
| "loss": 3.867, |
| "step": 268288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559921198299487e-05, |
| "loss": 3.8681, |
| "step": 268800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559082603548435e-05, |
| "loss": 3.871, |
| "step": 269312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.558245646677756e-05, |
| "loss": 3.8833, |
| "step": 269824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.557407051926704e-05, |
| "loss": 3.8726, |
| "step": 270336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.556568457175652e-05, |
| "loss": 3.8659, |
| "step": 270848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5557298624246e-05, |
| "loss": 3.86, |
| "step": 271360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554892905553921e-05, |
| "loss": 3.8646, |
| "step": 271872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554054310802869e-05, |
| "loss": 3.8588, |
| "step": 272384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.553215716051817e-05, |
| "loss": 3.8657, |
| "step": 272896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.552377121300765e-05, |
| "loss": 3.8681, |
| "step": 273408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.551540164430087e-05, |
| "loss": 3.8672, |
| "step": 273920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.550701569679035e-05, |
| "loss": 3.8613, |
| "step": 274432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.549862974927983e-05, |
| "loss": 3.8701, |
| "step": 274944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.549024380176931e-05, |
| "loss": 3.859, |
| "step": 275456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5481874233062516e-05, |
| "loss": 3.8636, |
| "step": 275968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5473488285551996e-05, |
| "loss": 3.8643, |
| "step": 276480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5465102338041476e-05, |
| "loss": 3.8522, |
| "step": 276992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5456716390530956e-05, |
| "loss": 3.8508, |
| "step": 277504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5448346821824165e-05, |
| "loss": 3.8648, |
| "step": 278016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5439960874313645e-05, |
| "loss": 3.862, |
| "step": 278528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5431574926803125e-05, |
| "loss": 3.868, |
| "step": 279040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5423188979292605e-05, |
| "loss": 3.8632, |
| "step": 279552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.541481941058582e-05, |
| "loss": 3.8568, |
| "step": 280064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.54064334630753e-05, |
| "loss": 3.8565, |
| "step": 280576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.539804751556478e-05, |
| "loss": 3.8654, |
| "step": 281088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538966156805426e-05, |
| "loss": 3.8646, |
| "step": 281600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538129199934747e-05, |
| "loss": 3.8516, |
| "step": 282112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.537290605183695e-05, |
| "loss": 3.869, |
| "step": 282624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.536452010432643e-05, |
| "loss": 3.8431, |
| "step": 283136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.535613415681591e-05, |
| "loss": 3.8524, |
| "step": 283648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.534776458810912e-05, |
| "loss": 3.8605, |
| "step": 284160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.53393786405986e-05, |
| "loss": 3.847, |
| "step": 284672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.533099269308808e-05, |
| "loss": 3.8602, |
| "step": 285184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.532260674557756e-05, |
| "loss": 3.8454, |
| "step": 285696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5314237176870774e-05, |
| "loss": 3.8418, |
| "step": 286208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5305851229360254e-05, |
| "loss": 3.8496, |
| "step": 286720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5297465281849734e-05, |
| "loss": 3.8695, |
| "step": 287232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5289079334339214e-05, |
| "loss": 3.8486, |
| "step": 287744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.528070976563242e-05, |
| "loss": 3.8484, |
| "step": 288256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.52723238181219e-05, |
| "loss": 3.8419, |
| "step": 288768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.526393787061138e-05, |
| "loss": 3.8506, |
| "step": 289280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.525555192310086e-05, |
| "loss": 3.857, |
| "step": 289792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.524718235439407e-05, |
| "loss": 3.8533, |
| "step": 290304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.523879640688355e-05, |
| "loss": 3.8446, |
| "step": 290816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.523041045937303e-05, |
| "loss": 3.8561, |
| "step": 291328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.522202451186251e-05, |
| "loss": 3.8503, |
| "step": 291840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.521365494315573e-05, |
| "loss": 3.851, |
| "step": 292352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.520526899564521e-05, |
| "loss": 3.8456, |
| "step": 292864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.519688304813469e-05, |
| "loss": 3.8576, |
| "step": 293376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.518849710062417e-05, |
| "loss": 3.8462, |
| "step": 293888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.518012753191738e-05, |
| "loss": 3.8458, |
| "step": 294400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.517174158440686e-05, |
| "loss": 3.8599, |
| "step": 294912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.516335563689634e-05, |
| "loss": 3.8362, |
| "step": 295424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.515496968938582e-05, |
| "loss": 3.8494, |
| "step": 295936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5146600120679026e-05, |
| "loss": 3.8517, |
| "step": 296448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5138214173168506e-05, |
| "loss": 3.8567, |
| "step": 296960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5129828225657986e-05, |
| "loss": 3.849, |
| "step": 297472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5121442278147466e-05, |
| "loss": 3.8474, |
| "step": 297984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.511307270944068e-05, |
| "loss": 3.8374, |
| "step": 298496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.510468676193016e-05, |
| "loss": 3.8415, |
| "step": 299008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.509630081441964e-05, |
| "loss": 3.8513, |
| "step": 299520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.508791486690912e-05, |
| "loss": 3.8564, |
| "step": 300032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.507954529820233e-05, |
| "loss": 3.8545, |
| "step": 300544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.507115935069181e-05, |
| "loss": 3.8439, |
| "step": 301056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.506277340318129e-05, |
| "loss": 3.8378, |
| "step": 301568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.505438745567077e-05, |
| "loss": 3.854, |
| "step": 302080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.504601788696398e-05, |
| "loss": 3.843, |
| "step": 302592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.503763193945346e-05, |
| "loss": 3.8447, |
| "step": 303104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.502924599194294e-05, |
| "loss": 3.8556, |
| "step": 303616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5020876423236155e-05, |
| "loss": 3.8464, |
| "step": 304128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5012490475725635e-05, |
| "loss": 3.8461, |
| "step": 304640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5004104528215115e-05, |
| "loss": 3.8439, |
| "step": 305152 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9584410190582275, |
| "eval_runtime": 567.4578, |
| "eval_samples_per_second": 672.457, |
| "eval_steps_per_second": 21.015, |
| "step": 305276 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4995718580704595e-05, |
| "loss": 3.8527, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4987332633194075e-05, |
| "loss": 3.8476, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4978946685683555e-05, |
| "loss": 3.8394, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4970560738173035e-05, |
| "loss": 3.8381, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496217479066251e-05, |
| "loss": 3.838, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.495378884315199e-05, |
| "loss": 3.8335, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.494540289564147e-05, |
| "loss": 3.8376, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.493701694813095e-05, |
| "loss": 3.83, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492864737942416e-05, |
| "loss": 3.8396, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492027781071737e-05, |
| "loss": 3.8461, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.491189186320685e-05, |
| "loss": 3.8329, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.490350591569633e-05, |
| "loss": 3.8342, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.489513634698955e-05, |
| "loss": 3.8393, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.488675039947903e-05, |
| "loss": 3.835, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.487836445196851e-05, |
| "loss": 3.8332, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486997850445798e-05, |
| "loss": 3.8105, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486159255694746e-05, |
| "loss": 3.8275, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.485320660943694e-05, |
| "loss": 3.8306, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.484482066192642e-05, |
| "loss": 3.8311, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.48364347144159e-05, |
| "loss": 3.8493, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.482804876690538e-05, |
| "loss": 3.8264, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481966281939486e-05, |
| "loss": 3.8397, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481127687188434e-05, |
| "loss": 3.8257, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480290730317756e-05, |
| "loss": 3.8324, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479452135566704e-05, |
| "loss": 3.8238, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478613540815652e-05, |
| "loss": 3.8305, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4777749460646e-05, |
| "loss": 3.8314, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4769379891939207e-05, |
| "loss": 3.8244, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4760993944428687e-05, |
| "loss": 3.8225, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4752607996918166e-05, |
| "loss": 3.8166, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4744222049407646e-05, |
| "loss": 3.8183, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4735852480700856e-05, |
| "loss": 3.8307, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4727466533190335e-05, |
| "loss": 3.8272, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4719080585679815e-05, |
| "loss": 3.8313, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4710694638169295e-05, |
| "loss": 3.8261, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.470232506946251e-05, |
| "loss": 3.8296, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.469393912195199e-05, |
| "loss": 3.828, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.468555317444147e-05, |
| "loss": 3.8298, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.467716722693095e-05, |
| "loss": 3.8245, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466879765822416e-05, |
| "loss": 3.8148, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466041171071364e-05, |
| "loss": 3.8295, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465202576320312e-05, |
| "loss": 3.8086, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.46436398156926e-05, |
| "loss": 3.8284, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463527024698581e-05, |
| "loss": 3.8243, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.462688429947529e-05, |
| "loss": 3.8302, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461849835196477e-05, |
| "loss": 3.8193, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461011240445425e-05, |
| "loss": 3.8167, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4601742835747465e-05, |
| "loss": 3.8301, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4593356888236945e-05, |
| "loss": 3.811, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4584970940726425e-05, |
| "loss": 3.8128, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4576584993215905e-05, |
| "loss": 3.8157, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4568215424509114e-05, |
| "loss": 3.8203, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4559829476998594e-05, |
| "loss": 3.8119, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551443529488074e-05, |
| "loss": 3.8246, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4543057581977554e-05, |
| "loss": 3.8073, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.453468801327076e-05, |
| "loss": 3.8109, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.452630206576024e-05, |
| "loss": 3.8213, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.451791611824972e-05, |
| "loss": 3.8071, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.45095301707392e-05, |
| "loss": 3.8261, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450116060203242e-05, |
| "loss": 3.831, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44927746545219e-05, |
| "loss": 3.7996, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448438870701138e-05, |
| "loss": 3.8079, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.447600275950086e-05, |
| "loss": 3.8134, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.446763319079407e-05, |
| "loss": 3.8226, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445924724328355e-05, |
| "loss": 3.8069, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445086129577303e-05, |
| "loss": 3.8078, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444247534826251e-05, |
| "loss": 3.7981, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4434105779555717e-05, |
| "loss": 3.8145, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4425719832045196e-05, |
| "loss": 3.8098, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4417333884534676e-05, |
| "loss": 3.8025, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4408947937024156e-05, |
| "loss": 3.8112, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440057836831737e-05, |
| "loss": 3.8099, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.439219242080685e-05, |
| "loss": 3.8091, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438380647329633e-05, |
| "loss": 3.8213, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437543690458954e-05, |
| "loss": 3.8183, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436705095707902e-05, |
| "loss": 3.8131, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.43586650095685e-05, |
| "loss": 3.8078, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435027906205798e-05, |
| "loss": 3.8104, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434190949335119e-05, |
| "loss": 3.805, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.433352354584067e-05, |
| "loss": 3.8279, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.432513759833015e-05, |
| "loss": 3.8094, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431675165081963e-05, |
| "loss": 3.807, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430838208211284e-05, |
| "loss": 3.7959, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4300012513406055e-05, |
| "loss": 3.8106, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4291626565895535e-05, |
| "loss": 3.7961, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4283240618385015e-05, |
| "loss": 3.8083, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4274854670874495e-05, |
| "loss": 3.8067, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4266468723363975e-05, |
| "loss": 3.8056, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4258082775853455e-05, |
| "loss": 3.8024, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4249696828342935e-05, |
| "loss": 3.8116, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4241327259636144e-05, |
| "loss": 3.7984, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4232941312125624e-05, |
| "loss": 3.8047, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4224555364615104e-05, |
| "loss": 3.8078, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4216169417104584e-05, |
| "loss": 3.7899, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.420779984839779e-05, |
| "loss": 3.7936, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419941390088728e-05, |
| "loss": 3.8082, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419102795337676e-05, |
| "loss": 3.7993, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.418264200586624e-05, |
| "loss": 3.8147, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.417427243715945e-05, |
| "loss": 3.8011, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416590286845266e-05, |
| "loss": 3.7934, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415751692094214e-05, |
| "loss": 3.7991, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414913097343162e-05, |
| "loss": 3.8052, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.41407450259211e-05, |
| "loss": 3.8063, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.413235907841058e-05, |
| "loss": 3.7956, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.412397313090006e-05, |
| "loss": 3.8089, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.411558718338953e-05, |
| "loss": 3.785, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4107217614682747e-05, |
| "loss": 3.793, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409883166717223e-05, |
| "loss": 3.804, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409044571966171e-05, |
| "loss": 3.7861, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4082059772151186e-05, |
| "loss": 3.8018, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40736902034444e-05, |
| "loss": 3.7973, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.406532063473761e-05, |
| "loss": 3.7789, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405693468722709e-05, |
| "loss": 3.7933, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404854873971657e-05, |
| "loss": 3.8113, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404016279220605e-05, |
| "loss": 3.7903, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403177684469553e-05, |
| "loss": 3.7929, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4023390897185004e-05, |
| "loss": 3.7864, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4015004949674484e-05, |
| "loss": 3.7913, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400661900216397e-05, |
| "loss": 3.8028, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.399824943345719e-05, |
| "loss": 3.7962, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398986348594666e-05, |
| "loss": 3.7887, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398147753843614e-05, |
| "loss": 3.7944, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397309159092562e-05, |
| "loss": 3.7962, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3964738401022565e-05, |
| "loss": 3.7932, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3956352453512045e-05, |
| "loss": 3.7921, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3947966506001525e-05, |
| "loss": 3.8002, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3939580558491005e-05, |
| "loss": 3.7864, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393119461098048e-05, |
| "loss": 3.795, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392280866346996e-05, |
| "loss": 3.799, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.391442271595944e-05, |
| "loss": 3.7845, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3906036768448925e-05, |
| "loss": 3.7901, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3897667199742134e-05, |
| "loss": 3.7978, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3889281252231614e-05, |
| "loss": 3.7978, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3880895304721094e-05, |
| "loss": 3.7974, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3872509357210574e-05, |
| "loss": 3.7909, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.386413978850378e-05, |
| "loss": 3.7821, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.385575384099326e-05, |
| "loss": 3.7864, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.384736789348274e-05, |
| "loss": 3.7941, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383898194597222e-05, |
| "loss": 3.8014, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383061237726543e-05, |
| "loss": 3.8019, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.382222642975491e-05, |
| "loss": 3.7914, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.381384048224439e-05, |
| "loss": 3.7805, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.380547091353761e-05, |
| "loss": 3.7986, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.379708496602709e-05, |
| "loss": 3.7855, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378869901851657e-05, |
| "loss": 3.7909, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378031307100605e-05, |
| "loss": 3.8031, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.377192712349553e-05, |
| "loss": 3.793, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.376354117598501e-05, |
| "loss": 3.7864, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375515522847449e-05, |
| "loss": 3.7926, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9340226650238037, |
| "eval_runtime": 557.6516, |
| "eval_samples_per_second": 684.282, |
| "eval_steps_per_second": 21.384, |
| "step": 381595 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.374676928096397e-05, |
| "loss": 3.8003, |
| "step": 381952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.373838333345345e-05, |
| "loss": 3.7969, |
| "step": 382464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.372999738594293e-05, |
| "loss": 3.7822, |
| "step": 382976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.372161143843241e-05, |
| "loss": 3.7822, |
| "step": 383488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3713241869725616e-05, |
| "loss": 3.7876, |
| "step": 384000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3704855922215096e-05, |
| "loss": 3.7809, |
| "step": 384512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3696469974704576e-05, |
| "loss": 3.7832, |
| "step": 385024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.368808402719406e-05, |
| "loss": 3.7703, |
| "step": 385536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.367969807968354e-05, |
| "loss": 3.7881, |
| "step": 386048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.367132851097675e-05, |
| "loss": 3.7967, |
| "step": 386560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.366294256346623e-05, |
| "loss": 3.7782, |
| "step": 387072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.365455661595571e-05, |
| "loss": 3.7794, |
| "step": 387584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.364618704724892e-05, |
| "loss": 3.7882, |
| "step": 388096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.36378010997384e-05, |
| "loss": 3.7807, |
| "step": 388608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362941515222788e-05, |
| "loss": 3.7828, |
| "step": 389120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362102920471736e-05, |
| "loss": 3.7584, |
| "step": 389632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.361264325720684e-05, |
| "loss": 3.7721, |
| "step": 390144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3604257309696314e-05, |
| "loss": 3.7766, |
| "step": 390656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.35958713621858e-05, |
| "loss": 3.7786, |
| "step": 391168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.358748541467528e-05, |
| "loss": 3.7968, |
| "step": 391680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.357909946716476e-05, |
| "loss": 3.7764, |
| "step": 392192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.357071351965424e-05, |
| "loss": 3.7889, |
| "step": 392704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.356232757214372e-05, |
| "loss": 3.7749, |
| "step": 393216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.355395800343693e-05, |
| "loss": 3.7805, |
| "step": 393728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.354557205592641e-05, |
| "loss": 3.7697, |
| "step": 394240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.353718610841589e-05, |
| "loss": 3.7767, |
| "step": 394752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.352880016090537e-05, |
| "loss": 3.7796, |
| "step": 395264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.352041421339485e-05, |
| "loss": 3.7732, |
| "step": 395776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.351202826588433e-05, |
| "loss": 3.7698, |
| "step": 396288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.350365869717754e-05, |
| "loss": 3.7662, |
| "step": 396800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.349527274966702e-05, |
| "loss": 3.762, |
| "step": 397312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.34868868021565e-05, |
| "loss": 3.7814, |
| "step": 397824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.347850085464598e-05, |
| "loss": 3.7815, |
| "step": 398336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3470114907135465e-05, |
| "loss": 3.7769, |
| "step": 398848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3461728959624945e-05, |
| "loss": 3.7761, |
| "step": 399360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3453343012114425e-05, |
| "loss": 3.7789, |
| "step": 399872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3444973443407634e-05, |
| "loss": 3.7801, |
| "step": 400384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3436587495897114e-05, |
| "loss": 3.7749, |
| "step": 400896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.342821792719032e-05, |
| "loss": 3.7767, |
| "step": 401408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.34198319796798e-05, |
| "loss": 3.7649, |
| "step": 401920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.341144603216928e-05, |
| "loss": 3.7773, |
| "step": 402432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.340306008465876e-05, |
| "loss": 3.7593, |
| "step": 402944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.339469051595197e-05, |
| "loss": 3.78, |
| "step": 403456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.338630456844145e-05, |
| "loss": 3.7716, |
| "step": 403968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.337791862093093e-05, |
| "loss": 3.7786, |
| "step": 404480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.336953267342042e-05, |
| "loss": 3.7693, |
| "step": 404992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.33611467259099e-05, |
| "loss": 3.7677, |
| "step": 405504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.335277715720311e-05, |
| "loss": 3.7837, |
| "step": 406016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.334439120969259e-05, |
| "loss": 3.7571, |
| "step": 406528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.333600526218207e-05, |
| "loss": 3.764, |
| "step": 407040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.332761931467155e-05, |
| "loss": 3.767, |
| "step": 407552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331923336716103e-05, |
| "loss": 3.7694, |
| "step": 408064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331086379845424e-05, |
| "loss": 3.765, |
| "step": 408576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3302477850943717e-05, |
| "loss": 3.7715, |
| "step": 409088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3294091903433197e-05, |
| "loss": 3.7625, |
| "step": 409600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3285705955922677e-05, |
| "loss": 3.76, |
| "step": 410112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3277320008412156e-05, |
| "loss": 3.7724, |
| "step": 410624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326895043970537e-05, |
| "loss": 3.7613, |
| "step": 411136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326056449219485e-05, |
| "loss": 3.7727, |
| "step": 411648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3252178544684325e-05, |
| "loss": 3.7798, |
| "step": 412160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3243792597173805e-05, |
| "loss": 3.7544, |
| "step": 412672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3235406649663285e-05, |
| "loss": 3.7581, |
| "step": 413184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3227020702152765e-05, |
| "loss": 3.7609, |
| "step": 413696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3218634754642245e-05, |
| "loss": 3.7775, |
| "step": 414208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3210248807131725e-05, |
| "loss": 3.7595, |
| "step": 414720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3201879238424934e-05, |
| "loss": 3.7617, |
| "step": 415232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3193493290914414e-05, |
| "loss": 3.749, |
| "step": 415744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3185107343403894e-05, |
| "loss": 3.7681, |
| "step": 416256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3176721395893374e-05, |
| "loss": 3.7582, |
| "step": 416768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3168335448382854e-05, |
| "loss": 3.7554, |
| "step": 417280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.315996587967607e-05, |
| "loss": 3.7619, |
| "step": 417792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.315157993216555e-05, |
| "loss": 3.7634, |
| "step": 418304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.314319398465503e-05, |
| "loss": 3.7608, |
| "step": 418816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.313480803714451e-05, |
| "loss": 3.7722, |
| "step": 419328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.312642208963399e-05, |
| "loss": 3.771, |
| "step": 419840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.31180525209272e-05, |
| "loss": 3.7646, |
| "step": 420352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.310966657341668e-05, |
| "loss": 3.76, |
| "step": 420864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.310128062590616e-05, |
| "loss": 3.7646, |
| "step": 421376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.309289467839564e-05, |
| "loss": 3.7534, |
| "step": 421888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.308450873088512e-05, |
| "loss": 3.776, |
| "step": 422400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.30761227833746e-05, |
| "loss": 3.7606, |
| "step": 422912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.306775321466781e-05, |
| "loss": 3.7629, |
| "step": 423424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.305936726715729e-05, |
| "loss": 3.7494, |
| "step": 423936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3050981319646775e-05, |
| "loss": 3.7631, |
| "step": 424448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3042595372136254e-05, |
| "loss": 3.7462, |
| "step": 424960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3034225803429464e-05, |
| "loss": 3.7624, |
| "step": 425472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3025839855918944e-05, |
| "loss": 3.7598, |
| "step": 425984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3017453908408424e-05, |
| "loss": 3.7581, |
| "step": 426496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3009067960897903e-05, |
| "loss": 3.7588, |
| "step": 427008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.300069839219111e-05, |
| "loss": 3.7677, |
| "step": 427520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.299231244468059e-05, |
| "loss": 3.7459, |
| "step": 428032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.298392649717007e-05, |
| "loss": 3.7554, |
| "step": 428544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.297554054965955e-05, |
| "loss": 3.7627, |
| "step": 429056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.296715460214903e-05, |
| "loss": 3.7443, |
| "step": 429568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.295876865463851e-05, |
| "loss": 3.7478, |
| "step": 430080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.295039908593173e-05, |
| "loss": 3.7611, |
| "step": 430592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.294201313842121e-05, |
| "loss": 3.7494, |
| "step": 431104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.293362719091069e-05, |
| "loss": 3.7728, |
| "step": 431616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.292524124340016e-05, |
| "loss": 3.7558, |
| "step": 432128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.291685529588964e-05, |
| "loss": 3.7469, |
| "step": 432640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.290846934837912e-05, |
| "loss": 3.751, |
| "step": 433152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29000834008686e-05, |
| "loss": 3.7602, |
| "step": 433664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.289169745335808e-05, |
| "loss": 3.7617, |
| "step": 434176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.288332788465129e-05, |
| "loss": 3.7472, |
| "step": 434688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2874958315944506e-05, |
| "loss": 3.7623, |
| "step": 435200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.286657236843398e-05, |
| "loss": 3.739, |
| "step": 435712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2858186420923466e-05, |
| "loss": 3.7457, |
| "step": 436224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2849800473412946e-05, |
| "loss": 3.7575, |
| "step": 436736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.284143090470616e-05, |
| "loss": 3.7443, |
| "step": 437248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2833044957195635e-05, |
| "loss": 3.7542, |
| "step": 437760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2824659009685115e-05, |
| "loss": 3.7568, |
| "step": 438272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2816273062174595e-05, |
| "loss": 3.7299, |
| "step": 438784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2807887114664075e-05, |
| "loss": 3.7498, |
| "step": 439296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2799517545957284e-05, |
| "loss": 3.764, |
| "step": 439808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2791131598446764e-05, |
| "loss": 3.7503, |
| "step": 440320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2782745650936244e-05, |
| "loss": 3.7433, |
| "step": 440832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2774359703425724e-05, |
| "loss": 3.7418, |
| "step": 441344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2765973755915204e-05, |
| "loss": 3.7459, |
| "step": 441856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.275760418720842e-05, |
| "loss": 3.7572, |
| "step": 442368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.27492182396979e-05, |
| "loss": 3.7536, |
| "step": 442880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.274083229218738e-05, |
| "loss": 3.7415, |
| "step": 443392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.273244634467686e-05, |
| "loss": 3.7491, |
| "step": 443904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.272407677597007e-05, |
| "loss": 3.7498, |
| "step": 444416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.271569082845955e-05, |
| "loss": 3.7523, |
| "step": 444928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.270730488094903e-05, |
| "loss": 3.7452, |
| "step": 445440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.269891893343851e-05, |
| "loss": 3.7543, |
| "step": 445952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.269054936473172e-05, |
| "loss": 3.746, |
| "step": 446464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.26821634172212e-05, |
| "loss": 3.7496, |
| "step": 446976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.267377746971068e-05, |
| "loss": 3.7544, |
| "step": 447488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.266539152220016e-05, |
| "loss": 3.7386, |
| "step": 448000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.265702195349337e-05, |
| "loss": 3.7462, |
| "step": 448512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.264863600598285e-05, |
| "loss": 3.7556, |
| "step": 449024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.264025005847233e-05, |
| "loss": 3.7519, |
| "step": 449536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.263186411096181e-05, |
| "loss": 3.7525, |
| "step": 450048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.262347816345129e-05, |
| "loss": 3.7478, |
| "step": 450560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.26151085947445e-05, |
| "loss": 3.7348, |
| "step": 451072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.260672264723398e-05, |
| "loss": 3.7421, |
| "step": 451584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.259833669972346e-05, |
| "loss": 3.749, |
| "step": 452096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.258995075221294e-05, |
| "loss": 3.7639, |
| "step": 452608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.258156480470242e-05, |
| "loss": 3.7548, |
| "step": 453120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.25731788571919e-05, |
| "loss": 3.7446, |
| "step": 453632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.256480928848511e-05, |
| "loss": 3.7444, |
| "step": 454144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.255642334097459e-05, |
| "loss": 3.7504, |
| "step": 454656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.254803739346407e-05, |
| "loss": 3.7437, |
| "step": 455168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253965144595356e-05, |
| "loss": 3.7437, |
| "step": 455680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253128187724677e-05, |
| "loss": 3.762, |
| "step": 456192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.252289592973625e-05, |
| "loss": 3.7486, |
| "step": 456704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.251450998222573e-05, |
| "loss": 3.7418, |
| "step": 457216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.250612403471521e-05, |
| "loss": 3.7493, |
| "step": 457728 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.918626546859741, |
| "eval_runtime": 569.637, |
| "eval_samples_per_second": 669.884, |
| "eval_steps_per_second": 20.934, |
| "step": 457914 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249773808720469e-05, |
| "loss": 3.7555, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248935213969417e-05, |
| "loss": 3.7508, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248096619218364e-05, |
| "loss": 3.7427, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247258024467312e-05, |
| "loss": 3.7431, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2464210675966336e-05, |
| "loss": 3.7416, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2455824728455816e-05, |
| "loss": 3.7362, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2447438780945296e-05, |
| "loss": 3.741, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2439052833434776e-05, |
| "loss": 3.7294, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243068326472799e-05, |
| "loss": 3.743, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242229731721747e-05, |
| "loss": 3.7561, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2413911369706945e-05, |
| "loss": 3.737, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2405525422196424e-05, |
| "loss": 3.7361, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2397139474685904e-05, |
| "loss": 3.7456, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2388769905979114e-05, |
| "loss": 3.7376, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2380383958468593e-05, |
| "loss": 3.7439, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2371998010958073e-05, |
| "loss": 3.7161, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2363612063447553e-05, |
| "loss": 3.7259, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235524249474076e-05, |
| "loss": 3.7337, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234685654723025e-05, |
| "loss": 3.7337, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233847059971973e-05, |
| "loss": 3.7588, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233008465220921e-05, |
| "loss": 3.7338, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232169870469869e-05, |
| "loss": 3.7491, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23133291359919e-05, |
| "loss": 3.7287, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230494318848138e-05, |
| "loss": 3.7379, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.229655724097086e-05, |
| "loss": 3.7287, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.228817129346034e-05, |
| "loss": 3.7367, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227980172475355e-05, |
| "loss": 3.7381, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227141577724303e-05, |
| "loss": 3.7299, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226302982973251e-05, |
| "loss": 3.7348, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.225464388222199e-05, |
| "loss": 3.7242, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2246274313515196e-05, |
| "loss": 3.7187, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.223788836600468e-05, |
| "loss": 3.7397, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222950241849416e-05, |
| "loss": 3.7379, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222111647098364e-05, |
| "loss": 3.7398, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221274690227685e-05, |
| "loss": 3.7309, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.220436095476633e-05, |
| "loss": 3.7402, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219597500725581e-05, |
| "loss": 3.7346, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.218758905974529e-05, |
| "loss": 3.7365, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21792194910385e-05, |
| "loss": 3.7321, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217083354352798e-05, |
| "loss": 3.7259, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.216244759601746e-05, |
| "loss": 3.738, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215406164850694e-05, |
| "loss": 3.7165, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214569207980015e-05, |
| "loss": 3.7393, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2137306132289637e-05, |
| "loss": 3.7331, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2128920184779116e-05, |
| "loss": 3.737, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2120534237268596e-05, |
| "loss": 3.7307, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2112148289758076e-05, |
| "loss": 3.726, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2103778721051285e-05, |
| "loss": 3.7417, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2095392773540765e-05, |
| "loss": 3.7203, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2087006826030245e-05, |
| "loss": 3.7237, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2078620878519725e-05, |
| "loss": 3.7262, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2070251309812934e-05, |
| "loss": 3.7266, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2061865362302414e-05, |
| "loss": 3.7268, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2053479414791894e-05, |
| "loss": 3.7279, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2045093467281374e-05, |
| "loss": 3.7224, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.203672389857459e-05, |
| "loss": 3.7249, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.202833795106407e-05, |
| "loss": 3.73, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201995200355355e-05, |
| "loss": 3.7235, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201156605604303e-05, |
| "loss": 3.7329, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200319648733624e-05, |
| "loss": 3.739, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.199481053982572e-05, |
| "loss": 3.716, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19864245923152e-05, |
| "loss": 3.7175, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197803864480468e-05, |
| "loss": 3.7175, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196965269729416e-05, |
| "loss": 3.7401, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196128312858737e-05, |
| "loss": 3.722, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195289718107685e-05, |
| "loss": 3.7204, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194451123356633e-05, |
| "loss": 3.7118, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193612528605581e-05, |
| "loss": 3.7224, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1927755717349024e-05, |
| "loss": 3.7203, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1919369769838504e-05, |
| "loss": 3.7168, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1910983822327984e-05, |
| "loss": 3.724, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1902597874817464e-05, |
| "loss": 3.7233, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189422830611067e-05, |
| "loss": 3.7246, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188584235860015e-05, |
| "loss": 3.7316, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.187745641108963e-05, |
| "loss": 3.7324, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186907046357911e-05, |
| "loss": 3.7271, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186070089487232e-05, |
| "loss": 3.7217, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18523149473618e-05, |
| "loss": 3.7234, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.184392899985128e-05, |
| "loss": 3.7167, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.183554305234076e-05, |
| "loss": 3.7386, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182717348363398e-05, |
| "loss": 3.7225, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181878753612346e-05, |
| "loss": 3.7273, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181040158861294e-05, |
| "loss": 3.7083, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180201564110242e-05, |
| "loss": 3.7274, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1793646072395626e-05, |
| "loss": 3.7047, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1785260124885106e-05, |
| "loss": 3.7236, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1776874177374586e-05, |
| "loss": 3.7218, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1768488229864066e-05, |
| "loss": 3.7223, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1760102282353546e-05, |
| "loss": 3.7187, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1751732713646755e-05, |
| "loss": 3.7278, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1743346766136235e-05, |
| "loss": 3.7123, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1734960818625715e-05, |
| "loss": 3.7172, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1726574871115195e-05, |
| "loss": 3.7231, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171820530240841e-05, |
| "loss": 3.708, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170981935489789e-05, |
| "loss": 3.7098, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170143340738737e-05, |
| "loss": 3.7198, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1693047459876844e-05, |
| "loss": 3.7127, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.168467789117006e-05, |
| "loss": 3.7349, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167629194365954e-05, |
| "loss": 3.7158, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166790599614902e-05, |
| "loss": 3.7127, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165952004863849e-05, |
| "loss": 3.7141, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165115047993171e-05, |
| "loss": 3.7218, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.164276453242119e-05, |
| "loss": 3.727, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163437858491067e-05, |
| "loss": 3.7115, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162599263740015e-05, |
| "loss": 3.723, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1617623068693365e-05, |
| "loss": 3.7054, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1609237121182845e-05, |
| "loss": 3.7055, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160085117367232e-05, |
| "loss": 3.7189, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15924652261618e-05, |
| "loss": 3.7095, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158407927865128e-05, |
| "loss": 3.7139, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1575709709944494e-05, |
| "loss": 3.7178, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156732376243397e-05, |
| "loss": 3.6948, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155893781492345e-05, |
| "loss": 3.7115, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155055186741293e-05, |
| "loss": 3.7299, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154218229870614e-05, |
| "loss": 3.7167, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153379635119562e-05, |
| "loss": 3.703, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15254104036851e-05, |
| "loss": 3.7053, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151702445617458e-05, |
| "loss": 3.7095, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150863850866406e-05, |
| "loss": 3.7198, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150026893995727e-05, |
| "loss": 3.7162, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149188299244675e-05, |
| "loss": 3.706, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148349704493623e-05, |
| "loss": 3.7106, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147511109742571e-05, |
| "loss": 3.7138, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.146674152871892e-05, |
| "loss": 3.7133, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14583555812084e-05, |
| "loss": 3.708, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144996963369788e-05, |
| "loss": 3.7157, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144158368618736e-05, |
| "loss": 3.7112, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1433214117480576e-05, |
| "loss": 3.7148, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1424828169970056e-05, |
| "loss": 3.715, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1416442222459536e-05, |
| "loss": 3.6998, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1408056274949016e-05, |
| "loss": 3.711, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1399686706242225e-05, |
| "loss": 3.7164, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1391300758731705e-05, |
| "loss": 3.718, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1382914811221185e-05, |
| "loss": 3.7151, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1374528863710665e-05, |
| "loss": 3.7141, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1366142916200145e-05, |
| "loss": 3.7015, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1357773347493354e-05, |
| "loss": 3.701, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1349387399982834e-05, |
| "loss": 3.7122, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1341001452472314e-05, |
| "loss": 3.7263, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1332615504961794e-05, |
| "loss": 3.7188, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.132424593625501e-05, |
| "loss": 3.7088, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.131585998874449e-05, |
| "loss": 3.7044, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.130747404123397e-05, |
| "loss": 3.7164, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129908809372345e-05, |
| "loss": 3.7087, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129071852501666e-05, |
| "loss": 3.7085, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.128233257750614e-05, |
| "loss": 3.7261, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.127394662999562e-05, |
| "loss": 3.7133, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.12655606824851e-05, |
| "loss": 3.7061, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125717473497458e-05, |
| "loss": 3.7135, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.908137559890747, |
| "eval_runtime": 567.1197, |
| "eval_samples_per_second": 672.858, |
| "eval_steps_per_second": 21.027, |
| "step": 534233 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.124880516626779e-05, |
| "loss": 3.7188, |
| "step": 534528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.124041921875727e-05, |
| "loss": 3.7168, |
| "step": 535040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.123203327124675e-05, |
| "loss": 3.7096, |
| "step": 535552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.122364732373623e-05, |
| "loss": 3.7064, |
| "step": 536064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1215277755029444e-05, |
| "loss": 3.7042, |
| "step": 536576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1206891807518923e-05, |
| "loss": 3.7054, |
| "step": 537088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1198505860008403e-05, |
| "loss": 3.705, |
| "step": 537600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.119011991249788e-05, |
| "loss": 3.6957, |
| "step": 538112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.118175034379109e-05, |
| "loss": 3.7055, |
| "step": 538624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.117336439628057e-05, |
| "loss": 3.7173, |
| "step": 539136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.116497844877005e-05, |
| "loss": 3.7008, |
| "step": 539648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.115659250125953e-05, |
| "loss": 3.6999, |
| "step": 540160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.114820655374901e-05, |
| "loss": 3.713, |
| "step": 540672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.113983698504222e-05, |
| "loss": 3.6993, |
| "step": 541184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.11314510375317e-05, |
| "loss": 3.7112, |
| "step": 541696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.112306509002118e-05, |
| "loss": 3.6832, |
| "step": 542208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.111467914251067e-05, |
| "loss": 3.6847, |
| "step": 542720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.110630957380388e-05, |
| "loss": 3.7024, |
| "step": 543232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.109792362629336e-05, |
| "loss": 3.6968, |
| "step": 543744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108953767878284e-05, |
| "loss": 3.7213, |
| "step": 544256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108115173127232e-05, |
| "loss": 3.7023, |
| "step": 544768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1072782162565526e-05, |
| "loss": 3.7093, |
| "step": 545280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1064396215055006e-05, |
| "loss": 3.6959, |
| "step": 545792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1056010267544486e-05, |
| "loss": 3.7055, |
| "step": 546304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1047624320033966e-05, |
| "loss": 3.6908, |
| "step": 546816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1039254751327175e-05, |
| "loss": 3.7022, |
| "step": 547328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1030868803816655e-05, |
| "loss": 3.7063, |
| "step": 547840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1022482856306135e-05, |
| "loss": 3.7008, |
| "step": 548352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.101409690879562e-05, |
| "loss": 3.6925, |
| "step": 548864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.100572734008883e-05, |
| "loss": 3.6927, |
| "step": 549376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.099734139257831e-05, |
| "loss": 3.6831, |
| "step": 549888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098895544506779e-05, |
| "loss": 3.7051, |
| "step": 550400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098056949755727e-05, |
| "loss": 3.7019, |
| "step": 550912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.097219992885048e-05, |
| "loss": 3.7018, |
| "step": 551424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.096381398133996e-05, |
| "loss": 3.701, |
| "step": 551936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.095542803382944e-05, |
| "loss": 3.7078, |
| "step": 552448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.094704208631892e-05, |
| "loss": 3.6997, |
| "step": 552960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.093867251761213e-05, |
| "loss": 3.6982, |
| "step": 553472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.093028657010161e-05, |
| "loss": 3.698, |
| "step": 553984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.092190062259109e-05, |
| "loss": 3.6977, |
| "step": 554496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0913514675080575e-05, |
| "loss": 3.6977, |
| "step": 555008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0905145106373784e-05, |
| "loss": 3.6836, |
| "step": 555520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0896759158863264e-05, |
| "loss": 3.7079, |
| "step": 556032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0888373211352744e-05, |
| "loss": 3.6969, |
| "step": 556544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.087998726384222e-05, |
| "loss": 3.7048, |
| "step": 557056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0871617695135433e-05, |
| "loss": 3.6983, |
| "step": 557568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0863231747624913e-05, |
| "loss": 3.6891, |
| "step": 558080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.085484580011439e-05, |
| "loss": 3.7089, |
| "step": 558592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0846459852603867e-05, |
| "loss": 3.6848, |
| "step": 559104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.083809028389708e-05, |
| "loss": 3.6924, |
| "step": 559616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082970433638656e-05, |
| "loss": 3.6873, |
| "step": 560128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082131838887604e-05, |
| "loss": 3.6976, |
| "step": 560640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.081293244136552e-05, |
| "loss": 3.6929, |
| "step": 561152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.080456287265874e-05, |
| "loss": 3.6941, |
| "step": 561664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.079617692514822e-05, |
| "loss": 3.6915, |
| "step": 562176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.078779097763769e-05, |
| "loss": 3.6898, |
| "step": 562688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077940503012717e-05, |
| "loss": 3.6951, |
| "step": 563200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077103546142039e-05, |
| "loss": 3.6907, |
| "step": 563712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.076264951390987e-05, |
| "loss": 3.7007, |
| "step": 564224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.075426356639934e-05, |
| "loss": 3.7005, |
| "step": 564736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.074587761888882e-05, |
| "loss": 3.6867, |
| "step": 565248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0737508050182036e-05, |
| "loss": 3.6799, |
| "step": 565760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0729122102671516e-05, |
| "loss": 3.6867, |
| "step": 566272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0720736155160996e-05, |
| "loss": 3.7034, |
| "step": 566784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0712350207650476e-05, |
| "loss": 3.6915, |
| "step": 567296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0703964260139956e-05, |
| "loss": 3.6844, |
| "step": 567808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0695594691433165e-05, |
| "loss": 3.6832, |
| "step": 568320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0687208743922645e-05, |
| "loss": 3.6902, |
| "step": 568832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0678822796412125e-05, |
| "loss": 3.6831, |
| "step": 569344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0670436848901605e-05, |
| "loss": 3.6878, |
| "step": 569856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0662067280194814e-05, |
| "loss": 3.6891, |
| "step": 570368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0653681332684294e-05, |
| "loss": 3.6859, |
| "step": 570880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0645295385173774e-05, |
| "loss": 3.6926, |
| "step": 571392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0636909437663254e-05, |
| "loss": 3.6994, |
| "step": 571904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.062853986895647e-05, |
| "loss": 3.6983, |
| "step": 572416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.062015392144595e-05, |
| "loss": 3.6911, |
| "step": 572928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.061176797393543e-05, |
| "loss": 3.6898, |
| "step": 573440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.060338202642491e-05, |
| "loss": 3.6914, |
| "step": 573952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.059501245771812e-05, |
| "loss": 3.6849, |
| "step": 574464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.05866265102076e-05, |
| "loss": 3.7025, |
| "step": 574976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.057824056269708e-05, |
| "loss": 3.6953, |
| "step": 575488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056985461518656e-05, |
| "loss": 3.6943, |
| "step": 576000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056148504647977e-05, |
| "loss": 3.6747, |
| "step": 576512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.055309909896925e-05, |
| "loss": 3.6901, |
| "step": 577024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.054471315145873e-05, |
| "loss": 3.6754, |
| "step": 577536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.053632720394821e-05, |
| "loss": 3.6882, |
| "step": 578048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.052795763524142e-05, |
| "loss": 3.6902, |
| "step": 578560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.05195716877309e-05, |
| "loss": 3.6868, |
| "step": 579072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051118574022038e-05, |
| "loss": 3.6896, |
| "step": 579584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.050279979270986e-05, |
| "loss": 3.6904, |
| "step": 580096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.049441384519934e-05, |
| "loss": 3.6806, |
| "step": 580608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.048604427649255e-05, |
| "loss": 3.6856, |
| "step": 581120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.047765832898203e-05, |
| "loss": 3.6905, |
| "step": 581632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.046927238147151e-05, |
| "loss": 3.6776, |
| "step": 582144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.046088643396099e-05, |
| "loss": 3.6796, |
| "step": 582656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.04525168652542e-05, |
| "loss": 3.6794, |
| "step": 583168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.044413091774368e-05, |
| "loss": 3.6871, |
| "step": 583680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.043574497023316e-05, |
| "loss": 3.7023, |
| "step": 584192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.042737540152638e-05, |
| "loss": 3.6809, |
| "step": 584704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.041898945401586e-05, |
| "loss": 3.6843, |
| "step": 585216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.041060350650534e-05, |
| "loss": 3.681, |
| "step": 585728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.040221755899482e-05, |
| "loss": 3.6926, |
| "step": 586240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0393847990288026e-05, |
| "loss": 3.6898, |
| "step": 586752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0385462042777506e-05, |
| "loss": 3.6788, |
| "step": 587264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0377076095266986e-05, |
| "loss": 3.6924, |
| "step": 587776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0368690147756466e-05, |
| "loss": 3.6709, |
| "step": 588288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0360304200245946e-05, |
| "loss": 3.6728, |
| "step": 588800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0351934631539155e-05, |
| "loss": 3.6866, |
| "step": 589312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0343548684028635e-05, |
| "loss": 3.6796, |
| "step": 589824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0335162736518115e-05, |
| "loss": 3.6839, |
| "step": 590336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0326776789007595e-05, |
| "loss": 3.6849, |
| "step": 590848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.031840722030081e-05, |
| "loss": 3.6605, |
| "step": 591360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.031002127279029e-05, |
| "loss": 3.6829, |
| "step": 591872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030163532527977e-05, |
| "loss": 3.6956, |
| "step": 592384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.029324937776925e-05, |
| "loss": 3.6837, |
| "step": 592896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.028487980906246e-05, |
| "loss": 3.6729, |
| "step": 593408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.027649386155194e-05, |
| "loss": 3.6734, |
| "step": 593920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.026810791404142e-05, |
| "loss": 3.6748, |
| "step": 594432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.02597219665309e-05, |
| "loss": 3.6875, |
| "step": 594944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.025135239782411e-05, |
| "loss": 3.6861, |
| "step": 595456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.024296645031359e-05, |
| "loss": 3.6736, |
| "step": 595968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.023458050280307e-05, |
| "loss": 3.6763, |
| "step": 596480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.022619455529255e-05, |
| "loss": 3.6889, |
| "step": 596992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0217824986585764e-05, |
| "loss": 3.6821, |
| "step": 597504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0209439039075244e-05, |
| "loss": 3.673, |
| "step": 598016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0201053091564724e-05, |
| "loss": 3.6871, |
| "step": 598528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0192667144054204e-05, |
| "loss": 3.6796, |
| "step": 599040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.018429757534741e-05, |
| "loss": 3.6788, |
| "step": 599552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.017591162783689e-05, |
| "loss": 3.6897, |
| "step": 600064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.016752568032637e-05, |
| "loss": 3.6664, |
| "step": 600576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.015913973281585e-05, |
| "loss": 3.679, |
| "step": 601088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.015077016410906e-05, |
| "loss": 3.6876, |
| "step": 601600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.014238421659854e-05, |
| "loss": 3.6835, |
| "step": 602112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.013399826908802e-05, |
| "loss": 3.6812, |
| "step": 602624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.01256123215775e-05, |
| "loss": 3.6853, |
| "step": 603136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.011724275287072e-05, |
| "loss": 3.6724, |
| "step": 603648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.01088568053602e-05, |
| "loss": 3.6704, |
| "step": 604160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010047085784968e-05, |
| "loss": 3.6821, |
| "step": 604672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.009208491033916e-05, |
| "loss": 3.6899, |
| "step": 605184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.008371534163237e-05, |
| "loss": 3.6901, |
| "step": 605696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.007532939412185e-05, |
| "loss": 3.6769, |
| "step": 606208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.006694344661133e-05, |
| "loss": 3.6738, |
| "step": 606720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.005855749910081e-05, |
| "loss": 3.6854, |
| "step": 607232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0050187930394016e-05, |
| "loss": 3.6753, |
| "step": 607744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0041801982883496e-05, |
| "loss": 3.6801, |
| "step": 608256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0033416035372976e-05, |
| "loss": 3.691, |
| "step": 608768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0025030087862456e-05, |
| "loss": 3.688, |
| "step": 609280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.001666051915567e-05, |
| "loss": 3.6735, |
| "step": 609792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.000827457164515e-05, |
| "loss": 3.6846, |
| "step": 610304 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9025914669036865, |
| "eval_runtime": 586.0986, |
| "eval_samples_per_second": 651.07, |
| "eval_steps_per_second": 20.346, |
| "step": 610552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999988862413463e-05, |
| "loss": 3.6834, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999150267662411e-05, |
| "loss": 3.6901, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998311672911359e-05, |
| "loss": 3.6763, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9974730781603065e-05, |
| "loss": 3.6787, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.996636121289628e-05, |
| "loss": 3.6713, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995797526538576e-05, |
| "loss": 3.6764, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994958931787524e-05, |
| "loss": 3.6795, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9941203370364714e-05, |
| "loss": 3.6604, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.993283380165793e-05, |
| "loss": 3.6737, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.992444785414741e-05, |
| "loss": 3.6868, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.991606190663689e-05, |
| "loss": 3.6722, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.990767595912637e-05, |
| "loss": 3.6679, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9899306390419585e-05, |
| "loss": 3.6827, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9890920442909065e-05, |
| "loss": 3.67, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.988253449539854e-05, |
| "loss": 3.6789, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.987414854788802e-05, |
| "loss": 3.6548, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9865778979181234e-05, |
| "loss": 3.653, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9857393031670714e-05, |
| "loss": 3.6743, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.984900708416019e-05, |
| "loss": 3.6653, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.984062113664967e-05, |
| "loss": 3.6917, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983225156794288e-05, |
| "loss": 3.6733, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982386562043236e-05, |
| "loss": 3.6795, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.981547967292184e-05, |
| "loss": 3.6655, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980709372541132e-05, |
| "loss": 3.6779, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979872415670454e-05, |
| "loss": 3.6642, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979033820919401e-05, |
| "loss": 3.6721, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978195226168349e-05, |
| "loss": 3.6683, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.977356631417297e-05, |
| "loss": 3.6753, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.976519674546619e-05, |
| "loss": 3.6668, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.975681079795566e-05, |
| "loss": 3.6601, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.974842485044514e-05, |
| "loss": 3.6553, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.974003890293462e-05, |
| "loss": 3.6725, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973166933422784e-05, |
| "loss": 3.6734, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.972328338671732e-05, |
| "loss": 3.6716, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.97148974392068e-05, |
| "loss": 3.6708, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.970651149169628e-05, |
| "loss": 3.6751, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9698141922989486e-05, |
| "loss": 3.6708, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9689755975478966e-05, |
| "loss": 3.6701, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9681370027968446e-05, |
| "loss": 3.6675, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.967300045926166e-05, |
| "loss": 3.6665, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9664614511751135e-05, |
| "loss": 3.6707, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9656228564240615e-05, |
| "loss": 3.6554, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9647842616730095e-05, |
| "loss": 3.6725, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963947304802331e-05, |
| "loss": 3.6728, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9631087100512784e-05, |
| "loss": 3.6726, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.962270115300227e-05, |
| "loss": 3.672, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.961431520549175e-05, |
| "loss": 3.6567, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960594563678496e-05, |
| "loss": 3.6795, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959755968927444e-05, |
| "loss": 3.658, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958917374176392e-05, |
| "loss": 3.6641, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95807877942534e-05, |
| "loss": 3.6566, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957241822554661e-05, |
| "loss": 3.6643, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956403227803609e-05, |
| "loss": 3.6699, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955564633052557e-05, |
| "loss": 3.6574, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.954726038301505e-05, |
| "loss": 3.666, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953889081430826e-05, |
| "loss": 3.6578, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953050486679774e-05, |
| "loss": 3.6701, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9522118919287224e-05, |
| "loss": 3.66, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9513732971776704e-05, |
| "loss": 3.6732, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.950536340306991e-05, |
| "loss": 3.6674, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.949697745555939e-05, |
| "loss": 3.6599, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948859150804887e-05, |
| "loss": 3.6545, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948020556053835e-05, |
| "loss": 3.659, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.947183599183156e-05, |
| "loss": 3.6695, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.946345004432104e-05, |
| "loss": 3.6617, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.945506409681052e-05, |
| "loss": 3.6592, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.94466781493e-05, |
| "loss": 3.6564, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.943829220178948e-05, |
| "loss": 3.6582, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.942992263308269e-05, |
| "loss": 3.6567, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.942153668557218e-05, |
| "loss": 3.6591, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.941315073806166e-05, |
| "loss": 3.658, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.940476479055114e-05, |
| "loss": 3.6591, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.939639522184435e-05, |
| "loss": 3.6616, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.938800927433383e-05, |
| "loss": 3.675, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937962332682331e-05, |
| "loss": 3.666, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937123737931279e-05, |
| "loss": 3.6612, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9362867810605996e-05, |
| "loss": 3.6625, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9354481863095476e-05, |
| "loss": 3.6616, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9346095915584956e-05, |
| "loss": 3.6533, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9337709968074436e-05, |
| "loss": 3.6714, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9329340399367645e-05, |
| "loss": 3.6679, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932095445185713e-05, |
| "loss": 3.6663, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.931256850434661e-05, |
| "loss": 3.6478, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.930419893563982e-05, |
| "loss": 3.6601, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.92958129881293e-05, |
| "loss": 3.6465, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.928742704061878e-05, |
| "loss": 3.6646, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927904109310826e-05, |
| "loss": 3.6624, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927067152440147e-05, |
| "loss": 3.6556, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.926228557689095e-05, |
| "loss": 3.6619, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.925389962938043e-05, |
| "loss": 3.6627, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924551368186991e-05, |
| "loss": 3.6539, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.923714411316312e-05, |
| "loss": 3.6558, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.92287581656526e-05, |
| "loss": 3.6623, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9220372218142085e-05, |
| "loss": 3.6501, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9211986270631565e-05, |
| "loss": 3.6543, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9203616701924774e-05, |
| "loss": 3.6501, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9195230754414254e-05, |
| "loss": 3.6578, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9186844806903734e-05, |
| "loss": 3.6722, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9178458859393214e-05, |
| "loss": 3.655, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.917008929068642e-05, |
| "loss": 3.6556, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.91617033431759e-05, |
| "loss": 3.6549, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.915331739566538e-05, |
| "loss": 3.6619, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.914493144815486e-05, |
| "loss": 3.6617, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.913656187944807e-05, |
| "loss": 3.651, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912817593193755e-05, |
| "loss": 3.6648, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911978998442704e-05, |
| "loss": 3.6426, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911140403691652e-05, |
| "loss": 3.6479, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910303446820973e-05, |
| "loss": 3.6596, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909464852069921e-05, |
| "loss": 3.6557, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.908626257318869e-05, |
| "loss": 3.6514, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907787662567817e-05, |
| "loss": 3.6577, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906950705697138e-05, |
| "loss": 3.6332, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906112110946086e-05, |
| "loss": 3.6577, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.905273516195034e-05, |
| "loss": 3.6636, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.904434921443982e-05, |
| "loss": 3.657, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9035979645733026e-05, |
| "loss": 3.6453, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9027593698222506e-05, |
| "loss": 3.6507, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.901920775071199e-05, |
| "loss": 3.6437, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.901082180320147e-05, |
| "loss": 3.6611, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900245223449468e-05, |
| "loss": 3.6553, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.899406628698416e-05, |
| "loss": 3.6498, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.898568033947364e-05, |
| "loss": 3.6485, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.897729439196312e-05, |
| "loss": 3.6606, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896892482325633e-05, |
| "loss": 3.6545, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896053887574581e-05, |
| "loss": 3.6476, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895215292823529e-05, |
| "loss": 3.6523, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894376698072477e-05, |
| "loss": 3.6548, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.893539741201798e-05, |
| "loss": 3.6516, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.892701146450746e-05, |
| "loss": 3.6587, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891862551699694e-05, |
| "loss": 3.6424, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8910239569486426e-05, |
| "loss": 3.6517, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8901870000779635e-05, |
| "loss": 3.6611, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8893484053269115e-05, |
| "loss": 3.6536, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8885098105758595e-05, |
| "loss": 3.6545, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8876712158248075e-05, |
| "loss": 3.6551, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8868342589541284e-05, |
| "loss": 3.6485, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8859956642030764e-05, |
| "loss": 3.6434, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8851570694520244e-05, |
| "loss": 3.6514, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8843184747009724e-05, |
| "loss": 3.6667, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.883481517830293e-05, |
| "loss": 3.6617, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.882642923079241e-05, |
| "loss": 3.6493, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.881804328328189e-05, |
| "loss": 3.6511, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880965733577138e-05, |
| "loss": 3.6541, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880128776706459e-05, |
| "loss": 3.6502, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.879290181955407e-05, |
| "loss": 3.6542, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.878451587204355e-05, |
| "loss": 3.6598, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.877612992453303e-05, |
| "loss": 3.6605, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.876774397702251e-05, |
| "loss": 3.6474, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.875935802951198e-05, |
| "loss": 3.6569, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.899163246154785, |
| "eval_runtime": 326.7628, |
| "eval_samples_per_second": 1167.792, |
| "eval_steps_per_second": 36.494, |
| "step": 686871 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.875097208200146e-05, |
| "loss": 3.6499, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.874260251329468e-05, |
| "loss": 3.6632, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.873421656578416e-05, |
| "loss": 3.6515, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.872583061827363e-05, |
| "loss": 3.6497, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.871744467076312e-05, |
| "loss": 3.6459, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8709075102056333e-05, |
| "loss": 3.6476, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870068915454581e-05, |
| "loss": 3.6516, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8692303207035287e-05, |
| "loss": 3.6366, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8683917259524766e-05, |
| "loss": 3.6433, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.867554769081798e-05, |
| "loss": 3.6615, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8667161743307456e-05, |
| "loss": 3.6437, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8658775795796935e-05, |
| "loss": 3.6393, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8650389848286415e-05, |
| "loss": 3.6592, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864202027957963e-05, |
| "loss": 3.643, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8633634332069105e-05, |
| "loss": 3.6522, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8625248384558584e-05, |
| "loss": 3.6296, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.861686243704807e-05, |
| "loss": 3.6274, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.860849286834128e-05, |
| "loss": 3.6474, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.860010692083076e-05, |
| "loss": 3.639, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859172097332024e-05, |
| "loss": 3.6637, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.858333502580972e-05, |
| "loss": 3.6456, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.857496545710293e-05, |
| "loss": 3.6581, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.856657950959241e-05, |
| "loss": 3.6395, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.855819356208189e-05, |
| "loss": 3.6465, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8549823993375105e-05, |
| "loss": 3.6364, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854143804586458e-05, |
| "loss": 3.6465, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.853305209835406e-05, |
| "loss": 3.6445, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.852466615084354e-05, |
| "loss": 3.6492, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8516280203333025e-05, |
| "loss": 3.6404, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8507894255822505e-05, |
| "loss": 3.6338, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8499508308311985e-05, |
| "loss": 3.6281, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8491122360801465e-05, |
| "loss": 3.6473, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8482752792094674e-05, |
| "loss": 3.6446, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8474366844584154e-05, |
| "loss": 3.6456, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8465980897073634e-05, |
| "loss": 3.646, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8457594949563114e-05, |
| "loss": 3.6501, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.844922538085632e-05, |
| "loss": 3.6445, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84408394333458e-05, |
| "loss": 3.6428, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843245348583528e-05, |
| "loss": 3.6454, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.842406753832476e-05, |
| "loss": 3.6422, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.841569796961798e-05, |
| "loss": 3.6416, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.840731202210746e-05, |
| "loss": 3.6359, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839892607459694e-05, |
| "loss": 3.6423, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839054012708642e-05, |
| "loss": 3.6491, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838217055837963e-05, |
| "loss": 3.645, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.837378461086911e-05, |
| "loss": 3.645, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.836539866335859e-05, |
| "loss": 3.6306, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.835701271584807e-05, |
| "loss": 3.6516, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8348643147141276e-05, |
| "loss": 3.6341, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8340257199630756e-05, |
| "loss": 3.6381, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8331871252120236e-05, |
| "loss": 3.6327, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8323485304609716e-05, |
| "loss": 3.6337, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.831511573590293e-05, |
| "loss": 3.6498, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.830672978839241e-05, |
| "loss": 3.6314, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.829834384088189e-05, |
| "loss": 3.6425, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.828995789337137e-05, |
| "loss": 3.6274, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.828158832466458e-05, |
| "loss": 3.6436, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.827320237715406e-05, |
| "loss": 3.6352, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.826481642964354e-05, |
| "loss": 3.6476, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.825643048213302e-05, |
| "loss": 3.6458, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.824806091342623e-05, |
| "loss": 3.6331, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823967496591571e-05, |
| "loss": 3.63, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823128901840519e-05, |
| "loss": 3.6316, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822290307089467e-05, |
| "loss": 3.639, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8214533502187886e-05, |
| "loss": 3.6409, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8206147554677366e-05, |
| "loss": 3.6337, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8197761607166846e-05, |
| "loss": 3.6313, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8189375659656326e-05, |
| "loss": 3.6341, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8181006090949535e-05, |
| "loss": 3.6277, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8172620143439015e-05, |
| "loss": 3.6346, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8164234195928495e-05, |
| "loss": 3.6323, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8155848248417975e-05, |
| "loss": 3.6311, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8147478679711184e-05, |
| "loss": 3.641, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8139092732200664e-05, |
| "loss": 3.6485, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8130706784690144e-05, |
| "loss": 3.6402, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8122320837179624e-05, |
| "loss": 3.6393, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.811395126847284e-05, |
| "loss": 3.6345, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.810556532096232e-05, |
| "loss": 3.6427, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.80971793734518e-05, |
| "loss": 3.6275, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.808879342594128e-05, |
| "loss": 3.6432, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.808042385723449e-05, |
| "loss": 3.6452, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.807203790972397e-05, |
| "loss": 3.642, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.806365196221345e-05, |
| "loss": 3.6254, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.805526601470293e-05, |
| "loss": 3.631, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804689644599614e-05, |
| "loss": 3.6198, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.803851049848562e-05, |
| "loss": 3.6416, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.80301245509751e-05, |
| "loss": 3.6365, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802173860346458e-05, |
| "loss": 3.6343, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8013369034757786e-05, |
| "loss": 3.6364, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800498308724727e-05, |
| "loss": 3.6378, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799659713973675e-05, |
| "loss": 3.6277, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.798821119222623e-05, |
| "loss": 3.6299, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797984162351944e-05, |
| "loss": 3.6409, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797145567600892e-05, |
| "loss": 3.6225, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.79630697284984e-05, |
| "loss": 3.6283, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.795468378098788e-05, |
| "loss": 3.6269, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.794631421228109e-05, |
| "loss": 3.6338, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.793792826477057e-05, |
| "loss": 3.6461, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.792954231726005e-05, |
| "loss": 3.6278, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.792115636974953e-05, |
| "loss": 3.6336, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.791278680104274e-05, |
| "loss": 3.6314, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.790440085353223e-05, |
| "loss": 3.6373, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.789601490602171e-05, |
| "loss": 3.6348, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.788762895851119e-05, |
| "loss": 3.6296, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7879259389804396e-05, |
| "loss": 3.6347, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7870873442293876e-05, |
| "loss": 3.621, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7862487494783356e-05, |
| "loss": 3.6239, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.785410154727283e-05, |
| "loss": 3.6342, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7845731978566045e-05, |
| "loss": 3.6306, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7837346031055525e-05, |
| "loss": 3.6264, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7828960083545005e-05, |
| "loss": 3.6336, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782057413603448e-05, |
| "loss": 3.6125, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7812204567327694e-05, |
| "loss": 3.6302, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.780381861981718e-05, |
| "loss": 3.6377, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.779543267230666e-05, |
| "loss": 3.6331, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7787046724796134e-05, |
| "loss": 3.6225, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777867715608935e-05, |
| "loss": 3.6238, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777029120857883e-05, |
| "loss": 3.6197, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.77619052610683e-05, |
| "loss": 3.6311, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.775351931355778e-05, |
| "loss": 3.6311, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7745149744851e-05, |
| "loss": 3.6308, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.773676379734048e-05, |
| "loss": 3.6176, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.772837784982995e-05, |
| "loss": 3.6398, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771999190231943e-05, |
| "loss": 3.626, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771162233361265e-05, |
| "loss": 3.6258, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7703236386102134e-05, |
| "loss": 3.6247, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.769485043859161e-05, |
| "loss": 3.632, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.768648086988482e-05, |
| "loss": 3.6273, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76780949223743e-05, |
| "loss": 3.6319, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7669708974863776e-05, |
| "loss": 3.6204, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7661323027353256e-05, |
| "loss": 3.6279, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765295345864647e-05, |
| "loss": 3.6394, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.764456751113595e-05, |
| "loss": 3.6255, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7636181563625425e-05, |
| "loss": 3.6282, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7627795616114905e-05, |
| "loss": 3.635, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.761942604740812e-05, |
| "loss": 3.6236, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76110400998976e-05, |
| "loss": 3.6227, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.760265415238708e-05, |
| "loss": 3.6278, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.759426820487656e-05, |
| "loss": 3.6344, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.758589863616978e-05, |
| "loss": 3.6406, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.757751268865925e-05, |
| "loss": 3.6246, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756912674114873e-05, |
| "loss": 3.6293, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756074079363821e-05, |
| "loss": 3.6266, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7552371224931426e-05, |
| "loss": 3.627, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75439852774209e-05, |
| "loss": 3.6309, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.753559932991038e-05, |
| "loss": 3.6346, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.752721338239986e-05, |
| "loss": 3.6362, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751882743488934e-05, |
| "loss": 3.6235, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7510441487378826e-05, |
| "loss": 3.6319, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.897075891494751, |
| "eval_runtime": 324.3395, |
| "eval_samples_per_second": 1176.517, |
| "eval_steps_per_second": 36.767, |
| "step": 763190 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7502055539868306e-05, |
| "loss": 3.6296, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7493669592357786e-05, |
| "loss": 3.6305, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7485283644847265e-05, |
| "loss": 3.6331, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7476897697336745e-05, |
| "loss": 3.6335, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7468528128629955e-05, |
| "loss": 3.6385, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7460142181119434e-05, |
| "loss": 3.6288, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7451756233608914e-05, |
| "loss": 3.6295, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7443370286098394e-05, |
| "loss": 3.626, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7435000717391603e-05, |
| "loss": 3.6261, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7426614769881083e-05, |
| "loss": 3.6359, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7418228822370563e-05, |
| "loss": 3.631, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.740984287486004e-05, |
| "loss": 3.6272, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.740147330615326e-05, |
| "loss": 3.6336, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.739308735864274e-05, |
| "loss": 3.619, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.738470141113222e-05, |
| "loss": 3.6295, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.73763154636217e-05, |
| "loss": 3.6145, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.736794589491491e-05, |
| "loss": 3.621, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735955994740439e-05, |
| "loss": 3.6238, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735117399989387e-05, |
| "loss": 3.6189, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.734278805238335e-05, |
| "loss": 3.6434, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.733441848367656e-05, |
| "loss": 3.6337, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.732603253616604e-05, |
| "loss": 3.6283, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.731764658865552e-05, |
| "loss": 3.6282, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7309260641145e-05, |
| "loss": 3.6312, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.730089107243821e-05, |
| "loss": 3.6166, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.729250512492769e-05, |
| "loss": 3.6314, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.728411917741717e-05, |
| "loss": 3.6299, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.727573322990665e-05, |
| "loss": 3.6103, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.726736366119986e-05, |
| "loss": 3.6202, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.725897771368934e-05, |
| "loss": 3.619, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.725059176617882e-05, |
| "loss": 3.6293, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.72422058186683e-05, |
| "loss": 3.6247, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.723383624996151e-05, |
| "loss": 3.6234, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.722545030245099e-05, |
| "loss": 3.6331, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.721706435494047e-05, |
| "loss": 3.6223, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720869478623368e-05, |
| "loss": 3.6288, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7200308838723167e-05, |
| "loss": 3.6315, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7191922891212647e-05, |
| "loss": 3.6182, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7183536943702126e-05, |
| "loss": 3.6194, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7175167374995336e-05, |
| "loss": 3.6226, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7166781427484816e-05, |
| "loss": 3.6147, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7158411858778025e-05, |
| "loss": 3.6311, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7150025911267505e-05, |
| "loss": 3.6277, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7141639963756985e-05, |
| "loss": 3.6272, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7133254016246464e-05, |
| "loss": 3.6236, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7124868068735944e-05, |
| "loss": 3.6233, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7116482121225424e-05, |
| "loss": 3.6246, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7108096173714904e-05, |
| "loss": 3.6276, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709972660500812e-05, |
| "loss": 3.5949, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.70913406574976e-05, |
| "loss": 3.6358, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.708297108879081e-05, |
| "loss": 3.6203, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.707458514128029e-05, |
| "loss": 3.6285, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.706619919376977e-05, |
| "loss": 3.626, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.705781324625925e-05, |
| "loss": 3.6201, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.704942729874873e-05, |
| "loss": 3.6078, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.704104135123821e-05, |
| "loss": 3.6217, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.703265540372768e-05, |
| "loss": 3.6299, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.70242858350209e-05, |
| "loss": 3.6267, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.701589988751038e-05, |
| "loss": 3.6367, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.700751393999986e-05, |
| "loss": 3.6173, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699912799248934e-05, |
| "loss": 3.601, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699077480258628e-05, |
| "loss": 3.6369, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.698238885507576e-05, |
| "loss": 3.6137, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.697400290756524e-05, |
| "loss": 3.6075, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.696561696005472e-05, |
| "loss": 3.6217, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69572310125442e-05, |
| "loss": 3.6208, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694884506503368e-05, |
| "loss": 3.6123, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6940459117523156e-05, |
| "loss": 3.6102, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6932073170012636e-05, |
| "loss": 3.6082, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.692370360130585e-05, |
| "loss": 3.6061, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6915317653795325e-05, |
| "loss": 3.6309, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.690693170628481e-05, |
| "loss": 3.6218, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.689854575877429e-05, |
| "loss": 3.6193, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.689017619006751e-05, |
| "loss": 3.6154, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.688179024255698e-05, |
| "loss": 3.6308, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.687340429504646e-05, |
| "loss": 3.6164, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.686501834753594e-05, |
| "loss": 3.6278, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.685663240002542e-05, |
| "loss": 3.6131, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.68482464525149e-05, |
| "loss": 3.613, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683987688380811e-05, |
| "loss": 3.6236, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683149093629759e-05, |
| "loss": 3.6292, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.682310498878707e-05, |
| "loss": 3.6154, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.681471904127655e-05, |
| "loss": 3.6244, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.680633309376603e-05, |
| "loss": 3.608, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.679794714625551e-05, |
| "loss": 3.6129, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6789561198744996e-05, |
| "loss": 3.6206, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6781175251234476e-05, |
| "loss": 3.6213, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6772805682527685e-05, |
| "loss": 3.611, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6764419735017165e-05, |
| "loss": 3.6112, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6756033787506645e-05, |
| "loss": 3.6166, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6747647839996125e-05, |
| "loss": 3.6166, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673929465009306e-05, |
| "loss": 3.6198, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673090870258254e-05, |
| "loss": 3.6046, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672252275507202e-05, |
| "loss": 3.6203, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.67141368075615e-05, |
| "loss": 3.6258, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.670575086005098e-05, |
| "loss": 3.6207, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.669736491254046e-05, |
| "loss": 3.5929, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.668897896502995e-05, |
| "loss": 3.6126, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.668060939632316e-05, |
| "loss": 3.6131, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.667222344881264e-05, |
| "loss": 3.624, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.666383750130212e-05, |
| "loss": 3.6198, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.66554515537916e-05, |
| "loss": 3.6175, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.664708198508481e-05, |
| "loss": 3.607, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663869603757429e-05, |
| "loss": 3.6164, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663031009006377e-05, |
| "loss": 3.6069, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.662192414255325e-05, |
| "loss": 3.6122, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.661355457384646e-05, |
| "loss": 3.6048, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.660516862633594e-05, |
| "loss": 3.6195, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.659678267882542e-05, |
| "loss": 3.6065, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6588396731314903e-05, |
| "loss": 3.603, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.658002716260811e-05, |
| "loss": 3.6024, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657164121509759e-05, |
| "loss": 3.6089, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.656325526758707e-05, |
| "loss": 3.617, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655486932007655e-05, |
| "loss": 3.6071, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.654649975136976e-05, |
| "loss": 3.6143, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.653811380385924e-05, |
| "loss": 3.6186, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652972785634872e-05, |
| "loss": 3.6047, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65213419088382e-05, |
| "loss": 3.6157, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.651297234013141e-05, |
| "loss": 3.613, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.650458639262089e-05, |
| "loss": 3.6121, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.649620044511037e-05, |
| "loss": 3.6096, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.648781449759985e-05, |
| "loss": 3.602, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6479444928893066e-05, |
| "loss": 3.615, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6471058981382546e-05, |
| "loss": 3.608, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6462673033872026e-05, |
| "loss": 3.6143, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6454287086361506e-05, |
| "loss": 3.6128, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6445917517654715e-05, |
| "loss": 3.6094, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6437531570144195e-05, |
| "loss": 3.6117, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6429162001437404e-05, |
| "loss": 3.5955, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6420776053926884e-05, |
| "loss": 3.6206, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6412390106416364e-05, |
| "loss": 3.6027, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6404004158905844e-05, |
| "loss": 3.6266, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6395618211395324e-05, |
| "loss": 3.6034, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.638724864268854e-05, |
| "loss": 3.6161, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637886269517802e-05, |
| "loss": 3.6169, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.63704767476675e-05, |
| "loss": 3.6035, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636209080015698e-05, |
| "loss": 3.6114, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.635370485264646e-05, |
| "loss": 3.6061, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.634531890513593e-05, |
| "loss": 3.6182, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.633693295762541e-05, |
| "loss": 3.6185, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632854701011489e-05, |
| "loss": 3.6073, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632017744140811e-05, |
| "loss": 3.6141, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631179149389758e-05, |
| "loss": 3.6068, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.63034219251908e-05, |
| "loss": 3.6174, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.629503597768028e-05, |
| "loss": 3.6031, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.628665003016976e-05, |
| "loss": 3.6228, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.627826408265924e-05, |
| "loss": 3.6047, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.626987813514872e-05, |
| "loss": 3.6168, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.62614921876382e-05, |
| "loss": 3.6103, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.891366958618164, |
| "eval_runtime": 326.6234, |
| "eval_samples_per_second": 1168.291, |
| "eval_steps_per_second": 36.51, |
| "step": 839510 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.625310624012768e-05, |
| "loss": 3.6065, |
| "step": 839680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.624472029261716e-05, |
| "loss": 3.6064, |
| "step": 840192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6236350723910367e-05, |
| "loss": 3.6089, |
| "step": 840704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6227964776399847e-05, |
| "loss": 3.612, |
| "step": 841216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6219578828889326e-05, |
| "loss": 3.6186, |
| "step": 841728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6211192881378806e-05, |
| "loss": 3.6037, |
| "step": 842240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6202823312672016e-05, |
| "loss": 3.6061, |
| "step": 842752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6194437365161495e-05, |
| "loss": 3.6023, |
| "step": 843264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.618605141765098e-05, |
| "loss": 3.6072, |
| "step": 843776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.617766547014046e-05, |
| "loss": 3.6112, |
| "step": 844288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616929590143367e-05, |
| "loss": 3.6039, |
| "step": 844800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616090995392315e-05, |
| "loss": 3.6061, |
| "step": 845312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615252400641263e-05, |
| "loss": 3.6127, |
| "step": 845824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.614413805890211e-05, |
| "loss": 3.5937, |
| "step": 846336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.613576849019532e-05, |
| "loss": 3.603, |
| "step": 846848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.61273825426848e-05, |
| "loss": 3.5938, |
| "step": 847360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611899659517428e-05, |
| "loss": 3.5969, |
| "step": 847872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611061064766376e-05, |
| "loss": 3.6021, |
| "step": 848384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.610224107895697e-05, |
| "loss": 3.5933, |
| "step": 848896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.609385513144645e-05, |
| "loss": 3.6169, |
| "step": 849408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6085469183935936e-05, |
| "loss": 3.6114, |
| "step": 849920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6077083236425416e-05, |
| "loss": 3.6035, |
| "step": 850432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6068697288914896e-05, |
| "loss": 3.6065, |
| "step": 850944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6060311341404376e-05, |
| "loss": 3.6047, |
| "step": 851456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6051925393893856e-05, |
| "loss": 3.5934, |
| "step": 851968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6043555825187065e-05, |
| "loss": 3.6075, |
| "step": 852480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6035169877676545e-05, |
| "loss": 3.6038, |
| "step": 852992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6026783930166025e-05, |
| "loss": 3.5898, |
| "step": 853504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6018397982655505e-05, |
| "loss": 3.5958, |
| "step": 854016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6010028413948714e-05, |
| "loss": 3.5978, |
| "step": 854528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6001642466438194e-05, |
| "loss": 3.6054, |
| "step": 855040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5993256518927674e-05, |
| "loss": 3.5999, |
| "step": 855552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5984870571417154e-05, |
| "loss": 3.6029, |
| "step": 856064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.597650100271037e-05, |
| "loss": 3.6077, |
| "step": 856576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.596811505519985e-05, |
| "loss": 3.5959, |
| "step": 857088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595972910768933e-05, |
| "loss": 3.6101, |
| "step": 857600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595134316017881e-05, |
| "loss": 3.6091, |
| "step": 858112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.594297359147202e-05, |
| "loss": 3.593, |
| "step": 858624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59345876439615e-05, |
| "loss": 3.5978, |
| "step": 859136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.592620169645098e-05, |
| "loss": 3.5975, |
| "step": 859648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.591781574894046e-05, |
| "loss": 3.5926, |
| "step": 860160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.590944618023367e-05, |
| "loss": 3.6049, |
| "step": 860672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.590106023272315e-05, |
| "loss": 3.6028, |
| "step": 861184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.589267428521263e-05, |
| "loss": 3.607, |
| "step": 861696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.588428833770211e-05, |
| "loss": 3.6005, |
| "step": 862208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.587590239019159e-05, |
| "loss": 3.6019, |
| "step": 862720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.586751644268107e-05, |
| "loss": 3.5993, |
| "step": 863232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.585913049517055e-05, |
| "loss": 3.602, |
| "step": 863744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.585076092646376e-05, |
| "loss": 3.5761, |
| "step": 864256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.584237497895324e-05, |
| "loss": 3.612, |
| "step": 864768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.583400541024645e-05, |
| "loss": 3.5946, |
| "step": 865280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.582561946273593e-05, |
| "loss": 3.6064, |
| "step": 865792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.581723351522541e-05, |
| "loss": 3.6031, |
| "step": 866304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.580884756771489e-05, |
| "loss": 3.5942, |
| "step": 866816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5800461620204365e-05, |
| "loss": 3.5865, |
| "step": 867328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5792075672693845e-05, |
| "loss": 3.5958, |
| "step": 867840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5783689725183325e-05, |
| "loss": 3.6133, |
| "step": 868352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5775303777672805e-05, |
| "loss": 3.597, |
| "step": 868864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.576693420896602e-05, |
| "loss": 3.6138, |
| "step": 869376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57585482614555e-05, |
| "loss": 3.5971, |
| "step": 869888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.575016231394498e-05, |
| "loss": 3.5761, |
| "step": 870400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574179274523819e-05, |
| "loss": 3.6135, |
| "step": 870912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5733423176531406e-05, |
| "loss": 3.5941, |
| "step": 871424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5725037229020886e-05, |
| "loss": 3.5849, |
| "step": 871936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5716651281510366e-05, |
| "loss": 3.5986, |
| "step": 872448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.570826533399984e-05, |
| "loss": 3.5983, |
| "step": 872960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569987938648932e-05, |
| "loss": 3.586, |
| "step": 873472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.56914934389788e-05, |
| "loss": 3.5908, |
| "step": 873984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.568310749146828e-05, |
| "loss": 3.5831, |
| "step": 874496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5674737922761495e-05, |
| "loss": 3.5889, |
| "step": 875008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5666351975250975e-05, |
| "loss": 3.6028, |
| "step": 875520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5657966027740454e-05, |
| "loss": 3.5971, |
| "step": 876032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5649580080229934e-05, |
| "loss": 3.5974, |
| "step": 876544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5641210511523144e-05, |
| "loss": 3.5898, |
| "step": 877056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5632824564012624e-05, |
| "loss": 3.6049, |
| "step": 877568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5624438616502103e-05, |
| "loss": 3.5966, |
| "step": 878080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5616052668991583e-05, |
| "loss": 3.6051, |
| "step": 878592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.560766672148106e-05, |
| "loss": 3.5896, |
| "step": 879104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559928077397054e-05, |
| "loss": 3.5883, |
| "step": 879616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559091120526375e-05, |
| "loss": 3.6001, |
| "step": 880128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558252525775323e-05, |
| "loss": 3.6083, |
| "step": 880640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.557413931024271e-05, |
| "loss": 3.595, |
| "step": 881152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55657533627322e-05, |
| "loss": 3.6028, |
| "step": 881664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.555736741522168e-05, |
| "loss": 3.585, |
| "step": 882176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554898146771116e-05, |
| "loss": 3.5898, |
| "step": 882688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554059552020064e-05, |
| "loss": 3.5973, |
| "step": 883200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553220957269012e-05, |
| "loss": 3.604, |
| "step": 883712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.552384000398333e-05, |
| "loss": 3.5866, |
| "step": 884224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.551545405647281e-05, |
| "loss": 3.5909, |
| "step": 884736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.550706810896229e-05, |
| "loss": 3.5956, |
| "step": 885248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.549868216145177e-05, |
| "loss": 3.594, |
| "step": 885760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5490328971548706e-05, |
| "loss": 3.5973, |
| "step": 886272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5481943024038186e-05, |
| "loss": 3.5817, |
| "step": 886784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5473557076527666e-05, |
| "loss": 3.5962, |
| "step": 887296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.546517112901715e-05, |
| "loss": 3.607, |
| "step": 887808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.545678518150663e-05, |
| "loss": 3.5996, |
| "step": 888320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.544839923399611e-05, |
| "loss": 3.5765, |
| "step": 888832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.544001328648559e-05, |
| "loss": 3.5862, |
| "step": 889344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.543162733897507e-05, |
| "loss": 3.5886, |
| "step": 889856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.542325777026828e-05, |
| "loss": 3.6026, |
| "step": 890368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.541487182275776e-05, |
| "loss": 3.601, |
| "step": 890880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.540648587524724e-05, |
| "loss": 3.5935, |
| "step": 891392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.539811630654045e-05, |
| "loss": 3.5853, |
| "step": 891904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538973035902993e-05, |
| "loss": 3.5966, |
| "step": 892416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538134441151941e-05, |
| "loss": 3.5827, |
| "step": 892928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.537295846400889e-05, |
| "loss": 3.5916, |
| "step": 893440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5364588895302106e-05, |
| "loss": 3.5873, |
| "step": 893952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5356202947791586e-05, |
| "loss": 3.5931, |
| "step": 894464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5347817000281066e-05, |
| "loss": 3.5882, |
| "step": 894976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5339431052770546e-05, |
| "loss": 3.5786, |
| "step": 895488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5331061484063755e-05, |
| "loss": 3.5841, |
| "step": 896000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5322675536553235e-05, |
| "loss": 3.5817, |
| "step": 896512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5314289589042715e-05, |
| "loss": 3.5981, |
| "step": 897024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5305903641532195e-05, |
| "loss": 3.5851, |
| "step": 897536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5297534072825404e-05, |
| "loss": 3.5926, |
| "step": 898048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5289148125314884e-05, |
| "loss": 3.5925, |
| "step": 898560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5280762177804364e-05, |
| "loss": 3.5856, |
| "step": 899072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527239260909757e-05, |
| "loss": 3.5968, |
| "step": 899584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.526400666158706e-05, |
| "loss": 3.5895, |
| "step": 900096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.525562071407654e-05, |
| "loss": 3.5913, |
| "step": 900608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.524723476656602e-05, |
| "loss": 3.5846, |
| "step": 901120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.523886519785923e-05, |
| "loss": 3.5806, |
| "step": 901632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.523047925034871e-05, |
| "loss": 3.5941, |
| "step": 902144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.522209330283819e-05, |
| "loss": 3.5868, |
| "step": 902656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.521370735532767e-05, |
| "loss": 3.5938, |
| "step": 903168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.520532140781714e-05, |
| "loss": 3.5916, |
| "step": 903680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.519693546030662e-05, |
| "loss": 3.5882, |
| "step": 904192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.51885495127961e-05, |
| "loss": 3.5851, |
| "step": 904704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.518017994408932e-05, |
| "loss": 3.5785, |
| "step": 905216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.51717939965788e-05, |
| "loss": 3.596, |
| "step": 905728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.516340804906828e-05, |
| "loss": 3.5836, |
| "step": 906240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.515502210155776e-05, |
| "loss": 3.6017, |
| "step": 906752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.514663615404724e-05, |
| "loss": 3.5825, |
| "step": 907264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.513826658534045e-05, |
| "loss": 3.5943, |
| "step": 907776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512988063782993e-05, |
| "loss": 3.5947, |
| "step": 908288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512149469031941e-05, |
| "loss": 3.5831, |
| "step": 908800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.511310874280889e-05, |
| "loss": 3.5895, |
| "step": 909312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5104739174102096e-05, |
| "loss": 3.5854, |
| "step": 909824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5096353226591576e-05, |
| "loss": 3.5978, |
| "step": 910336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5087967279081056e-05, |
| "loss": 3.5976, |
| "step": 910848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5079581331570536e-05, |
| "loss": 3.5854, |
| "step": 911360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5071195384060016e-05, |
| "loss": 3.5859, |
| "step": 911872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5062809436549496e-05, |
| "loss": 3.5891, |
| "step": 912384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.505443986784271e-05, |
| "loss": 3.5955, |
| "step": 912896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.504605392033219e-05, |
| "loss": 3.5806, |
| "step": 913408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.503766797282167e-05, |
| "loss": 3.6023, |
| "step": 913920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.502928202531115e-05, |
| "loss": 3.5855, |
| "step": 914432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.502089607780063e-05, |
| "loss": 3.593, |
| "step": 914944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.501251013029011e-05, |
| "loss": 3.5908, |
| "step": 915456 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.890727996826172, |
| "eval_runtime": 333.8747, |
| "eval_samples_per_second": 1142.917, |
| "eval_steps_per_second": 35.717, |
| "step": 915830 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 5.937823726752154e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |
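
The table above is the tail of a Hugging Face `Trainer` state file: per-step training entries interleaved with periodic `eval_loss` records, followed by the run-level fields (`logging_steps`, `max_steps`, `total_flos`, and so on). As a rough illustration of how this structure can be consumed, the sketch below loads such a file and summarizes it; the local path `trainer_state.json` and the printed summary are assumptions for illustration, not part of the original run.

```python
import json

# Minimal sketch (assumption: the JSON above is saved locally as
# "trainer_state.json"; adjust the path to the real checkpoint directory).
with open("trainer_state.json") as f:
    state = json.load(f)

# Split the log into per-step training records and periodic evaluation records.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"training log points: {len(train_logs)}")
print(f"evaluation points:   {len(eval_logs)}")
print(f"last training loss:  {train_logs[-1]['loss']} at step {train_logs[-1]['step']}")

# Lowest eval_loss seen so far; this can be cross-checked against the file's
# top-level best_metric / best_model_checkpoint fields.
best = min(eval_logs, key=lambda e: e["eval_loss"])
print(f"lowest eval_loss:    {best['eval_loss']} at step {best['step']}")
```

On the portion shown here, that minimum falls on the final evaluation (eval_loss ≈ 3.8907 at step 915830), a small but steady improvement over the earlier evaluations at step 763190 (≈ 3.8971) and step 839510 (≈ 3.8914).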