| { |
| "best_metric": 3.906484603881836, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full/transformer/1/checkpoints/checkpoint-305276", |
| "epoch": 0.025000278439663435, |
| "eval_steps": 10, |
| "global_step": 305276, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 11.0634, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8389, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.1855, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 5.9639, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8109, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.6897, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.5891, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.5257, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4716, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.3921, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3531, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.3124, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989938500867749e-05, |
| "loss": 5.269, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.2094, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.1808, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.1489, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.1096, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.076, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0555, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0279, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 4.9988, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 4.9703, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9581, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9289, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.919, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 4.8917, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.8799, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8626, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.828, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756873037409844e-05, |
| "loss": 4.831, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.8005, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.7908, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.7795, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.97233456261715e-05, |
| "loss": 4.7732, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.971495967866098e-05, |
| "loss": 4.7481, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970657373115046e-05, |
| "loss": 4.739, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969818778363994e-05, |
| "loss": 4.7382, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.7204, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.7121, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 4.6943, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.6778, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96562908036948e-05, |
| "loss": 4.6707, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964790485618428e-05, |
| "loss": 4.6477, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963951890867376e-05, |
| "loss": 4.6663, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 4.6397, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9622763392456454e-05, |
| "loss": 4.6409, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9614377444945934e-05, |
| "loss": 4.63, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9605991497435414e-05, |
| "loss": 4.6254, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9597605549924894e-05, |
| "loss": 4.6178, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95892359812181e-05, |
| "loss": 4.5949, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958086641251131e-05, |
| "loss": 4.5868, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957248046500079e-05, |
| "loss": 4.5768, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956409451749027e-05, |
| "loss": 4.5836, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955570856997975e-05, |
| "loss": 4.5639, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.5733, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.5479, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.5422, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5388, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951379521123089e-05, |
| "loss": 4.5331, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950540926372037e-05, |
| "loss": 4.5303, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949702331620985e-05, |
| "loss": 4.5119, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948863736869932e-05, |
| "loss": 4.5097, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9480267799992536e-05, |
| "loss": 4.5168, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9471881852482016e-05, |
| "loss": 4.4974, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946349590497149e-05, |
| "loss": 4.492, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945510995746097e-05, |
| "loss": 4.4907, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9446740388754185e-05, |
| "loss": 4.4901, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9438354441243665e-05, |
| "loss": 4.4692, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9429968493733145e-05, |
| "loss": 4.4815, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421582546222625e-05, |
| "loss": 4.4701, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941321297751584e-05, |
| "loss": 4.4594, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.940482703000532e-05, |
| "loss": 4.4649, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396441082494794e-05, |
| "loss": 4.4523, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.938807151378801e-05, |
| "loss": 4.4667, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.937968556627749e-05, |
| "loss": 4.4639, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.937129961876696e-05, |
| "loss": 4.455, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.936291367125644e-05, |
| "loss": 4.4379, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935452772374592e-05, |
| "loss": 4.4281, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.934615815503914e-05, |
| "loss": 4.4183, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.933777220752861e-05, |
| "loss": 4.4319, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93293862600181e-05, |
| "loss": 4.4329, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932100031250758e-05, |
| "loss": 4.4279, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931261436499706e-05, |
| "loss": 4.4009, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930422841748654e-05, |
| "loss": 4.3998, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929584246997602e-05, |
| "loss": 4.4061, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928747290126923e-05, |
| "loss": 4.3943, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927908695375871e-05, |
| "loss": 4.399, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927070100624819e-05, |
| "loss": 4.3958, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926231505873767e-05, |
| "loss": 4.3963, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925392911122715e-05, |
| "loss": 4.3862, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924554316371663e-05, |
| "loss": 4.3824, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923715721620611e-05, |
| "loss": 4.3703, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922877126869559e-05, |
| "loss": 4.3851, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922041807879253e-05, |
| "loss": 4.3679, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921203213128201e-05, |
| "loss": 4.3549, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920364618377149e-05, |
| "loss": 4.3598, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919526023626097e-05, |
| "loss": 4.3723, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918687428875045e-05, |
| "loss": 4.3661, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917848834123993e-05, |
| "loss": 4.3613, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917010239372941e-05, |
| "loss": 4.3434, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916171644621889e-05, |
| "loss": 4.3586, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91533468775121e-05, |
| "loss": 4.3326, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914497730880531e-05, |
| "loss": 4.35, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913659136129479e-05, |
| "loss": 4.3526, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.912820541378427e-05, |
| "loss": 4.3381, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911981946627375e-05, |
| "loss": 4.3293, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911143351876323e-05, |
| "loss": 4.3071, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910304757125272e-05, |
| "loss": 4.3225, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9094678002545926e-05, |
| "loss": 4.32, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9086292055035406e-05, |
| "loss": 4.3181, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9077906107524886e-05, |
| "loss": 4.3234, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9069520160014366e-05, |
| "loss": 4.3047, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9061150591307575e-05, |
| "loss": 4.3082, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9052764643797055e-05, |
| "loss": 4.3144, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9044378696286535e-05, |
| "loss": 4.3171, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9035992748776015e-05, |
| "loss": 4.3033, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9027606801265495e-05, |
| "loss": 4.302, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9019220853754975e-05, |
| "loss": 4.2961, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9010851285048184e-05, |
| "loss": 4.2971, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.900246533753767e-05, |
| "loss": 4.3029, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899409576883088e-05, |
| "loss": 4.2871, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.898570982132036e-05, |
| "loss": 4.2837, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.897732387380984e-05, |
| "loss": 4.2812, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.896893792629932e-05, |
| "loss": 4.2788, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.89605519787888e-05, |
| "loss": 4.2816, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.895216603127827e-05, |
| "loss": 4.2814, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.894378008376775e-05, |
| "loss": 4.2859, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.893539413625723e-05, |
| "loss": 4.2625, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892702456755045e-05, |
| "loss": 4.2753, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891863862003992e-05, |
| "loss": 4.277, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891025267252941e-05, |
| "loss": 4.2661, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890186672501889e-05, |
| "loss": 4.2817, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889348077750837e-05, |
| "loss": 4.2644, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888511120880158e-05, |
| "loss": 4.2757, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887672526129106e-05, |
| "loss": 4.2663, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886833931378054e-05, |
| "loss": 4.2452, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885995336627002e-05, |
| "loss": 4.2462, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8851583797563226e-05, |
| "loss": 4.2491, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8843197850052706e-05, |
| "loss": 4.2565, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8834811902542186e-05, |
| "loss": 4.2572, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8826425955031666e-05, |
| "loss": 4.2582, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8818056386324875e-05, |
| "loss": 4.2472, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880967043881436e-05, |
| "loss": 4.2416, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880128449130384e-05, |
| "loss": 4.2359, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879289854379332e-05, |
| "loss": 4.2309, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878452897508653e-05, |
| "loss": 4.2393, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877614302757601e-05, |
| "loss": 4.2494, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876775708006549e-05, |
| "loss": 4.2434, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875937113255497e-05, |
| "loss": 4.2233, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875098518504445e-05, |
| "loss": 4.2263, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.191549301147461, |
| "eval_runtime": 619.6834, |
| "eval_samples_per_second": 615.784, |
| "eval_steps_per_second": 19.244, |
| "step": 76319 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.874261561633766e-05, |
| "loss": 4.2271, |
| "step": 76800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.873422966882714e-05, |
| "loss": 4.2236, |
| "step": 77312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.872584372131662e-05, |
| "loss": 4.2196, |
| "step": 77824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.87174577738061e-05, |
| "loss": 4.2196, |
| "step": 78336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.870907182629558e-05, |
| "loss": 4.2101, |
| "step": 78848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.870068587878506e-05, |
| "loss": 4.1946, |
| "step": 79360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.869229993127454e-05, |
| "loss": 4.2003, |
| "step": 79872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8683913983764027e-05, |
| "loss": 4.2123, |
| "step": 80384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8675528036253507e-05, |
| "loss": 4.2026, |
| "step": 80896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8667142088742986e-05, |
| "loss": 4.2049, |
| "step": 81408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.865875614123246e-05, |
| "loss": 4.2058, |
| "step": 81920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8650386572525676e-05, |
| "loss": 4.2061, |
| "step": 82432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8642017003818885e-05, |
| "loss": 4.1969, |
| "step": 82944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8633631056308365e-05, |
| "loss": 4.1974, |
| "step": 83456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8625245108797845e-05, |
| "loss": 4.1931, |
| "step": 83968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8616859161287324e-05, |
| "loss": 4.1777, |
| "step": 84480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8608473213776804e-05, |
| "loss": 4.186, |
| "step": 84992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.860008726626628e-05, |
| "loss": 4.1901, |
| "step": 85504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8591701318755764e-05, |
| "loss": 4.1885, |
| "step": 86016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.858333175004898e-05, |
| "loss": 4.1891, |
| "step": 86528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.857494580253846e-05, |
| "loss": 4.1768, |
| "step": 87040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.856655985502793e-05, |
| "loss": 4.1852, |
| "step": 87552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.855817390751741e-05, |
| "loss": 4.1732, |
| "step": 88064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.854980433881063e-05, |
| "loss": 4.1842, |
| "step": 88576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.854141839130011e-05, |
| "loss": 4.1749, |
| "step": 89088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.853303244378958e-05, |
| "loss": 4.1714, |
| "step": 89600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.852464649627906e-05, |
| "loss": 4.1768, |
| "step": 90112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.851627692757228e-05, |
| "loss": 4.1561, |
| "step": 90624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.850789098006175e-05, |
| "loss": 4.1743, |
| "step": 91136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849950503255123e-05, |
| "loss": 4.1566, |
| "step": 91648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849111908504072e-05, |
| "loss": 4.1625, |
| "step": 92160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8482749516333934e-05, |
| "loss": 4.1607, |
| "step": 92672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.847436356882341e-05, |
| "loss": 4.1687, |
| "step": 93184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.846597762131289e-05, |
| "loss": 4.1562, |
| "step": 93696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.845759167380237e-05, |
| "loss": 4.1538, |
| "step": 94208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.844920572629185e-05, |
| "loss": 4.1638, |
| "step": 94720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8440836157585056e-05, |
| "loss": 4.1531, |
| "step": 95232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8432450210074536e-05, |
| "loss": 4.1605, |
| "step": 95744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8424064262564016e-05, |
| "loss": 4.1453, |
| "step": 96256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8415678315053496e-05, |
| "loss": 4.1368, |
| "step": 96768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8407308746346705e-05, |
| "loss": 4.1395, |
| "step": 97280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8398922798836185e-05, |
| "loss": 4.133, |
| "step": 97792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839053685132567e-05, |
| "loss": 4.1497, |
| "step": 98304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.838215090381515e-05, |
| "loss": 4.137, |
| "step": 98816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.837378133510836e-05, |
| "loss": 4.1442, |
| "step": 99328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.836539538759784e-05, |
| "loss": 4.1377, |
| "step": 99840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8357025818891057e-05, |
| "loss": 4.1415, |
| "step": 100352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834863987138053e-05, |
| "loss": 4.1418, |
| "step": 100864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834025392387001e-05, |
| "loss": 4.1192, |
| "step": 101376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.833186797635949e-05, |
| "loss": 4.1246, |
| "step": 101888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.83234984076527e-05, |
| "loss": 4.1197, |
| "step": 102400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.831511246014218e-05, |
| "loss": 4.1356, |
| "step": 102912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.830672651263166e-05, |
| "loss": 4.1125, |
| "step": 103424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.829834056512114e-05, |
| "loss": 4.1358, |
| "step": 103936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8289954617610625e-05, |
| "loss": 4.1111, |
| "step": 104448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8281568670100105e-05, |
| "loss": 4.1101, |
| "step": 104960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8273182722589585e-05, |
| "loss": 4.1193, |
| "step": 105472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8264796775079065e-05, |
| "loss": 4.1143, |
| "step": 105984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8256427206372274e-05, |
| "loss": 4.1136, |
| "step": 106496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8248041258861754e-05, |
| "loss": 4.1071, |
| "step": 107008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8239655311351234e-05, |
| "loss": 4.1029, |
| "step": 107520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8231269363840714e-05, |
| "loss": 4.1159, |
| "step": 108032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.822289979513392e-05, |
| "loss": 4.104, |
| "step": 108544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.82145138476234e-05, |
| "loss": 4.1047, |
| "step": 109056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.820612790011288e-05, |
| "loss": 4.1037, |
| "step": 109568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.819774195260236e-05, |
| "loss": 4.1075, |
| "step": 110080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.818937238389558e-05, |
| "loss": 4.0874, |
| "step": 110592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.818098643638506e-05, |
| "loss": 4.1088, |
| "step": 111104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.817260048887454e-05, |
| "loss": 4.0949, |
| "step": 111616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.816421454136402e-05, |
| "loss": 4.097, |
| "step": 112128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.815584497265723e-05, |
| "loss": 4.103, |
| "step": 112640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.814745902514671e-05, |
| "loss": 4.094, |
| "step": 113152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.813907307763619e-05, |
| "loss": 4.1096, |
| "step": 113664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.813068713012567e-05, |
| "loss": 4.1144, |
| "step": 114176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.812231756141888e-05, |
| "loss": 4.1067, |
| "step": 114688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.811393161390836e-05, |
| "loss": 4.092, |
| "step": 115200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.810554566639784e-05, |
| "loss": 4.0846, |
| "step": 115712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8097176097691046e-05, |
| "loss": 4.0798, |
| "step": 116224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808879015018053e-05, |
| "loss": 4.0936, |
| "step": 116736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808040420267001e-05, |
| "loss": 4.1053, |
| "step": 117248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.807201825515949e-05, |
| "loss": 4.0948, |
| "step": 117760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.806363230764897e-05, |
| "loss": 4.0796, |
| "step": 118272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.805524636013845e-05, |
| "loss": 4.074, |
| "step": 118784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.804686041262793e-05, |
| "loss": 4.0859, |
| "step": 119296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.803849084392114e-05, |
| "loss": 4.0728, |
| "step": 119808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.803010489641062e-05, |
| "loss": 4.0812, |
| "step": 120320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.80217189489001e-05, |
| "loss": 4.0833, |
| "step": 120832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.801333300138958e-05, |
| "loss": 4.0882, |
| "step": 121344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.800494705387906e-05, |
| "loss": 4.0773, |
| "step": 121856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7996561106368535e-05, |
| "loss": 4.0783, |
| "step": 122368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7988175158858015e-05, |
| "loss": 4.0692, |
| "step": 122880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797980559015123e-05, |
| "loss": 4.0806, |
| "step": 123392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797141964264071e-05, |
| "loss": 4.0779, |
| "step": 123904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.796303369513019e-05, |
| "loss": 4.058, |
| "step": 124416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.795464774761967e-05, |
| "loss": 4.0683, |
| "step": 124928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.794626180010915e-05, |
| "loss": 4.0802, |
| "step": 125440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.793787585259863e-05, |
| "loss": 4.0854, |
| "step": 125952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.792948990508811e-05, |
| "loss": 4.07, |
| "step": 126464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.792110395757759e-05, |
| "loss": 4.0636, |
| "step": 126976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.79127343888708e-05, |
| "loss": 4.0766, |
| "step": 127488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.790434844136028e-05, |
| "loss": 4.0551, |
| "step": 128000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.789596249384976e-05, |
| "loss": 4.0702, |
| "step": 128512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.788757654633924e-05, |
| "loss": 4.0768, |
| "step": 129024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.787920697763245e-05, |
| "loss": 4.0672, |
| "step": 129536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7870821030121935e-05, |
| "loss": 4.0648, |
| "step": 130048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7862435082611415e-05, |
| "loss": 4.045, |
| "step": 130560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7854049135100895e-05, |
| "loss": 4.0495, |
| "step": 131072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7845679566394104e-05, |
| "loss": 4.0538, |
| "step": 131584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7837293618883584e-05, |
| "loss": 4.055, |
| "step": 132096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.782892405017679e-05, |
| "loss": 4.0618, |
| "step": 132608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.782053810266627e-05, |
| "loss": 4.05, |
| "step": 133120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.781215215515575e-05, |
| "loss": 4.046, |
| "step": 133632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.780376620764523e-05, |
| "loss": 4.0582, |
| "step": 134144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.779538026013471e-05, |
| "loss": 4.0647, |
| "step": 134656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.778699431262419e-05, |
| "loss": 4.0511, |
| "step": 135168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777860836511367e-05, |
| "loss": 4.0573, |
| "step": 135680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777023879640689e-05, |
| "loss": 4.0472, |
| "step": 136192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.776185284889637e-05, |
| "loss": 4.0509, |
| "step": 136704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.775346690138585e-05, |
| "loss": 4.0575, |
| "step": 137216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.774509733267906e-05, |
| "loss": 4.0408, |
| "step": 137728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.773671138516854e-05, |
| "loss": 4.042, |
| "step": 138240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.772832543765802e-05, |
| "loss": 4.0411, |
| "step": 138752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.77199394901475e-05, |
| "loss": 4.0401, |
| "step": 139264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.771155354263698e-05, |
| "loss": 4.0433, |
| "step": 139776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.770316759512646e-05, |
| "loss": 4.0475, |
| "step": 140288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.769478164761594e-05, |
| "loss": 4.0522, |
| "step": 140800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.768639570010542e-05, |
| "loss": 4.0327, |
| "step": 141312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7678042510202355e-05, |
| "loss": 4.0393, |
| "step": 141824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.766965656269184e-05, |
| "loss": 4.0527, |
| "step": 142336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.766127061518132e-05, |
| "loss": 4.0378, |
| "step": 142848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.76528846676708e-05, |
| "loss": 4.0508, |
| "step": 143360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.764449872016028e-05, |
| "loss": 4.0371, |
| "step": 143872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.763612915145349e-05, |
| "loss": 4.0521, |
| "step": 144384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.762774320394297e-05, |
| "loss": 4.0418, |
| "step": 144896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761935725643245e-05, |
| "loss": 4.0255, |
| "step": 145408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761097130892193e-05, |
| "loss": 4.026, |
| "step": 145920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.760258536141141e-05, |
| "loss": 4.0288, |
| "step": 146432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.759419941390089e-05, |
| "loss": 4.0412, |
| "step": 146944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.75858298451941e-05, |
| "loss": 4.0375, |
| "step": 147456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.757744389768358e-05, |
| "loss": 4.0413, |
| "step": 147968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.756905795017306e-05, |
| "loss": 4.0337, |
| "step": 148480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.756067200266254e-05, |
| "loss": 4.028, |
| "step": 148992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.755228605515202e-05, |
| "loss": 4.0258, |
| "step": 149504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.75439001076415e-05, |
| "loss": 4.0199, |
| "step": 150016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.753551416013098e-05, |
| "loss": 4.0291, |
| "step": 150528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.752712821262046e-05, |
| "loss": 4.0427, |
| "step": 151040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.751875864391367e-05, |
| "loss": 4.0292, |
| "step": 151552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.751037269640315e-05, |
| "loss": 4.0234, |
| "step": 152064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.750198674889263e-05, |
| "loss": 4.0206, |
| "step": 152576 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.022192001342773, |
| "eval_runtime": 552.4226, |
| "eval_samples_per_second": 690.759, |
| "eval_steps_per_second": 21.587, |
| "step": 152638 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493617180185845e-05, |
| "loss": 4.0278, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748523123267532e-05, |
| "loss": 4.0254, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74768452851648e-05, |
| "loss": 4.0151, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746845933765428e-05, |
| "loss": 4.0183, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7460073390143764e-05, |
| "loss": 4.0168, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451687442633244e-05, |
| "loss": 3.998, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443301495122724e-05, |
| "loss": 4.0059, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434915547612204e-05, |
| "loss": 4.0226, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426529600101684e-05, |
| "loss": 4.0044, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741816003139489e-05, |
| "loss": 4.0108, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740977408388437e-05, |
| "loss": 4.018, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740138813637385e-05, |
| "loss": 4.0152, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739300218886333e-05, |
| "loss": 4.0054, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738461624135281e-05, |
| "loss": 4.0104, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.737624667264602e-05, |
| "loss": 4.0036, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73678607251355e-05, |
| "loss": 3.9934, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735947477762498e-05, |
| "loss": 4.0003, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735108883011446e-05, |
| "loss": 4.005, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734270288260394e-05, |
| "loss": 4.0065, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733431693509343e-05, |
| "loss": 4.0082, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732593098758291e-05, |
| "loss": 3.9975, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731754504007238e-05, |
| "loss": 4.0078, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730919185016933e-05, |
| "loss": 3.9952, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730080590265881e-05, |
| "loss": 4.0054, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729241995514829e-05, |
| "loss": 3.9987, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728403400763777e-05, |
| "loss": 3.9974, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727564806012725e-05, |
| "loss": 4.0045, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726726211261673e-05, |
| "loss": 3.9832, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7258892543909936e-05, |
| "loss": 4.0004, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7250506596399416e-05, |
| "loss": 3.9868, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7242120648888896e-05, |
| "loss": 3.9908, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723373470137838e-05, |
| "loss": 3.9928, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7225348753867856e-05, |
| "loss": 3.9964, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7216962806357336e-05, |
| "loss": 3.9919, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7208576858846816e-05, |
| "loss": 3.9846, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720020729014003e-05, |
| "loss": 3.9965, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7191821342629505e-05, |
| "loss": 3.9881, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7183435395118985e-05, |
| "loss": 3.9991, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7175049447608465e-05, |
| "loss": 3.9835, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7166663500097945e-05, |
| "loss": 3.9723, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7158277552587424e-05, |
| "loss": 3.9753, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7149907983880634e-05, |
| "loss": 3.9753, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714152203637012e-05, |
| "loss": 3.9939, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71331360888596e-05, |
| "loss": 3.9725, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712475014134908e-05, |
| "loss": 3.9874, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711638057264229e-05, |
| "loss": 3.9817, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710799462513177e-05, |
| "loss": 3.9793, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709960867762125e-05, |
| "loss": 3.9862, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709122273011073e-05, |
| "loss": 3.9636, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708283678260021e-05, |
| "loss": 3.9688, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707445083508969e-05, |
| "loss": 3.964, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70660812663829e-05, |
| "loss": 3.9841, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705769531887238e-05, |
| "loss": 3.9628, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704930937136186e-05, |
| "loss": 3.9816, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704092342385134e-05, |
| "loss": 3.9613, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7032553855144554e-05, |
| "loss": 3.9605, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7024167907634034e-05, |
| "loss": 3.9654, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015781960123514e-05, |
| "loss": 3.9631, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7007396012612994e-05, |
| "loss": 3.9687, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69990264439062e-05, |
| "loss": 3.9636, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699064049639568e-05, |
| "loss": 3.9515, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698225454888516e-05, |
| "loss": 3.968, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697386860137464e-05, |
| "loss": 3.9578, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696548265386412e-05, |
| "loss": 3.9631, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69570967063536e-05, |
| "loss": 3.9569, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694871075884308e-05, |
| "loss": 3.9655, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694032481133256e-05, |
| "loss": 3.9433, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693195524262577e-05, |
| "loss": 3.9665, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692356929511526e-05, |
| "loss": 3.9502, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691518334760474e-05, |
| "loss": 3.9581, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690679740009422e-05, |
| "loss": 3.9589, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689842783138743e-05, |
| "loss": 3.9552, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689004188387691e-05, |
| "loss": 3.9687, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688165593636639e-05, |
| "loss": 3.9752, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6873286367659596e-05, |
| "loss": 3.9698, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6864900420149076e-05, |
| "loss": 3.9583, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856514472638556e-05, |
| "loss": 3.948, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6848128525128036e-05, |
| "loss": 3.9396, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6839758956421245e-05, |
| "loss": 3.9599, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6831373008910725e-05, |
| "loss": 3.9666, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6822987061400205e-05, |
| "loss": 3.9627, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681460111388969e-05, |
| "loss": 3.9438, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68062315451829e-05, |
| "loss": 3.9415, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679784559767238e-05, |
| "loss": 3.9496, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678945965016186e-05, |
| "loss": 3.9371, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678107370265134e-05, |
| "loss": 3.9458, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6772687755140814e-05, |
| "loss": 3.9544, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6764301807630294e-05, |
| "loss": 3.9516, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6755915860119774e-05, |
| "loss": 3.9472, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674754629141298e-05, |
| "loss": 3.9509, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673916034390246e-05, |
| "loss": 3.938, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673077439639194e-05, |
| "loss": 3.9527, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.672238844888143e-05, |
| "loss": 3.9451, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.671400250137091e-05, |
| "loss": 3.9347, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.670563293266412e-05, |
| "loss": 3.9395, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66972469851536e-05, |
| "loss": 3.9502, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668886103764308e-05, |
| "loss": 3.9572, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668047509013256e-05, |
| "loss": 3.9419, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667210552142577e-05, |
| "loss": 3.9412, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.666371957391525e-05, |
| "loss": 3.9483, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665533362640473e-05, |
| "loss": 3.9304, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664694767889421e-05, |
| "loss": 3.9471, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663856173138369e-05, |
| "loss": 3.952, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66301921626769e-05, |
| "loss": 3.9406, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6621806215166383e-05, |
| "loss": 3.9465, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6613420267655863e-05, |
| "loss": 3.9239, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660503432014534e-05, |
| "loss": 3.9229, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659666475143855e-05, |
| "loss": 3.9323, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658827880392803e-05, |
| "loss": 3.9338, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657989285641751e-05, |
| "loss": 3.9424, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657150690890699e-05, |
| "loss": 3.9292, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656312096139647e-05, |
| "loss": 3.9248, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655475139268968e-05, |
| "loss": 3.9334, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654636544517916e-05, |
| "loss": 3.9438, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653797949766864e-05, |
| "loss": 3.936, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652959355015812e-05, |
| "loss": 3.9417, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65212076026476e-05, |
| "loss": 3.9267, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651283803394082e-05, |
| "loss": 3.9295, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65044520864303e-05, |
| "loss": 3.9427, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649606613891978e-05, |
| "loss": 3.9235, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648768019140926e-05, |
| "loss": 3.9264, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479310622702466e-05, |
| "loss": 3.9256, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6470924675191946e-05, |
| "loss": 3.9232, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462538727681426e-05, |
| "loss": 3.9271, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6454152780170906e-05, |
| "loss": 3.9285, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6445766832660386e-05, |
| "loss": 3.9394, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6437397263953595e-05, |
| "loss": 3.9188, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6429011316443075e-05, |
| "loss": 3.9221, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6420625368932555e-05, |
| "loss": 3.9423, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6412239421422035e-05, |
| "loss": 3.9231, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640385347391152e-05, |
| "loss": 3.9371, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639548390520473e-05, |
| "loss": 3.9246, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638709795769421e-05, |
| "loss": 3.9394, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637871201018369e-05, |
| "loss": 3.9295, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637032606267317e-05, |
| "loss": 3.9158, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636195649396638e-05, |
| "loss": 3.9168, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635357054645586e-05, |
| "loss": 3.9169, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634518459894534e-05, |
| "loss": 3.9277, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633679865143482e-05, |
| "loss": 3.932, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632842908272803e-05, |
| "loss": 3.9291, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632004313521751e-05, |
| "loss": 3.925, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631165718770699e-05, |
| "loss": 3.918, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6303271240196475e-05, |
| "loss": 3.9183, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6294901671489684e-05, |
| "loss": 3.9094, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6286515723979164e-05, |
| "loss": 3.9212, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6278129776468644e-05, |
| "loss": 3.9325, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626974382895812e-05, |
| "loss": 3.9197, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.62613578814476e-05, |
| "loss": 3.9161, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625297193393708e-05, |
| "loss": 3.9134, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9479596614837646, |
| "eval_runtime": 568.4433, |
| "eval_samples_per_second": 671.291, |
| "eval_steps_per_second": 20.978, |
| "step": 228957 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.624460236523029e-05, |
| "loss": 3.9188, |
| "step": 229376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6236216417719766e-05, |
| "loss": 3.9229, |
| "step": 229888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6227830470209246e-05, |
| "loss": 3.9108, |
| "step": 230400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6219444522698726e-05, |
| "loss": 3.9123, |
| "step": 230912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.621105857518821e-05, |
| "loss": 3.9146, |
| "step": 231424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.620267262767769e-05, |
| "loss": 3.8934, |
| "step": 231936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.619428668016717e-05, |
| "loss": 3.9017, |
| "step": 232448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.618590073265665e-05, |
| "loss": 3.9158, |
| "step": 232960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.617753116394986e-05, |
| "loss": 3.9039, |
| "step": 233472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.616916159524307e-05, |
| "loss": 3.91, |
| "step": 233984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.616077564773255e-05, |
| "loss": 3.912, |
| "step": 234496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.615238970022203e-05, |
| "loss": 3.9079, |
| "step": 235008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.614400375271151e-05, |
| "loss": 3.9048, |
| "step": 235520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.613561780520099e-05, |
| "loss": 3.9116, |
| "step": 236032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.612723185769047e-05, |
| "loss": 3.8999, |
| "step": 236544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.611884591017995e-05, |
| "loss": 3.8963, |
| "step": 237056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.611045996266943e-05, |
| "loss": 3.8958, |
| "step": 237568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.610209039396265e-05, |
| "loss": 3.9042, |
| "step": 238080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.609370444645213e-05, |
| "loss": 3.9053, |
| "step": 238592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6085318498941607e-05, |
| "loss": 3.91, |
| "step": 239104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6076932551431087e-05, |
| "loss": 3.8984, |
| "step": 239616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6068546603920566e-05, |
| "loss": 3.9102, |
| "step": 240128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6060177035213776e-05, |
| "loss": 3.8989, |
| "step": 240640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6051791087703256e-05, |
| "loss": 3.9004, |
| "step": 241152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6043405140192735e-05, |
| "loss": 3.9019, |
| "step": 241664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6035019192682215e-05, |
| "loss": 3.8977, |
| "step": 242176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6026649623975425e-05, |
| "loss": 3.9052, |
| "step": 242688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6018263676464904e-05, |
| "loss": 3.8874, |
| "step": 243200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6009894107758114e-05, |
| "loss": 3.9054, |
| "step": 243712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.60015081602476e-05, |
| "loss": 3.8897, |
| "step": 244224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.599312221273708e-05, |
| "loss": 3.8926, |
| "step": 244736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.598473626522656e-05, |
| "loss": 3.8971, |
| "step": 245248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.597635031771604e-05, |
| "loss": 3.906, |
| "step": 245760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.596796437020552e-05, |
| "loss": 3.891, |
| "step": 246272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5959578422695e-05, |
| "loss": 3.8938, |
| "step": 246784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.595119247518448e-05, |
| "loss": 3.8965, |
| "step": 247296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.594282290647769e-05, |
| "loss": 3.8934, |
| "step": 247808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.593443695896717e-05, |
| "loss": 3.9068, |
| "step": 248320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.592605101145665e-05, |
| "loss": 3.8901, |
| "step": 248832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.591766506394613e-05, |
| "loss": 3.8786, |
| "step": 249344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.59092791164356e-05, |
| "loss": 3.885, |
| "step": 249856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.590090954772882e-05, |
| "loss": 3.8778, |
| "step": 250368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.58925236002183e-05, |
| "loss": 3.9011, |
| "step": 250880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5884137652707785e-05, |
| "loss": 3.8845, |
| "step": 251392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.587575170519726e-05, |
| "loss": 3.8947, |
| "step": 251904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5867382136490474e-05, |
| "loss": 3.8882, |
| "step": 252416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5858996188979954e-05, |
| "loss": 3.8894, |
| "step": 252928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585061024146943e-05, |
| "loss": 3.8943, |
| "step": 253440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.584222429395891e-05, |
| "loss": 3.8742, |
| "step": 253952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.583383834644839e-05, |
| "loss": 3.8744, |
| "step": 254464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.58254687777416e-05, |
| "loss": 3.8791, |
| "step": 254976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5817082830231076e-05, |
| "loss": 3.8912, |
| "step": 255488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5808696882720556e-05, |
| "loss": 3.8724, |
| "step": 256000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5800310935210036e-05, |
| "loss": 3.8892, |
| "step": 256512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.579194136650325e-05, |
| "loss": 3.8728, |
| "step": 257024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.578355541899273e-05, |
| "loss": 3.8686, |
| "step": 257536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.577516947148221e-05, |
| "loss": 3.8823, |
| "step": 258048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.576678352397169e-05, |
| "loss": 3.8687, |
| "step": 258560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.57584139552649e-05, |
| "loss": 3.8855, |
| "step": 259072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.575002800775438e-05, |
| "loss": 3.8784, |
| "step": 259584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.574164206024386e-05, |
| "loss": 3.8631, |
| "step": 260096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.573325611273334e-05, |
| "loss": 3.8774, |
| "step": 260608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.572488654402655e-05, |
| "loss": 3.8699, |
| "step": 261120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.571650059651603e-05, |
| "loss": 3.8787, |
| "step": 261632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.570811464900551e-05, |
| "loss": 3.8667, |
| "step": 262144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.569972870149499e-05, |
| "loss": 3.8787, |
| "step": 262656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5691342753984476e-05, |
| "loss": 3.8574, |
| "step": 263168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5682973185277685e-05, |
| "loss": 3.8785, |
| "step": 263680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5674587237767165e-05, |
| "loss": 3.8626, |
| "step": 264192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5666201290256645e-05, |
| "loss": 3.8695, |
| "step": 264704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5657815342746125e-05, |
| "loss": 3.8736, |
| "step": 265216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5649445774039334e-05, |
| "loss": 3.8725, |
| "step": 265728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5641059826528814e-05, |
| "loss": 3.8777, |
| "step": 266240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5632673879018294e-05, |
| "loss": 3.8907, |
| "step": 266752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5624287931507774e-05, |
| "loss": 3.884, |
| "step": 267264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5615901983997254e-05, |
| "loss": 3.8704, |
| "step": 267776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.560753241529046e-05, |
| "loss": 3.8659, |
| "step": 268288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559914646777994e-05, |
| "loss": 3.8593, |
| "step": 268800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.559076052026943e-05, |
| "loss": 3.874, |
| "step": 269312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.558237457275891e-05, |
| "loss": 3.8838, |
| "step": 269824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.557400500405212e-05, |
| "loss": 3.8786, |
| "step": 270336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.55656190565416e-05, |
| "loss": 3.8592, |
| "step": 270848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.555723310903108e-05, |
| "loss": 3.8583, |
| "step": 271360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554884716152056e-05, |
| "loss": 3.8709, |
| "step": 271872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554047759281377e-05, |
| "loss": 3.8495, |
| "step": 272384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.553209164530325e-05, |
| "loss": 3.865, |
| "step": 272896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.552370569779273e-05, |
| "loss": 3.8695, |
| "step": 273408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.551531975028221e-05, |
| "loss": 3.8695, |
| "step": 273920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.550693380277169e-05, |
| "loss": 3.8671, |
| "step": 274432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.54985642340649e-05, |
| "loss": 3.8718, |
| "step": 274944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5490178286554384e-05, |
| "loss": 3.8547, |
| "step": 275456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5481792339043864e-05, |
| "loss": 3.867, |
| "step": 275968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5473406391533343e-05, |
| "loss": 3.8626, |
| "step": 276480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5465020444022823e-05, |
| "loss": 3.8575, |
| "step": 276992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.545665087531603e-05, |
| "loss": 3.8564, |
| "step": 277504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.544826492780551e-05, |
| "loss": 3.8727, |
| "step": 278016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.543987898029499e-05, |
| "loss": 3.8723, |
| "step": 278528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.543149303278447e-05, |
| "loss": 3.8671, |
| "step": 279040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.542312346407768e-05, |
| "loss": 3.8592, |
| "step": 279552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.541473751656716e-05, |
| "loss": 3.8693, |
| "step": 280064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.540635156905664e-05, |
| "loss": 3.8535, |
| "step": 280576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.539796562154612e-05, |
| "loss": 3.8646, |
| "step": 281088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538959605283933e-05, |
| "loss": 3.8725, |
| "step": 281600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538121010532882e-05, |
| "loss": 3.859, |
| "step": 282112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.53728241578183e-05, |
| "loss": 3.8731, |
| "step": 282624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.536443821030778e-05, |
| "loss": 3.8412, |
| "step": 283136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5356068641600986e-05, |
| "loss": 3.8454, |
| "step": 283648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5347682694090466e-05, |
| "loss": 3.8549, |
| "step": 284160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5339296746579946e-05, |
| "loss": 3.856, |
| "step": 284672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5330910799069426e-05, |
| "loss": 3.8652, |
| "step": 285184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5322541230362635e-05, |
| "loss": 3.8502, |
| "step": 285696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5314155282852115e-05, |
| "loss": 3.8467, |
| "step": 286208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5305769335341595e-05, |
| "loss": 3.856, |
| "step": 286720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5297383387831075e-05, |
| "loss": 3.868, |
| "step": 287232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5288997440320555e-05, |
| "loss": 3.8599, |
| "step": 287744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.528062787161377e-05, |
| "loss": 3.8654, |
| "step": 288256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.527224192410325e-05, |
| "loss": 3.849, |
| "step": 288768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.526385597659273e-05, |
| "loss": 3.8518, |
| "step": 289280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.525547002908221e-05, |
| "loss": 3.8684, |
| "step": 289792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.524710046037542e-05, |
| "loss": 3.8491, |
| "step": 290304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.52387145128649e-05, |
| "loss": 3.8464, |
| "step": 290816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.523032856535438e-05, |
| "loss": 3.8499, |
| "step": 291328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.522194261784386e-05, |
| "loss": 3.8455, |
| "step": 291840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.521357304913707e-05, |
| "loss": 3.8545, |
| "step": 292352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.520518710162655e-05, |
| "loss": 3.8536, |
| "step": 292864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.519680115411603e-05, |
| "loss": 3.8655, |
| "step": 293376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.518841520660551e-05, |
| "loss": 3.8454, |
| "step": 293888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5180045637898725e-05, |
| "loss": 3.8429, |
| "step": 294400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5171659690388204e-05, |
| "loss": 3.8703, |
| "step": 294912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5163273742877684e-05, |
| "loss": 3.8464, |
| "step": 295424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.515488779536716e-05, |
| "loss": 3.8594, |
| "step": 295936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5146518226660373e-05, |
| "loss": 3.8551, |
| "step": 296448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5138132279149853e-05, |
| "loss": 3.8633, |
| "step": 296960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5129746331639333e-05, |
| "loss": 3.8553, |
| "step": 297472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5121360384128807e-05, |
| "loss": 3.846, |
| "step": 297984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5112974436618287e-05, |
| "loss": 3.8403, |
| "step": 298496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.51046048679115e-05, |
| "loss": 3.8428, |
| "step": 299008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5096218920400976e-05, |
| "loss": 3.853, |
| "step": 299520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.508783297289046e-05, |
| "loss": 3.8612, |
| "step": 300032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.507944702537994e-05, |
| "loss": 3.8532, |
| "step": 300544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.507107745667316e-05, |
| "loss": 3.8569, |
| "step": 301056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.506269150916263e-05, |
| "loss": 3.8424, |
| "step": 301568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.505430556165211e-05, |
| "loss": 3.8482, |
| "step": 302080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.504591961414159e-05, |
| "loss": 3.8351, |
| "step": 302592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.50375500454348e-05, |
| "loss": 3.8513, |
| "step": 303104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.502916409792428e-05, |
| "loss": 3.8542, |
| "step": 303616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.502077815041376e-05, |
| "loss": 3.855, |
| "step": 304128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.501239220290324e-05, |
| "loss": 3.8434, |
| "step": 304640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.500402263419645e-05, |
| "loss": 3.8415, |
| "step": 305152 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.906484603881836, |
| "eval_runtime": 580.3968, |
| "eval_samples_per_second": 657.466, |
| "eval_steps_per_second": 20.546, |
| "step": 305276 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 2.122416208464814e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |
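
The file above is the standard `trainer_state.json` layout: a `log_history` list of per-step records (training entries carry `loss`, evaluation entries carry `eval_loss` and throughput fields), followed by run-level settings such as `logging_steps` and `max_steps`. As a minimal sketch of how to consume it, the snippet below loads the state with only the standard library and summarizes the loss curve; the checkpoint path is hypothetical and should be pointed at the actual checkpoint directory.

```python
import json

# Hypothetical path: substitute the real checkpoint directory for this run.
with open("checkpoint-305276/trainer_state.json") as f:
    state = json.load(f)

# Training entries log "loss"; evaluation entries log "eval_loss" instead.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"train points logged: {len(train_logs)}")
print(f"train loss: {train_logs[0]['loss']} at step {train_logs[0]['step']} "
      f"-> {train_logs[-1]['loss']} at step {train_logs[-1]['step']}")
for e in eval_logs:
    print(f"eval at step {e['step']}: eval_loss={e['eval_loss']:.4f}")
```

On this file the script would report the training loss falling from 11.0634 at step 1 to 3.8415 at step 305152, with a single evaluation record at step 305276 (eval_loss 3.9065, matching `best_metric`).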