| { |
| "best_metric": 3.996596097946167, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/passive/lstm/0/checkpoints/checkpoint-1679040", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 1679040, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.8198, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 7.5522, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 7.0566, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 6.995, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 6.9475, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 6.9066, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 6.7464, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 6.6405, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993292879871958e-05, |
| "loss": 6.5391, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992454285120906e-05, |
| "loss": 6.457, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.991615690369854e-05, |
| "loss": 6.4004, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990777095618801e-05, |
| "loss": 6.3311, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989938500867749e-05, |
| "loss": 6.2652, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 6.1988, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 6.1445, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 6.0779, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 6.0424, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.9952, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.9566, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.9196, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.8897, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.982391148108281e-05, |
| "loss": 5.8524, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 5.8257, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 5.785, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798786396158716e-05, |
| "loss": 5.7734, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 5.7366, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 5.7188, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 5.6818, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 5.6646, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 5.6459, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748470711095595e-05, |
| "loss": 5.6213, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.974008476358507e-05, |
| "loss": 5.6129, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 5.5862, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 5.5697, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 5.5626, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9706557352346724e-05, |
| "loss": 5.5386, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9698171404836204e-05, |
| "loss": 5.5296, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968980183612942e-05, |
| "loss": 5.4843, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96814158886189e-05, |
| "loss": 5.4893, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967302994110837e-05, |
| "loss": 5.4519, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966464399359785e-05, |
| "loss": 5.4571, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965625804608733e-05, |
| "loss": 5.4336, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964787209857681e-05, |
| "loss": 5.4322, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963948615106629e-05, |
| "loss": 5.4024, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963110020355577e-05, |
| "loss": 5.4078, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962273063484898e-05, |
| "loss": 5.3962, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.961434468733847e-05, |
| "loss": 5.3903, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960595873982795e-05, |
| "loss": 5.3811, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959757279231743e-05, |
| "loss": 5.3518, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958920322361064e-05, |
| "loss": 5.3426, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958081727610012e-05, |
| "loss": 5.3434, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95724313285896e-05, |
| "loss": 5.3293, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956404538107908e-05, |
| "loss": 5.324, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955567581237229e-05, |
| "loss": 5.2998, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954728986486177e-05, |
| "loss": 5.2981, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953890391735125e-05, |
| "loss": 5.2777, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530517969840727e-05, |
| "loss": 5.298, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522148401133936e-05, |
| "loss": 5.2524, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951376245362342e-05, |
| "loss": 5.2679, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95053765061129e-05, |
| "loss": 5.2567, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949699055860238e-05, |
| "loss": 5.2381, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948862098989559e-05, |
| "loss": 5.2352, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948023504238507e-05, |
| "loss": 5.2224, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947184909487455e-05, |
| "loss": 5.2119, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946346314736403e-05, |
| "loss": 5.2041, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945507719985351e-05, |
| "loss": 5.2186, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944670763114672e-05, |
| "loss": 5.1918, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94383216836362e-05, |
| "loss": 5.1825, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942993573612568e-05, |
| "loss": 5.1742, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942154978861516e-05, |
| "loss": 5.1712, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941318021990837e-05, |
| "loss": 5.1775, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404794272397856e-05, |
| "loss": 5.1722, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396408324887336e-05, |
| "loss": 5.148, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388022377376816e-05, |
| "loss": 5.1631, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379636429866296e-05, |
| "loss": 5.1554, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371283239963234e-05, |
| "loss": 5.1293, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.936291367125644e-05, |
| "loss": 5.1272, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935452772374592e-05, |
| "loss": 5.1171, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93461417762354e-05, |
| "loss": 5.1092, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.933775582872488e-05, |
| "loss": 5.1037, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932936988121436e-05, |
| "loss": 5.1087, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932098393370384e-05, |
| "loss": 5.1041, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931259798619332e-05, |
| "loss": 5.1052, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930421203868281e-05, |
| "loss": 5.0813, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 5.0642, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 5.0683, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 5.0714, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 5.0689, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92622823011302e-05, |
| "loss": 5.0524, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925389635361968e-05, |
| "loss": 5.0499, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.92455267849129e-05, |
| "loss": 5.0496, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923714083740238e-05, |
| "loss": 5.0403, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922875488989185e-05, |
| "loss": 5.0311, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922036894238133e-05, |
| "loss": 5.0276, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921199937367455e-05, |
| "loss": 5.0154, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920361342616403e-05, |
| "loss": 5.0273, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919522747865351e-05, |
| "loss": 5.0144, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918684153114299e-05, |
| "loss": 4.9927, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917845558363247e-05, |
| "loss": 4.997, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9170086014925676e-05, |
| "loss": 4.9953, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9161700067415156e-05, |
| "loss": 5.0038, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9153314119904636e-05, |
| "loss": 4.9853, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914494455119785e-05, |
| "loss": 4.9823, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136558603687325e-05, |
| "loss": 4.9665, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128172656176805e-05, |
| "loss": 4.9707, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119786708666285e-05, |
| "loss": 4.9517, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111400761155765e-05, |
| "loss": 4.9615, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910303119244898e-05, |
| "loss": 4.95, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909464524493846e-05, |
| "loss": 4.9536, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908625929742794e-05, |
| "loss": 4.9399, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907787334991742e-05, |
| "loss": 4.9436, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90694874024069e-05, |
| "loss": 4.9305, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906110145489638e-05, |
| "loss": 4.9334, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905271550738586e-05, |
| "loss": 4.926, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904432955987534e-05, |
| "loss": 4.9192, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903595999116855e-05, |
| "loss": 4.9306, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902757404365803e-05, |
| "loss": 4.9147, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901920447495124e-05, |
| "loss": 4.9128, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901081852744072e-05, |
| "loss": 4.9028, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90024325799302e-05, |
| "loss": 4.9081, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8994046632419686e-05, |
| "loss": 4.9029, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985660684909166e-05, |
| "loss": 4.8958, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977274737398646e-05, |
| "loss": 4.8996, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968888789888125e-05, |
| "loss": 4.8859, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960502842377605e-05, |
| "loss": 4.8855, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952116894867085e-05, |
| "loss": 4.8663, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943747326160294e-05, |
| "loss": 4.8723, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935361378649774e-05, |
| "loss": 4.8737, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8926975431139254e-05, |
| "loss": 4.8637, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918589483628734e-05, |
| "loss": 4.8563, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8910203536118214e-05, |
| "loss": 4.878, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8901833967411423e-05, |
| "loss": 4.8603, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88934480199009e-05, |
| "loss": 4.8522, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888506207239038e-05, |
| "loss": 4.8518, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887667612487986e-05, |
| "loss": 4.8461, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886829017736934e-05, |
| "loss": 4.8473, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885992060866256e-05, |
| "loss": 4.8439, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885153466115204e-05, |
| "loss": 4.8398, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884314871364151e-05, |
| "loss": 4.8412, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883476276613099e-05, |
| "loss": 4.8394, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882637681862047e-05, |
| "loss": 4.8276, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881800724991369e-05, |
| "loss": 4.8178, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880962130240316e-05, |
| "loss": 4.8285, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880123535489264e-05, |
| "loss": 4.8157, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879284940738212e-05, |
| "loss": 4.8194, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878447983867534e-05, |
| "loss": 4.81, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877609389116482e-05, |
| "loss": 4.8071, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876772432245803e-05, |
| "loss": 4.8133, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875933837494751e-05, |
| "loss": 4.7999, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8750952427436986e-05, |
| "loss": 4.7967, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.765356540679932, |
| "eval_runtime": 295.2513, |
| "eval_samples_per_second": 1292.428, |
| "eval_steps_per_second": 40.389, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8742566479926466e-05, |
| "loss": 4.785, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8734180532415946e-05, |
| "loss": 4.7835, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8725794584905426e-05, |
| "loss": 4.8069, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8717408637394906e-05, |
| "loss": 4.7913, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709022689884386e-05, |
| "loss": 4.7825, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700653121177595e-05, |
| "loss": 4.7796, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692267173667075e-05, |
| "loss": 4.7803, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683881226156555e-05, |
| "loss": 4.7619, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.867549527864604e-05, |
| "loss": 4.7708, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.866710933113552e-05, |
| "loss": 4.7703, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8658723383625e-05, |
| "loss": 4.7729, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865035381491821e-05, |
| "loss": 4.7724, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864198424621142e-05, |
| "loss": 4.754, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.86335982987009e-05, |
| "loss": 4.7539, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862521235119038e-05, |
| "loss": 4.7427, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.861682640367986e-05, |
| "loss": 4.7441, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860844045616934e-05, |
| "loss": 4.7515, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860005450865882e-05, |
| "loss": 4.7408, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.859168493995203e-05, |
| "loss": 4.7454, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858329899244151e-05, |
| "loss": 4.7588, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8574913044930995e-05, |
| "loss": 4.7402, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8566527097420475e-05, |
| "loss": 4.7456, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8558141149909955e-05, |
| "loss": 4.7272, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8549755202399435e-05, |
| "loss": 4.746, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541385633692644e-05, |
| "loss": 4.7239, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532999686182124e-05, |
| "loss": 4.7276, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524613738671604e-05, |
| "loss": 4.7133, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8516227791161084e-05, |
| "loss": 4.7231, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8507841843650564e-05, |
| "loss": 4.714, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8499455896140044e-05, |
| "loss": 4.7107, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8491069948629524e-05, |
| "loss": 4.7158, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8482684001119e-05, |
| "loss": 4.7195, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847431443241221e-05, |
| "loss": 4.7115, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.846592848490169e-05, |
| "loss": 4.7133, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845754253739117e-05, |
| "loss": 4.7049, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844915658988065e-05, |
| "loss": 4.7127, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844078702117387e-05, |
| "loss": 4.6846, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843240107366335e-05, |
| "loss": 4.7, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842401512615282e-05, |
| "loss": 4.6753, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84156291786423e-05, |
| "loss": 4.6969, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840724323113178e-05, |
| "loss": 4.6798, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839885728362126e-05, |
| "loss": 4.6931, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839048771491447e-05, |
| "loss": 4.6707, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838210176740395e-05, |
| "loss": 4.6883, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8373732198697167e-05, |
| "loss": 4.6851, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8365346251186646e-05, |
| "loss": 4.6823, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8356960303676126e-05, |
| "loss": 4.6896, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8348574356165606e-05, |
| "loss": 4.6592, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8340188408655086e-05, |
| "loss": 4.6627, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8331802461144566e-05, |
| "loss": 4.6764, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8323416513634046e-05, |
| "loss": 4.6683, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8315030566123526e-05, |
| "loss": 4.6661, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8306644618613006e-05, |
| "loss": 4.6588, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298258671102486e-05, |
| "loss": 4.6525, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289872723591966e-05, |
| "loss": 4.6464, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281486776081446e-05, |
| "loss": 4.6638, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273117207374655e-05, |
| "loss": 4.6391, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.826474763866787e-05, |
| "loss": 4.6505, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.825636169115735e-05, |
| "loss": 4.6565, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.824797574364683e-05, |
| "loss": 4.6378, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823958979613631e-05, |
| "loss": 4.639, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823120384862579e-05, |
| "loss": 4.6387, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.822281790111527e-05, |
| "loss": 4.6248, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821443195360475e-05, |
| "loss": 4.6308, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820604600609423e-05, |
| "loss": 4.6469, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819767643738744e-05, |
| "loss": 4.6288, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818929048987692e-05, |
| "loss": 4.6233, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.81809045423664e-05, |
| "loss": 4.6215, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817253497365961e-05, |
| "loss": 4.6225, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816414902614909e-05, |
| "loss": 4.6368, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.815576307863857e-05, |
| "loss": 4.6287, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.814737713112805e-05, |
| "loss": 4.6186, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8138991183617535e-05, |
| "loss": 4.6339, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.813060523610701e-05, |
| "loss": 4.6298, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.812221928859649e-05, |
| "loss": 4.6157, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8113849719889704e-05, |
| "loss": 4.6125, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8105463772379184e-05, |
| "loss": 4.6117, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809707782486866e-05, |
| "loss": 4.6085, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808869187735814e-05, |
| "loss": 4.6052, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808030592984762e-05, |
| "loss": 4.6101, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80719199823371e-05, |
| "loss": 4.6155, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8063550413630307e-05, |
| "loss": 4.6164, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8055164466119786e-05, |
| "loss": 4.6042, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804677851860927e-05, |
| "loss": 4.5821, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803839257109875e-05, |
| "loss": 4.5978, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803000662358823e-05, |
| "loss": 4.6017, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802162067607771e-05, |
| "loss": 4.6037, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801323472856719e-05, |
| "loss": 4.5955, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80048651598604e-05, |
| "loss": 4.5924, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.799647921234988e-05, |
| "loss": 4.5976, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.798809326483936e-05, |
| "loss": 4.5901, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797970731732884e-05, |
| "loss": 4.5851, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797132136981832e-05, |
| "loss": 4.5917, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79629354223078e-05, |
| "loss": 4.5754, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795454947479728e-05, |
| "loss": 4.5956, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794616352728676e-05, |
| "loss": 4.5863, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.793777757977624e-05, |
| "loss": 4.5659, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792940801106946e-05, |
| "loss": 4.5785, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792103844236267e-05, |
| "loss": 4.5765, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791265249485215e-05, |
| "loss": 4.5881, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790426654734163e-05, |
| "loss": 4.5757, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7895880599831107e-05, |
| "loss": 4.571, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7887494652320587e-05, |
| "loss": 4.5641, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7879108704810067e-05, |
| "loss": 4.573, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7870722757299546e-05, |
| "loss": 4.5551, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7862353188592756e-05, |
| "loss": 4.5646, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7853967241082236e-05, |
| "loss": 4.5609, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845581293571715e-05, |
| "loss": 4.5635, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837195346061195e-05, |
| "loss": 4.5525, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7828809398550675e-05, |
| "loss": 4.5622, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7820423451040155e-05, |
| "loss": 4.5551, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7812037503529635e-05, |
| "loss": 4.5553, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7803651556019115e-05, |
| "loss": 4.5572, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7795265608508595e-05, |
| "loss": 4.5466, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7786896039801804e-05, |
| "loss": 4.5705, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7778510092291284e-05, |
| "loss": 4.5544, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7770124144780764e-05, |
| "loss": 4.5537, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7761738197270244e-05, |
| "loss": 4.5438, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7753352249759724e-05, |
| "loss": 4.5499, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.774498268105293e-05, |
| "loss": 4.5502, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773659673354241e-05, |
| "loss": 4.5402, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772821078603189e-05, |
| "loss": 4.5556, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771982483852137e-05, |
| "loss": 4.5424, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771145526981459e-05, |
| "loss": 4.5371, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770306932230407e-05, |
| "loss": 4.5334, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769468337479355e-05, |
| "loss": 4.5353, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768629742728303e-05, |
| "loss": 4.5338, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767792785857624e-05, |
| "loss": 4.5353, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766955828986945e-05, |
| "loss": 4.5217, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766117234235893e-05, |
| "loss": 4.5502, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765278639484841e-05, |
| "loss": 4.5402, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764440044733789e-05, |
| "loss": 4.5306, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.763601449982737e-05, |
| "loss": 4.5287, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762762855231685e-05, |
| "loss": 4.5234, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761924260480633e-05, |
| "loss": 4.5343, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7610856657295813e-05, |
| "loss": 4.5312, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7602470709785293e-05, |
| "loss": 4.5271, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75941011410785e-05, |
| "loss": 4.532, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758571519356798e-05, |
| "loss": 4.528, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757732924605746e-05, |
| "loss": 4.5252, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756894329854694e-05, |
| "loss": 4.5177, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756055735103642e-05, |
| "loss": 4.5303, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.755218778232963e-05, |
| "loss": 4.5181, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754380183481911e-05, |
| "loss": 4.5205, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753541588730859e-05, |
| "loss": 4.5149, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.752702993979807e-05, |
| "loss": 4.5163, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751864399228755e-05, |
| "loss": 4.5176, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751025804477703e-05, |
| "loss": 4.5059, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.750188847607025e-05, |
| "loss": 4.5105, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.484802722930908, |
| "eval_runtime": 296.4897, |
| "eval_samples_per_second": 1287.029, |
| "eval_steps_per_second": 40.221, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.749350252855973e-05, |
| "loss": 4.4981, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748511658104921e-05, |
| "loss": 4.4961, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747673063353868e-05, |
| "loss": 4.5236, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746834468602816e-05, |
| "loss": 4.5142, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745995873851764e-05, |
| "loss": 4.5038, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745157279100712e-05, |
| "loss": 4.5012, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74431868434966e-05, |
| "loss": 4.506, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.743480089598608e-05, |
| "loss": 4.4896, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.742641494847556e-05, |
| "loss": 4.5071, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741802900096504e-05, |
| "loss": 4.5003, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740964305345452e-05, |
| "loss": 4.5039, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401273484747736e-05, |
| "loss": 4.5054, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7392887537237216e-05, |
| "loss": 4.4864, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384501589726696e-05, |
| "loss": 4.4917, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7376115642216176e-05, |
| "loss": 4.4858, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7367729694705656e-05, |
| "loss": 4.4861, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7359343747195135e-05, |
| "loss": 4.4895, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7350974178488345e-05, |
| "loss": 4.482, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7342588230977825e-05, |
| "loss": 4.4888, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7334202283467305e-05, |
| "loss": 4.5077, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7325816335956784e-05, |
| "loss": 4.4862, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7317430388446264e-05, |
| "loss": 4.4911, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730904444093574e-05, |
| "loss": 4.4839, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730065849342522e-05, |
| "loss": 4.4964, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7292272545914704e-05, |
| "loss": 4.4743, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7283886598404184e-05, |
| "loss": 4.487, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7275500650893664e-05, |
| "loss": 4.4695, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7267114703383144e-05, |
| "loss": 4.4742, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7258728755872624e-05, |
| "loss": 4.4732, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7250342808362104e-05, |
| "loss": 4.4694, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7241956860851584e-05, |
| "loss": 4.4769, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7233570913341064e-05, |
| "loss": 4.4819, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7225184965830544e-05, |
| "loss": 4.4747, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721681539712375e-05, |
| "loss": 4.4808, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720844582841696e-05, |
| "loss": 4.4664, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720005988090644e-05, |
| "loss": 4.4834, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719169031219965e-05, |
| "loss": 4.4518, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718330436468914e-05, |
| "loss": 4.4728, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717491841717862e-05, |
| "loss": 4.4466, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71665324696681e-05, |
| "loss": 4.4688, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715814652215758e-05, |
| "loss": 4.4563, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714976057464706e-05, |
| "loss": 4.4698, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714137462713654e-05, |
| "loss": 4.4504, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.713298867962602e-05, |
| "loss": 4.4623, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71246027321155e-05, |
| "loss": 4.4651, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711623316340871e-05, |
| "loss": 4.4636, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710784721589819e-05, |
| "loss": 4.4675, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099461268387667e-05, |
| "loss": 4.4449, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091075320877147e-05, |
| "loss": 4.4427, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7082705752170356e-05, |
| "loss": 4.4636, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7074319804659836e-05, |
| "loss": 4.4548, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706593385714932e-05, |
| "loss": 4.4496, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70575479096388e-05, |
| "loss": 4.4486, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704916196212828e-05, |
| "loss": 4.4431, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704079239342149e-05, |
| "loss": 4.4343, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.703240644591097e-05, |
| "loss": 4.4506, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702402049840045e-05, |
| "loss": 4.4322, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015634550889924e-05, |
| "loss": 4.4423, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7007248603379404e-05, |
| "loss": 4.4528, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6998862655868884e-05, |
| "loss": 4.4318, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69904930871621e-05, |
| "loss": 4.4342, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6982107139651573e-05, |
| "loss": 4.438, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697372119214106e-05, |
| "loss": 4.4226, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696533524463054e-05, |
| "loss": 4.4251, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695694929712002e-05, |
| "loss": 4.4461, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69485633496095e-05, |
| "loss": 4.4319, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694019378090271e-05, |
| "loss": 4.4243, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693180783339219e-05, |
| "loss": 4.4247, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692342188588167e-05, |
| "loss": 4.4279, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691503593837115e-05, |
| "loss": 4.4411, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690664999086063e-05, |
| "loss": 4.4358, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689828042215384e-05, |
| "loss": 4.424, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688989447464332e-05, |
| "loss": 4.4405, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68815085271328e-05, |
| "loss": 4.4339, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687312257962228e-05, |
| "loss": 4.4245, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686473663211176e-05, |
| "loss": 4.4208, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856367063404974e-05, |
| "loss": 4.4274, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6847981115894454e-05, |
| "loss": 4.4217, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683961154718766e-05, |
| "loss": 4.4193, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683122559967714e-05, |
| "loss": 4.4202, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682283965216662e-05, |
| "loss": 4.4273, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68144537046561e-05, |
| "loss": 4.4322, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680606775714558e-05, |
| "loss": 4.418, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679768180963506e-05, |
| "loss": 4.4032, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678929586212454e-05, |
| "loss": 4.4109, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678090991461402e-05, |
| "loss": 4.4159, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.67725239671035e-05, |
| "loss": 4.423, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676415439839671e-05, |
| "loss": 4.4125, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675576845088619e-05, |
| "loss": 4.4107, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674738250337568e-05, |
| "loss": 4.4209, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673899655586516e-05, |
| "loss": 4.407, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673062698715837e-05, |
| "loss": 4.4078, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722257418451576e-05, |
| "loss": 4.412, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713871470941056e-05, |
| "loss": 4.4042, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705485523430536e-05, |
| "loss": 4.4219, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697099575920016e-05, |
| "loss": 4.4067, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688713628409496e-05, |
| "loss": 4.3949, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6680327680898976e-05, |
| "loss": 4.4056, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6671958112192185e-05, |
| "loss": 4.4026, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663572164681665e-05, |
| "loss": 4.4202, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6655186217171145e-05, |
| "loss": 4.4087, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664680026966063e-05, |
| "loss": 4.3994, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663841432215011e-05, |
| "loss": 4.3899, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663002837463959e-05, |
| "loss": 4.404, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6621642427129065e-05, |
| "loss": 4.3874, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6613256479618545e-05, |
| "loss": 4.3986, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6604870532108025e-05, |
| "loss": 4.3942, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6596484584597505e-05, |
| "loss": 4.3958, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6588115015890714e-05, |
| "loss": 4.3849, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6579729068380194e-05, |
| "loss": 4.4033, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6571343120869674e-05, |
| "loss": 4.3877, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6562957173359154e-05, |
| "loss": 4.3884, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6554571225848634e-05, |
| "loss": 4.3971, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654620165714185e-05, |
| "loss": 4.3843, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653781570963133e-05, |
| "loss": 4.4062, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652944614092454e-05, |
| "loss": 4.3935, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652106019341402e-05, |
| "loss": 4.3938, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65126742459035e-05, |
| "loss": 4.3856, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650428829839298e-05, |
| "loss": 4.3896, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649590235088246e-05, |
| "loss": 4.3915, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648753278217567e-05, |
| "loss": 4.3804, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647914683466515e-05, |
| "loss": 4.3956, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647076088715463e-05, |
| "loss": 4.3881, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.646237493964411e-05, |
| "loss": 4.3805, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645398899213359e-05, |
| "loss": 4.3823, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644560304462307e-05, |
| "loss": 4.3803, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6437217097112554e-05, |
| "loss": 4.3731, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642884752840576e-05, |
| "loss": 4.3875, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642046158089524e-05, |
| "loss": 4.371, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641207563338472e-05, |
| "loss": 4.3925, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64036896858742e-05, |
| "loss": 4.3894, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639530373836368e-05, |
| "loss": 4.3771, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638691779085316e-05, |
| "loss": 4.3755, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637853184334264e-05, |
| "loss": 4.3741, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637014589583212e-05, |
| "loss": 4.3834, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6361759948321596e-05, |
| "loss": 4.3815, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635339037961481e-05, |
| "loss": 4.3775, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634502081090802e-05, |
| "loss": 4.3807, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633663486339751e-05, |
| "loss": 4.3795, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632824891588699e-05, |
| "loss": 4.3808, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631986296837647e-05, |
| "loss": 4.3689, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631147702086595e-05, |
| "loss": 4.387, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630309107335542e-05, |
| "loss": 4.3739, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.62947051258449e-05, |
| "loss": 4.3712, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628633555713812e-05, |
| "loss": 4.3726, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6277949609627597e-05, |
| "loss": 4.3748, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626956366211707e-05, |
| "loss": 4.3699, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626117771460655e-05, |
| "loss": 4.3608, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625279176709603e-05, |
| "loss": 4.364, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.348752975463867, |
| "eval_runtime": 286.9409, |
| "eval_samples_per_second": 1329.859, |
| "eval_steps_per_second": 41.559, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6244422198389246e-05, |
| "loss": 4.366, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6236036250878726e-05, |
| "loss": 4.3528, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622766668217194e-05, |
| "loss": 4.3814, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621928073466142e-05, |
| "loss": 4.3731, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6210894787150895e-05, |
| "loss": 4.3682, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6202508839640374e-05, |
| "loss": 4.357, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6194122892129854e-05, |
| "loss": 4.3695, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6185736944619334e-05, |
| "loss": 4.3493, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6177350997108814e-05, |
| "loss": 4.3695, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6168965049598294e-05, |
| "loss": 4.3651, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61605954808915e-05, |
| "loss": 4.3624, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615220953338098e-05, |
| "loss": 4.3725, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614382358587046e-05, |
| "loss": 4.349, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.613543763835994e-05, |
| "loss": 4.3587, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.612705169084942e-05, |
| "loss": 4.3471, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611866574333891e-05, |
| "loss": 4.3542, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611027979582839e-05, |
| "loss": 4.3514, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.610189384831787e-05, |
| "loss": 4.3499, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.609350790080735e-05, |
| "loss": 4.3532, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.608512195329683e-05, |
| "loss": 4.3726, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.607673600578631e-05, |
| "loss": 4.3523, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606835005827578e-05, |
| "loss": 4.3582, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.605999686837273e-05, |
| "loss": 4.3521, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.605161092086221e-05, |
| "loss": 4.3643, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.604322497335169e-05, |
| "loss": 4.344, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.603483902584117e-05, |
| "loss": 4.3524, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.602645307833065e-05, |
| "loss": 4.3407, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.601806713082013e-05, |
| "loss": 4.3423, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.600968118330961e-05, |
| "loss": 4.3412, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600129523579909e-05, |
| "loss": 4.3459, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.599290928828857e-05, |
| "loss": 4.3437, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598452334077805e-05, |
| "loss": 4.3541, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597613739326753e-05, |
| "loss": 4.3464, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.596775144575701e-05, |
| "loss": 4.3502, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595939825585395e-05, |
| "loss": 4.3434, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595101230834343e-05, |
| "loss": 4.3531, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5942626360832906e-05, |
| "loss": 4.3323, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.593425679212612e-05, |
| "loss": 4.3451, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.59258708446156e-05, |
| "loss": 4.3213, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.591748489710508e-05, |
| "loss": 4.3408, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590909894959456e-05, |
| "loss": 4.3355, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590071300208404e-05, |
| "loss": 4.3473, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.589232705457352e-05, |
| "loss": 4.3288, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5883941107063e-05, |
| "loss": 4.3392, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.587555515955248e-05, |
| "loss": 4.3421, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.586718559084569e-05, |
| "loss": 4.3452, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585879964333517e-05, |
| "loss": 4.3392, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585041369582465e-05, |
| "loss": 4.3278, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.584202774831413e-05, |
| "loss": 4.319, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.583364180080361e-05, |
| "loss": 4.3424, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.582527223209682e-05, |
| "loss": 4.337, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.58168862845863e-05, |
| "loss": 4.3304, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5808516715879515e-05, |
| "loss": 4.327, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5800130768368995e-05, |
| "loss": 4.3281, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5791744820858475e-05, |
| "loss": 4.3115, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5783375252151684e-05, |
| "loss": 4.3353, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5774989304641164e-05, |
| "loss": 4.3112, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5766603357130644e-05, |
| "loss": 4.3256, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5758217409620124e-05, |
| "loss": 4.3326, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5749831462109604e-05, |
| "loss": 4.3201, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5741445514599084e-05, |
| "loss": 4.3135, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5733059567088564e-05, |
| "loss": 4.3226, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5724673619578044e-05, |
| "loss": 4.3064, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5716287672067524e-05, |
| "loss": 4.314, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.570791810336074e-05, |
| "loss": 4.3265, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569953215585022e-05, |
| "loss": 4.318, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.56911462083397e-05, |
| "loss": 4.3102, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.568276026082918e-05, |
| "loss": 4.306, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.567437431331866e-05, |
| "loss": 4.3142, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.566598836580814e-05, |
| "loss": 4.3224, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.565760241829762e-05, |
| "loss": 4.3221, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564921647078709e-05, |
| "loss": 4.315, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564084690208031e-05, |
| "loss": 4.3246, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.563246095456979e-05, |
| "loss": 4.3248, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.562407500705927e-05, |
| "loss": 4.3102, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.561568905954874e-05, |
| "loss": 4.309, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.560730311203822e-05, |
| "loss": 4.3175, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559891716452771e-05, |
| "loss": 4.3128, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559053121701719e-05, |
| "loss": 4.311, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.558214526950667e-05, |
| "loss": 4.3075, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.557377570079988e-05, |
| "loss": 4.315, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556538975328936e-05, |
| "loss": 4.321, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.555700380577884e-05, |
| "loss": 4.3115, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554861785826832e-05, |
| "loss": 4.2955, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5540248289561526e-05, |
| "loss": 4.2983, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5531862342051006e-05, |
| "loss": 4.3058, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5523476394540486e-05, |
| "loss": 4.3146, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5515090447029966e-05, |
| "loss": 4.3093, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5506720878323175e-05, |
| "loss": 4.2987, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5498334930812655e-05, |
| "loss": 4.3116, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548996536210587e-05, |
| "loss": 4.3041, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548157941459535e-05, |
| "loss": 4.3001, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.547319346708483e-05, |
| "loss": 4.3022, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.546480751957431e-05, |
| "loss": 4.3061, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.545642157206379e-05, |
| "loss": 4.3153, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.544803562455327e-05, |
| "loss": 4.2963, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543964967704275e-05, |
| "loss": 4.2864, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543126372953223e-05, |
| "loss": 4.3042, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.542287778202171e-05, |
| "loss": 4.2981, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.541450821331492e-05, |
| "loss": 4.3129, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.54061222658044e-05, |
| "loss": 4.3055, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.539773631829388e-05, |
| "loss": 4.2937, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538935037078336e-05, |
| "loss": 4.2878, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538096442327284e-05, |
| "loss": 4.3039, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5372594854566055e-05, |
| "loss": 4.2841, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5364208907055535e-05, |
| "loss": 4.2963, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5355822959545015e-05, |
| "loss": 4.2889, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5347437012034495e-05, |
| "loss": 4.2973, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5339051064523975e-05, |
| "loss": 4.2819, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5330665117013455e-05, |
| "loss": 4.3016, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5322295548306664e-05, |
| "loss": 4.2872, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5313909600796144e-05, |
| "loss": 4.2846, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5305523653285624e-05, |
| "loss": 4.2996, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5297137705775104e-05, |
| "loss": 4.2831, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528875175826458e-05, |
| "loss": 4.309, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528038218955779e-05, |
| "loss": 4.2958, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.527201262085101e-05, |
| "loss": 4.2937, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526362667334049e-05, |
| "loss": 4.2863, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.525524072582997e-05, |
| "loss": 4.2901, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.524685477831945e-05, |
| "loss": 4.2905, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.523846883080893e-05, |
| "loss": 4.2849, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.52300828832984e-05, |
| "loss": 4.295, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.522169693578788e-05, |
| "loss": 4.2888, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.521331098827736e-05, |
| "loss": 4.2837, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.520494141957057e-05, |
| "loss": 4.2852, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.519655547206005e-05, |
| "loss": 4.2854, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518816952454953e-05, |
| "loss": 4.2779, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517978357703902e-05, |
| "loss": 4.293, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517141400833223e-05, |
| "loss": 4.2739, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.516302806082171e-05, |
| "loss": 4.293, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515465849211492e-05, |
| "loss": 4.2889, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.51462725446044e-05, |
| "loss": 4.2874, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5137886597093876e-05, |
| "loss": 4.278, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5129500649583356e-05, |
| "loss": 4.2803, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5121114702072836e-05, |
| "loss": 4.2898, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5112728754562316e-05, |
| "loss": 4.2893, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5104342807051795e-05, |
| "loss": 4.2788, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5095956859541275e-05, |
| "loss": 4.286, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5087570912030755e-05, |
| "loss": 4.2844, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5079184964520235e-05, |
| "loss": 4.2908, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5070799017009715e-05, |
| "loss": 4.2756, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5062413069499195e-05, |
| "loss": 4.2918, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.505402712198868e-05, |
| "loss": 4.2837, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.504565755328189e-05, |
| "loss": 4.2786, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.503727160577137e-05, |
| "loss": 4.2777, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502888565826085e-05, |
| "loss": 4.2852, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502049971075033e-05, |
| "loss": 4.2756, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.501211376323981e-05, |
| "loss": 4.2644, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.500374419453302e-05, |
| "loss": 4.2808, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.265576362609863, |
| "eval_runtime": 284.2057, |
| "eval_samples_per_second": 1342.658, |
| "eval_steps_per_second": 41.959, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.49953582470225e-05, |
| "loss": 4.2686, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.498697229951198e-05, |
| "loss": 4.2613, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497858635200146e-05, |
| "loss": 4.2868, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497020040449093e-05, |
| "loss": 4.2825, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496181445698042e-05, |
| "loss": 4.2788, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.49534285094699e-05, |
| "loss": 4.2714, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.494504256195938e-05, |
| "loss": 4.2755, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.493665661444886e-05, |
| "loss": 4.2564, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492827066693834e-05, |
| "loss": 4.2801, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.491988471942782e-05, |
| "loss": 4.2775, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.49114987719173e-05, |
| "loss": 4.2703, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.490311282440678e-05, |
| "loss": 4.2825, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.489474325569999e-05, |
| "loss": 4.2615, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.488635730818947e-05, |
| "loss": 4.2711, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.487797136067895e-05, |
| "loss": 4.2626, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486958541316843e-05, |
| "loss": 4.2605, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486119946565791e-05, |
| "loss": 4.2641, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.485281351814739e-05, |
| "loss": 4.2616, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.484442757063687e-05, |
| "loss": 4.2669, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.483604162312635e-05, |
| "loss": 4.2819, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4827672054419564e-05, |
| "loss": 4.2703, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4819286106909044e-05, |
| "loss": 4.2753, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4810900159398524e-05, |
| "loss": 4.2647, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4802514211888004e-05, |
| "loss": 4.2747, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479412826437748e-05, |
| "loss": 4.26, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478575869567069e-05, |
| "loss": 4.2663, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477737274816017e-05, |
| "loss": 4.2586, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476898680064965e-05, |
| "loss": 4.2541, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4760600853139126e-05, |
| "loss": 4.2539, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4752214905628606e-05, |
| "loss": 4.261, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474384533692182e-05, |
| "loss": 4.2595, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.47354593894113e-05, |
| "loss": 4.2697, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472707344190078e-05, |
| "loss": 4.2638, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471868749439026e-05, |
| "loss": 4.2664, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471031792568348e-05, |
| "loss": 4.2563, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.470193197817295e-05, |
| "loss": 4.2709, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.469354603066243e-05, |
| "loss": 4.252, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.468516008315191e-05, |
| "loss": 4.2576, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.467677413564139e-05, |
| "loss": 4.2414, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466838818813087e-05, |
| "loss": 4.2553, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466000224062035e-05, |
| "loss": 4.2538, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465161629310983e-05, |
| "loss": 4.2635, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464324672440304e-05, |
| "loss": 4.2469, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4634860776892526e-05, |
| "loss": 4.2577, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4626474829382006e-05, |
| "loss": 4.2597, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4618088881871486e-05, |
| "loss": 4.262, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4609702934360966e-05, |
| "loss": 4.2588, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4601333365654175e-05, |
| "loss": 4.2439, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4592947418143655e-05, |
| "loss": 4.2358, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4584577849436864e-05, |
| "loss": 4.2622, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4576191901926344e-05, |
| "loss": 4.2546, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4567805954415824e-05, |
| "loss": 4.2488, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4559420006905304e-05, |
| "loss": 4.2493, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551034059394784e-05, |
| "loss": 4.2469, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542648111884264e-05, |
| "loss": 4.2312, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534262164373744e-05, |
| "loss": 4.2499, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4525876216863224e-05, |
| "loss": 4.2348, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4517490269352704e-05, |
| "loss": 4.2431, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450912070064592e-05, |
| "loss": 4.2552, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.45007347531354e-05, |
| "loss": 4.2395, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.449234880562488e-05, |
| "loss": 4.2344, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448397923691809e-05, |
| "loss": 4.2484, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.447559328940757e-05, |
| "loss": 4.2262, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.446720734189705e-05, |
| "loss": 4.2383, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445882139438653e-05, |
| "loss": 4.2451, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445043544687601e-05, |
| "loss": 4.2433, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444204949936548e-05, |
| "loss": 4.2332, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443366355185496e-05, |
| "loss": 4.2317, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442527760434444e-05, |
| "loss": 4.2337, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.441689165683393e-05, |
| "loss": 4.2447, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440850570932341e-05, |
| "loss": 4.2441, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440013614061662e-05, |
| "loss": 4.2392, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.43917501931061e-05, |
| "loss": 4.2473, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438336424559558e-05, |
| "loss": 4.2494, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437497829808506e-05, |
| "loss": 4.2365, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436659235057454e-05, |
| "loss": 4.2291, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435820640306402e-05, |
| "loss": 4.2465, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4349836834357227e-05, |
| "loss": 4.2391, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4341450886846707e-05, |
| "loss": 4.2317, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4333064939336186e-05, |
| "loss": 4.2336, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4324678991825666e-05, |
| "loss": 4.2407, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4316293044315146e-05, |
| "loss": 4.248, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4307907096804626e-05, |
| "loss": 4.2341, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429952114929411e-05, |
| "loss": 4.2216, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429115158058732e-05, |
| "loss": 4.2305, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.42827656330768e-05, |
| "loss": 4.2251, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.427437968556628e-05, |
| "loss": 4.2401, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.426599373805576e-05, |
| "loss": 4.2359, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.425760779054524e-05, |
| "loss": 4.2232, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424923822183845e-05, |
| "loss": 4.239, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424085227432793e-05, |
| "loss": 4.2286, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.423246632681741e-05, |
| "loss": 4.2319, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.422408037930689e-05, |
| "loss": 4.2252, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.421569443179637e-05, |
| "loss": 4.2309, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.420730848428585e-05, |
| "loss": 4.2442, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419892253677533e-05, |
| "loss": 4.229, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4190569346872276e-05, |
| "loss": 4.2115, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4182183399361756e-05, |
| "loss": 4.2284, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4173797451851236e-05, |
| "loss": 4.228, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4165411504340716e-05, |
| "loss": 4.2352, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4157025556830196e-05, |
| "loss": 4.2376, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4148639609319676e-05, |
| "loss": 4.2229, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414025366180915e-05, |
| "loss": 4.2179, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.413186771429863e-05, |
| "loss": 4.2312, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4123498145591845e-05, |
| "loss": 4.2083, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.411511219808132e-05, |
| "loss": 4.2252, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4106726250570805e-05, |
| "loss": 4.2173, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4098340303060284e-05, |
| "loss": 4.2269, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4089954355549764e-05, |
| "loss": 4.2133, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4081584786842974e-05, |
| "loss": 4.2298, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407321521813619e-05, |
| "loss": 4.2157, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.406482927062567e-05, |
| "loss": 4.2143, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405644332311514e-05, |
| "loss": 4.2289, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404805737560462e-05, |
| "loss": 4.2173, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40396714280941e-05, |
| "loss": 4.2373, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403128548058358e-05, |
| "loss": 4.2256, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402291591187679e-05, |
| "loss": 4.2195, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.401452996436627e-05, |
| "loss": 4.2261, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400614401685576e-05, |
| "loss": 4.2136, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.399775806934524e-05, |
| "loss": 4.223, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398938850063845e-05, |
| "loss": 4.216, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398100255312793e-05, |
| "loss": 4.2279, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397261660561741e-05, |
| "loss": 4.2168, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396423065810689e-05, |
| "loss": 4.2133, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.395584471059637e-05, |
| "loss": 4.2162, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.394745876308585e-05, |
| "loss": 4.2193, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393907281557533e-05, |
| "loss": 4.2096, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393068686806481e-05, |
| "loss": 4.2229, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392230092055429e-05, |
| "loss": 4.2092, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3913931351847496e-05, |
| "loss": 4.2254, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3905545404336976e-05, |
| "loss": 4.2192, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3897159456826456e-05, |
| "loss": 4.2202, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3888773509315936e-05, |
| "loss": 4.2113, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388040394060915e-05, |
| "loss": 4.2122, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.387201799309863e-05, |
| "loss": 4.2254, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.386364842439184e-05, |
| "loss": 4.2173, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.385526247688132e-05, |
| "loss": 4.2123, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.38468765293708e-05, |
| "loss": 4.2219, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383849058186028e-05, |
| "loss": 4.2171, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383010463434976e-05, |
| "loss": 4.2248, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.382173506564297e-05, |
| "loss": 4.2127, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.381334911813245e-05, |
| "loss": 4.2229, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.380496317062193e-05, |
| "loss": 4.2153, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.379657722311141e-05, |
| "loss": 4.2126, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378819127560089e-05, |
| "loss": 4.2145, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3779805328090376e-05, |
| "loss": 4.2153, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3771419380579856e-05, |
| "loss": 4.2131, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3763033433069336e-05, |
| "loss": 4.2008, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375464748555881e-05, |
| "loss": 4.2147, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.208554267883301, |
| "eval_runtime": 299.6848, |
| "eval_samples_per_second": 1273.308, |
| "eval_steps_per_second": 39.792, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.374626153804829e-05, |
| "loss": 4.2084, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.373787559053777e-05, |
| "loss": 4.1978, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372948964302725e-05, |
| "loss": 4.22, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372112007432046e-05, |
| "loss": 4.22, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.371273412680994e-05, |
| "loss": 4.2119, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.370434817929942e-05, |
| "loss": 4.2075, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36959622317889e-05, |
| "loss": 4.2095, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.368757628427838e-05, |
| "loss": 4.1966, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367919033676786e-05, |
| "loss": 4.2111, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3670820768061074e-05, |
| "loss": 4.2163, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3662434820550554e-05, |
| "loss": 4.2065, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.365406525184376e-05, |
| "loss": 4.2147, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.364567930433324e-05, |
| "loss": 4.1994, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.363729335682272e-05, |
| "loss": 4.2089, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36289074093122e-05, |
| "loss": 4.1973, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362052146180168e-05, |
| "loss": 4.1983, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.361213551429116e-05, |
| "loss": 4.1998, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.360374956678064e-05, |
| "loss": 4.1997, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.359536361927012e-05, |
| "loss": 4.205, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.358699405056333e-05, |
| "loss": 4.2149, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357860810305281e-05, |
| "loss": 4.2128, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.35702221555423e-05, |
| "loss": 4.2126, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.356183620803178e-05, |
| "loss": 4.2013, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.355345026052126e-05, |
| "loss": 4.2152, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.354506431301074e-05, |
| "loss": 4.1951, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.353667836550022e-05, |
| "loss": 4.2078, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352829241798969e-05, |
| "loss": 4.1973, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351990647047917e-05, |
| "loss": 4.1897, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351153690177239e-05, |
| "loss": 4.1963, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3503167333065596e-05, |
| "loss": 4.1975, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3494781385555076e-05, |
| "loss": 4.2015, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3486395438044556e-05, |
| "loss": 4.2058, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3478009490534036e-05, |
| "loss": 4.2071, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3469623543023516e-05, |
| "loss": 4.2053, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346125397431673e-05, |
| "loss": 4.1949, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.345286802680621e-05, |
| "loss": 4.2069, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.344448207929569e-05, |
| "loss": 4.1948, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3436096131785165e-05, |
| "loss": 4.1915, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3427710184274645e-05, |
| "loss": 4.1868, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3419324236764125e-05, |
| "loss": 4.1923, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3410938289253605e-05, |
| "loss": 4.1977, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3402552341743085e-05, |
| "loss": 4.2024, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3394182773036294e-05, |
| "loss": 4.1899, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3385796825525774e-05, |
| "loss": 4.1967, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3377410878015254e-05, |
| "loss": 4.2019, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3369024930504734e-05, |
| "loss": 4.2004, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3360638982994214e-05, |
| "loss": 4.2021, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.335226941428743e-05, |
| "loss": 4.1864, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.334388346677691e-05, |
| "loss": 4.1742, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.333551389807012e-05, |
| "loss": 4.2047, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.33271279505596e-05, |
| "loss": 4.196, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331874200304908e-05, |
| "loss": 4.1941, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331035605553856e-05, |
| "loss": 4.1921, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.330197010802804e-05, |
| "loss": 4.1828, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.329358416051752e-05, |
| "loss": 4.176, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.328521459181073e-05, |
| "loss": 4.188, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.327684502310394e-05, |
| "loss": 4.1776, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3268459075593424e-05, |
| "loss": 4.1862, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3260073128082904e-05, |
| "loss": 4.1972, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3251687180572383e-05, |
| "loss": 4.182, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3243301233061863e-05, |
| "loss": 4.1755, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.323491528555134e-05, |
| "loss": 4.1911, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.322654571684455e-05, |
| "loss": 4.1713, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321815976933403e-05, |
| "loss": 4.178, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320977382182351e-05, |
| "loss": 4.1891, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320138787431299e-05, |
| "loss": 4.1864, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.319300192680247e-05, |
| "loss": 4.1772, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.318463235809568e-05, |
| "loss": 4.1774, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.317624641058516e-05, |
| "loss": 4.1746, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.316786046307464e-05, |
| "loss": 4.185, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.315947451556412e-05, |
| "loss": 4.1906, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.315108856805361e-05, |
| "loss": 4.18, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.314270262054309e-05, |
| "loss": 4.1883, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.313431667303257e-05, |
| "loss": 4.1934, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.312593072552205e-05, |
| "loss": 4.1876, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311754477801153e-05, |
| "loss": 4.1702, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3109158830501e-05, |
| "loss": 4.188, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310077288299048e-05, |
| "loss": 4.1867, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.30924033142837e-05, |
| "loss": 4.1714, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.308401736677318e-05, |
| "loss": 4.1835, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3075647798066386e-05, |
| "loss": 4.1798, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3067261850555866e-05, |
| "loss": 4.1925, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3058875903045346e-05, |
| "loss": 4.1805, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3050489955534826e-05, |
| "loss": 4.1607, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3042104008024306e-05, |
| "loss": 4.1785, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3033718060513786e-05, |
| "loss": 4.1714, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3025332113003266e-05, |
| "loss": 4.1816, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3016946165492746e-05, |
| "loss": 4.1795, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3008560217982226e-05, |
| "loss": 4.1717, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3000174270471705e-05, |
| "loss": 4.1788, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2991804701764915e-05, |
| "loss": 4.1784, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2983418754254395e-05, |
| "loss": 4.1699, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2975032806743874e-05, |
| "loss": 4.1764, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2966646859233354e-05, |
| "loss": 4.1805, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2958260911722834e-05, |
| "loss": 4.1882, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2949874964212314e-05, |
| "loss": 4.175, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.294150539550553e-05, |
| "loss": 4.1616, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.293311944799501e-05, |
| "loss": 4.1736, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.292473350048449e-05, |
| "loss": 4.1742, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291634755297397e-05, |
| "loss": 4.1843, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.290797798426718e-05, |
| "loss": 4.1837, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289959203675666e-05, |
| "loss": 4.1643, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289120608924614e-05, |
| "loss": 4.1665, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.288282014173562e-05, |
| "loss": 4.1805, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.28744341942251e-05, |
| "loss": 4.1551, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.286606462551831e-05, |
| "loss": 4.1731, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.285767867800779e-05, |
| "loss": 4.1598, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284929273049727e-05, |
| "loss": 4.1748, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284090678298675e-05, |
| "loss": 4.1611, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.283252083547623e-05, |
| "loss": 4.1747, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.282413488796571e-05, |
| "loss": 4.1682, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2815765319258924e-05, |
| "loss": 4.1583, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2807379371748404e-05, |
| "loss": 4.1774, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2798993424237884e-05, |
| "loss": 4.1622, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2790607476727364e-05, |
| "loss": 4.1849, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.278222152921684e-05, |
| "loss": 4.1779, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.277385196051005e-05, |
| "loss": 4.1654, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.276546601299953e-05, |
| "loss": 4.1767, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.275708006548901e-05, |
| "loss": 4.1612, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2748694117978486e-05, |
| "loss": 4.1697, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2740308170467966e-05, |
| "loss": 4.1671, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.273193860176118e-05, |
| "loss": 4.175, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.272355265425066e-05, |
| "loss": 4.1666, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.271516670674014e-05, |
| "loss": 4.1616, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.270678075922962e-05, |
| "loss": 4.1684, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26983948117191e-05, |
| "loss": 4.1671, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269000886420858e-05, |
| "loss": 4.1573, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.268163929550179e-05, |
| "loss": 4.1715, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.267325334799127e-05, |
| "loss": 4.1561, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.266486740048075e-05, |
| "loss": 4.1811, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.265648145297023e-05, |
| "loss": 4.1627, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264809550545971e-05, |
| "loss": 4.1719, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.263972593675292e-05, |
| "loss": 4.1592, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26313399892424e-05, |
| "loss": 4.1656, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2622954041731886e-05, |
| "loss": 4.174, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2614568094221366e-05, |
| "loss": 4.165, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2606182146710846e-05, |
| "loss": 4.1625, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2597796199200326e-05, |
| "loss": 4.1734, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2589426630493535e-05, |
| "loss": 4.168, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2581040682983015e-05, |
| "loss": 4.1733, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2572654735472495e-05, |
| "loss": 4.1591, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2564268787961975e-05, |
| "loss": 4.1729, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2555882840451455e-05, |
| "loss": 4.1651, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2547496892940935e-05, |
| "loss": 4.1642, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253914370303787e-05, |
| "loss": 4.1654, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253075775552735e-05, |
| "loss": 4.1648, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252237180801684e-05, |
| "loss": 4.1705, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251398586050632e-05, |
| "loss": 4.144, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.25055999129958e-05, |
| "loss": 4.1698, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.167238235473633, |
| "eval_runtime": 299.0901, |
| "eval_samples_per_second": 1275.84, |
| "eval_steps_per_second": 39.871, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249721396548528e-05, |
| "loss": 4.1583, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248882801797476e-05, |
| "loss": 4.1501, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248044207046424e-05, |
| "loss": 4.1675, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247205612295372e-05, |
| "loss": 4.1729, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24636701754432e-05, |
| "loss": 4.1636, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245528422793267e-05, |
| "loss": 4.1567, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244689828042215e-05, |
| "loss": 4.1611, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243851233291163e-05, |
| "loss": 4.1507, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243012638540111e-05, |
| "loss": 4.1634, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242174043789059e-05, |
| "loss": 4.1648, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.241335449038007e-05, |
| "loss": 4.1639, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.240496854286955e-05, |
| "loss": 4.1652, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239659897416277e-05, |
| "loss": 4.1566, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238821302665225e-05, |
| "loss": 4.1577, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237982707914173e-05, |
| "loss": 4.1509, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237144113163121e-05, |
| "loss": 4.145, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236305518412069e-05, |
| "loss": 4.1554, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235466923661017e-05, |
| "loss": 4.1498, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234628328909965e-05, |
| "loss": 4.1573, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233789734158913e-05, |
| "loss": 4.1668, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232951139407861e-05, |
| "loss": 4.1684, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232112544656809e-05, |
| "loss": 4.1627, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.231273949905756e-05, |
| "loss": 4.1549, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230436993035078e-05, |
| "loss": 4.1659, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.229598398284026e-05, |
| "loss": 4.1489, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.228759803532974e-05, |
| "loss": 4.1633, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227921208781922e-05, |
| "loss": 4.1503, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.22708261403087e-05, |
| "loss": 4.139, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226244019279818e-05, |
| "loss": 4.1513, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2254054245287657e-05, |
| "loss": 4.1498, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2245684676580866e-05, |
| "loss": 4.1496, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2237298729070346e-05, |
| "loss": 4.1603, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2228912781559826e-05, |
| "loss": 4.1577, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2220526834049306e-05, |
| "loss": 4.1577, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2212157265342515e-05, |
| "loss": 4.1485, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2203771317831995e-05, |
| "loss": 4.1666, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2195385370321475e-05, |
| "loss": 4.1465, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2186999422810955e-05, |
| "loss": 4.1408, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217861347530044e-05, |
| "loss": 4.1469, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217022752778992e-05, |
| "loss": 4.1416, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21618415802794e-05, |
| "loss": 4.1515, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215345563276888e-05, |
| "loss": 4.1594, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214506968525836e-05, |
| "loss": 4.1397, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21367164953553e-05, |
| "loss": 4.1488, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212833054784478e-05, |
| "loss": 4.1564, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211994460033426e-05, |
| "loss": 4.1581, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211155865282374e-05, |
| "loss": 4.1564, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.210317270531322e-05, |
| "loss": 4.1432, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20947867578027e-05, |
| "loss": 4.1193, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208640081029218e-05, |
| "loss": 4.1624, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2078031241585395e-05, |
| "loss": 4.1506, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2069645294074875e-05, |
| "loss": 4.1502, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2061259346564355e-05, |
| "loss": 4.1437, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2052873399053835e-05, |
| "loss": 4.1376, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2044487451543315e-05, |
| "loss": 4.1336, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2036101504032795e-05, |
| "loss": 4.1403, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2027715556522275e-05, |
| "loss": 4.1368, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201932960901175e-05, |
| "loss": 4.1404, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201097641910869e-05, |
| "loss": 4.1537, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200259047159817e-05, |
| "loss": 4.1341, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.199420452408765e-05, |
| "loss": 4.1312, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.198581857657713e-05, |
| "loss": 4.1484, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197744900787035e-05, |
| "loss": 4.1325, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196906306035983e-05, |
| "loss": 4.1342, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196067711284931e-05, |
| "loss": 4.1404, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195229116533879e-05, |
| "loss": 4.1426, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194390521782827e-05, |
| "loss": 4.1315, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193551927031775e-05, |
| "loss": 4.129, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.192713332280722e-05, |
| "loss": 4.1297, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19187473752967e-05, |
| "loss": 4.1445, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191036142778618e-05, |
| "loss": 4.1419, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.190197548027566e-05, |
| "loss": 4.139, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189358953276514e-05, |
| "loss": 4.1401, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188520358525462e-05, |
| "loss": 4.15, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18768176377441e-05, |
| "loss": 4.1453, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186844806903731e-05, |
| "loss": 4.129, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18600621215268e-05, |
| "loss": 4.1402, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1851692552820006e-05, |
| "loss": 4.1447, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1843306605309486e-05, |
| "loss": 4.1243, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1834920657798966e-05, |
| "loss": 4.1412, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1826534710288446e-05, |
| "loss": 4.139, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1818148762777926e-05, |
| "loss": 4.1465, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1809762815267406e-05, |
| "loss": 4.1387, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1801393246560615e-05, |
| "loss": 4.1184, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1793007299050095e-05, |
| "loss": 4.1333, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1784621351539575e-05, |
| "loss": 4.129, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1776235404029055e-05, |
| "loss": 4.1427, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1767849456518535e-05, |
| "loss": 4.1329, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1759463509008015e-05, |
| "loss": 4.1291, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175109394030123e-05, |
| "loss": 4.1343, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174270799279071e-05, |
| "loss": 4.1385, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173432204528019e-05, |
| "loss": 4.1243, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.172593609776967e-05, |
| "loss": 4.1328, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171755015025915e-05, |
| "loss": 4.1406, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170916420274863e-05, |
| "loss": 4.1436, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170079463404184e-05, |
| "loss": 4.1332, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.169240868653132e-05, |
| "loss": 4.1203, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16840227390208e-05, |
| "loss": 4.1299, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167563679151028e-05, |
| "loss": 4.1302, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166725084399975e-05, |
| "loss": 4.1413, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165886489648923e-05, |
| "loss": 4.1434, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165047894897872e-05, |
| "loss": 4.1261, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1642109380271935e-05, |
| "loss": 4.124, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163372343276141e-05, |
| "loss": 4.1334, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162533748525089e-05, |
| "loss": 4.1169, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.161695153774037e-05, |
| "loss": 4.1322, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160856559022985e-05, |
| "loss": 4.1144, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160017964271933e-05, |
| "loss": 4.1342, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.159179369520881e-05, |
| "loss": 4.1211, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158340774769829e-05, |
| "loss": 4.1294, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1575054557795226e-05, |
| "loss": 4.128, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1566668610284706e-05, |
| "loss": 4.1226, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1558282662774186e-05, |
| "loss": 4.1317, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154989671526367e-05, |
| "loss": 4.1206, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154151076775315e-05, |
| "loss": 4.144, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153312482024263e-05, |
| "loss": 4.1354, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152473887273211e-05, |
| "loss": 4.1256, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151636930402532e-05, |
| "loss": 4.135, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15079833565148e-05, |
| "loss": 4.1189, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149959740900428e-05, |
| "loss": 4.1292, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149121146149376e-05, |
| "loss": 4.1272, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148282551398324e-05, |
| "loss": 4.1294, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147443956647272e-05, |
| "loss": 4.1277, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.146606999776593e-05, |
| "loss": 4.1235, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.145768405025541e-05, |
| "loss": 4.1282, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144929810274489e-05, |
| "loss": 4.1248, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144092853403811e-05, |
| "loss": 4.1187, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1432542586527587e-05, |
| "loss": 4.1315, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1424156639017067e-05, |
| "loss": 4.1123, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1415770691506546e-05, |
| "loss": 4.1399, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1407384743996026e-05, |
| "loss": 4.1255, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1398998796485506e-05, |
| "loss": 4.1314, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1390612848974986e-05, |
| "loss": 4.1148, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1382226901464466e-05, |
| "loss": 4.1296, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1373840953953946e-05, |
| "loss": 4.1317, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136545500644342e-05, |
| "loss": 4.1284, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13570690589329e-05, |
| "loss": 4.1198, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134868311142238e-05, |
| "loss": 4.1344, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1340313542715595e-05, |
| "loss": 4.126, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1331927595205075e-05, |
| "loss": 4.1367, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1323541647694555e-05, |
| "loss": 4.1169, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1315155700184035e-05, |
| "loss": 4.1351, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1306786131477244e-05, |
| "loss": 4.1254, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1298400183966724e-05, |
| "loss": 4.1232, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290014236456204e-05, |
| "loss": 4.1264, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1281628288945684e-05, |
| "loss": 4.1263, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.127325872023889e-05, |
| "loss": 4.1284, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.126487277272837e-05, |
| "loss": 4.1105, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125651958282532e-05, |
| "loss": 4.1303, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.135540008544922, |
| "eval_runtime": 296.2728, |
| "eval_samples_per_second": 1287.972, |
| "eval_steps_per_second": 40.25, |
| "step": 534240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.12481336353148e-05, |
| "loss": 4.1206, |
| "step": 534528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.123974768780428e-05, |
| "loss": 4.1101, |
| "step": 535040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.123136174029376e-05, |
| "loss": 4.1271, |
| "step": 535552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1222975792783245e-05, |
| "loss": 4.1317, |
| "step": 536064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.121458984527272e-05, |
| "loss": 4.1237, |
| "step": 536576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.12062038977622e-05, |
| "loss": 4.1192, |
| "step": 537088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.119781795025168e-05, |
| "loss": 4.1264, |
| "step": 537600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.118943200274116e-05, |
| "loss": 4.112, |
| "step": 538112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.118104605523064e-05, |
| "loss": 4.1241, |
| "step": 538624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.117266010772012e-05, |
| "loss": 4.1264, |
| "step": 539136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.11642741602096e-05, |
| "loss": 4.1231, |
| "step": 539648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.115588821269908e-05, |
| "loss": 4.1266, |
| "step": 540160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.114751864399229e-05, |
| "loss": 4.1149, |
| "step": 540672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.113913269648177e-05, |
| "loss": 4.1181, |
| "step": 541184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1130746748971247e-05, |
| "loss": 4.1156, |
| "step": 541696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1122360801460727e-05, |
| "loss": 4.1065, |
| "step": 542208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.111397485395021e-05, |
| "loss": 4.117, |
| "step": 542720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.110558890643969e-05, |
| "loss": 4.1127, |
| "step": 543232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.109720295892917e-05, |
| "loss": 4.117, |
| "step": 543744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108881701141865e-05, |
| "loss": 4.1254, |
| "step": 544256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108043106390813e-05, |
| "loss": 4.1315, |
| "step": 544768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1072045116397606e-05, |
| "loss": 4.1301, |
| "step": 545280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1063659168887086e-05, |
| "loss": 4.1134, |
| "step": 545792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.10552896001803e-05, |
| "loss": 4.1251, |
| "step": 546304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1046903652669775e-05, |
| "loss": 4.1151, |
| "step": 546816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1038517705159255e-05, |
| "loss": 4.1219, |
| "step": 547328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1030131757648735e-05, |
| "loss": 4.1155, |
| "step": 547840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1021745810138215e-05, |
| "loss": 4.1031, |
| "step": 548352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1013359862627695e-05, |
| "loss": 4.1157, |
| "step": 548864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.100497391511718e-05, |
| "loss": 4.1065, |
| "step": 549376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.099658796760666e-05, |
| "loss": 4.1135, |
| "step": 549888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098821839889987e-05, |
| "loss": 4.1235, |
| "step": 550400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.097983245138935e-05, |
| "loss": 4.1188, |
| "step": 550912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.097144650387883e-05, |
| "loss": 4.1186, |
| "step": 551424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.096306055636831e-05, |
| "loss": 4.109, |
| "step": 551936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.095469098766152e-05, |
| "loss": 4.1307, |
| "step": 552448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0946305040151e-05, |
| "loss": 4.1139, |
| "step": 552960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.093791909264048e-05, |
| "loss": 4.1029, |
| "step": 553472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.092953314512996e-05, |
| "loss": 4.1115, |
| "step": 553984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.092114719761944e-05, |
| "loss": 4.1041, |
| "step": 554496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.091276125010892e-05, |
| "loss": 4.1126, |
| "step": 555008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.09043753025984e-05, |
| "loss": 4.1165, |
| "step": 555520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.089598935508788e-05, |
| "loss": 4.1105, |
| "step": 556032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0887619786381095e-05, |
| "loss": 4.1088, |
| "step": 556544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0879233838870575e-05, |
| "loss": 4.1221, |
| "step": 557056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0870864270163784e-05, |
| "loss": 4.1185, |
| "step": 557568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0862478322653264e-05, |
| "loss": 4.1225, |
| "step": 558080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0854092375142744e-05, |
| "loss": 4.1048, |
| "step": 558592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0845706427632224e-05, |
| "loss": 4.0816, |
| "step": 559104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0837336858925433e-05, |
| "loss": 4.1292, |
| "step": 559616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0828950911414913e-05, |
| "loss": 4.111, |
| "step": 560128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082056496390439e-05, |
| "loss": 4.118, |
| "step": 560640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.081217901639387e-05, |
| "loss": 4.1082, |
| "step": 561152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.080379306888335e-05, |
| "loss": 4.0983, |
| "step": 561664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.079540712137283e-05, |
| "loss": 4.0986, |
| "step": 562176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.078702117386231e-05, |
| "loss": 4.1043, |
| "step": 562688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077863522635179e-05, |
| "loss": 4.0977, |
| "step": 563200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077026565764501e-05, |
| "loss": 4.1022, |
| "step": 563712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.076189608893822e-05, |
| "loss": 4.1214, |
| "step": 564224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.07535101414277e-05, |
| "loss": 4.0966, |
| "step": 564736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.074512419391718e-05, |
| "loss": 4.0936, |
| "step": 565248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.073673824640666e-05, |
| "loss": 4.1154, |
| "step": 565760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072835229889614e-05, |
| "loss": 4.0919, |
| "step": 566272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.071996635138561e-05, |
| "loss": 4.0971, |
| "step": 566784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.071159678267883e-05, |
| "loss": 4.1057, |
| "step": 567296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.070321083516831e-05, |
| "loss": 4.1069, |
| "step": 567808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.069482488765779e-05, |
| "loss": 4.0943, |
| "step": 568320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.068643894014727e-05, |
| "loss": 4.1012, |
| "step": 568832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.067805299263675e-05, |
| "loss": 4.0887, |
| "step": 569344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.066966704512623e-05, |
| "loss": 4.1083, |
| "step": 569856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.066128109761571e-05, |
| "loss": 4.1076, |
| "step": 570368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.065289515010519e-05, |
| "loss": 4.1034, |
| "step": 570880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.064450920259467e-05, |
| "loss": 4.1054, |
| "step": 571392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0636139633887876e-05, |
| "loss": 4.11, |
| "step": 571904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0627753686377356e-05, |
| "loss": 4.1147, |
| "step": 572416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0619367738866836e-05, |
| "loss": 4.0942, |
| "step": 572928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0610998170160045e-05, |
| "loss": 4.1048, |
| "step": 573440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0602612222649525e-05, |
| "loss": 4.1102, |
| "step": 573952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0594226275139005e-05, |
| "loss": 4.0878, |
| "step": 574464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.058584032762849e-05, |
| "loss": 4.1087, |
| "step": 574976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.057745438011797e-05, |
| "loss": 4.1003, |
| "step": 575488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056906843260745e-05, |
| "loss": 4.1126, |
| "step": 576000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056069886390066e-05, |
| "loss": 4.1069, |
| "step": 576512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.055231291639014e-05, |
| "loss": 4.087, |
| "step": 577024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.054392696887962e-05, |
| "loss": 4.0899, |
| "step": 577536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.05355410213691e-05, |
| "loss": 4.0972, |
| "step": 578048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.052717145266231e-05, |
| "loss": 4.1098, |
| "step": 578560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051878550515179e-05, |
| "loss": 4.0956, |
| "step": 579072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051039955764127e-05, |
| "loss": 4.0953, |
| "step": 579584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.050202998893448e-05, |
| "loss": 4.0994, |
| "step": 580096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.049364404142396e-05, |
| "loss": 4.1026, |
| "step": 580608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0485258093913445e-05, |
| "loss": 4.0927, |
| "step": 581120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0476872146402925e-05, |
| "loss": 4.0963, |
| "step": 581632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0468486198892405e-05, |
| "loss": 4.1055, |
| "step": 582144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0460100251381885e-05, |
| "loss": 4.108, |
| "step": 582656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0451714303871365e-05, |
| "loss": 4.1004, |
| "step": 583168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0443328356360845e-05, |
| "loss": 4.0875, |
| "step": 583680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0434942408850325e-05, |
| "loss": 4.0993, |
| "step": 584192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.04265564613398e-05, |
| "loss": 4.093, |
| "step": 584704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0418186892633014e-05, |
| "loss": 4.1081, |
| "step": 585216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0409800945122494e-05, |
| "loss": 4.1034, |
| "step": 585728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0401414997611974e-05, |
| "loss": 4.0964, |
| "step": 586240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.039302905010145e-05, |
| "loss": 4.0943, |
| "step": 586752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.038464310259093e-05, |
| "loss": 4.0938, |
| "step": 587264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0376257155080414e-05, |
| "loss": 4.0871, |
| "step": 587776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0367871207569894e-05, |
| "loss": 4.0964, |
| "step": 588288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0359485260059374e-05, |
| "loss": 4.0822, |
| "step": 588800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0351099312548853e-05, |
| "loss": 4.1021, |
| "step": 589312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.03427461226458e-05, |
| "loss": 4.0901, |
| "step": 589824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.033436017513527e-05, |
| "loss": 4.0931, |
| "step": 590336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.032597422762475e-05, |
| "loss": 4.0916, |
| "step": 590848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.031758828011423e-05, |
| "loss": 4.0887, |
| "step": 591360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030920233260371e-05, |
| "loss": 4.1008, |
| "step": 591872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030081638509319e-05, |
| "loss": 4.0889, |
| "step": 592384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.029243043758267e-05, |
| "loss": 4.1114, |
| "step": 592896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.028404449007215e-05, |
| "loss": 4.1029, |
| "step": 593408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.027567492136537e-05, |
| "loss": 4.0925, |
| "step": 593920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0267305352658576e-05, |
| "loss": 4.0986, |
| "step": 594432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0258919405148056e-05, |
| "loss": 4.0883, |
| "step": 594944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0250533457637536e-05, |
| "loss": 4.0947, |
| "step": 595456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0242147510127016e-05, |
| "loss": 4.1, |
| "step": 595968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0233761562616496e-05, |
| "loss": 4.0946, |
| "step": 596480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0225391993909705e-05, |
| "loss": 4.0945, |
| "step": 596992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0217006046399185e-05, |
| "loss": 4.0928, |
| "step": 597504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0208620098888665e-05, |
| "loss": 4.0957, |
| "step": 598016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0200234151378145e-05, |
| "loss": 4.0944, |
| "step": 598528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0191848203867625e-05, |
| "loss": 4.083, |
| "step": 599040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0183478635160834e-05, |
| "loss": 4.0973, |
| "step": 599552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0175092687650314e-05, |
| "loss": 4.0848, |
| "step": 600064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.01667067401398e-05, |
| "loss": 4.1054, |
| "step": 600576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.015833717143301e-05, |
| "loss": 4.0927, |
| "step": 601088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.014995122392249e-05, |
| "loss": 4.0958, |
| "step": 601600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.014156527641197e-05, |
| "loss": 4.0835, |
| "step": 602112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.013317932890145e-05, |
| "loss": 4.0966, |
| "step": 602624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.012479338139093e-05, |
| "loss": 4.0983, |
| "step": 603136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.011640743388041e-05, |
| "loss": 4.0946, |
| "step": 603648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010802148636989e-05, |
| "loss": 4.0901, |
| "step": 604160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.009963553885937e-05, |
| "loss": 4.0998, |
| "step": 604672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.009124959134885e-05, |
| "loss": 4.0931, |
| "step": 605184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.008286364383833e-05, |
| "loss": 4.1085, |
| "step": 605696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.007449407513154e-05, |
| "loss": 4.0846, |
| "step": 606208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.006610812762102e-05, |
| "loss": 4.1023, |
| "step": 606720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.00577221801105e-05, |
| "loss": 4.0927, |
| "step": 607232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0049336232599985e-05, |
| "loss": 4.0912, |
| "step": 607744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0040950285089465e-05, |
| "loss": 4.0925, |
| "step": 608256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.003256433757894e-05, |
| "loss": 4.093, |
| "step": 608768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0024194768872154e-05, |
| "loss": 4.0957, |
| "step": 609280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0015808821361634e-05, |
| "loss": 4.0795, |
| "step": 609792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.000742287385111e-05, |
| "loss": 4.0968, |
| "step": 610304 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.111083984375, |
| "eval_runtime": 297.8115, |
| "eval_samples_per_second": 1281.317, |
| "eval_steps_per_second": 40.042, |
| "step": 610560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.999903692634059e-05, |
| "loss": 4.084, |
| "step": 610816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.999065097883007e-05, |
| "loss": 4.0795, |
| "step": 611328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.998226503131955e-05, |
| "loss": 4.0928, |
| "step": 611840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.997387908380903e-05, |
| "loss": 4.103, |
| "step": 612352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9965509515102236e-05, |
| "loss": 4.0929, |
| "step": 612864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.995712356759172e-05, |
| "loss": 4.0862, |
| "step": 613376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.99487376200812e-05, |
| "loss": 4.0994, |
| "step": 613888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.994035167257068e-05, |
| "loss": 4.0776, |
| "step": 614400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.993196572506016e-05, |
| "loss": 4.0895, |
| "step": 614912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.992357977754964e-05, |
| "loss": 4.0988, |
| "step": 615424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.991519383003912e-05, |
| "loss": 4.0917, |
| "step": 615936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.990682426133233e-05, |
| "loss": 4.0938, |
| "step": 616448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.989843831382181e-05, |
| "loss": 4.0911, |
| "step": 616960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.989005236631129e-05, |
| "loss": 4.0823, |
| "step": 617472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.988166641880077e-05, |
| "loss": 4.08, |
| "step": 617984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.987329685009398e-05, |
| "loss": 4.0772, |
| "step": 618496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.986491090258346e-05, |
| "loss": 4.0845, |
| "step": 619008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.985652495507294e-05, |
| "loss": 4.0847, |
| "step": 619520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.984813900756242e-05, |
| "loss": 4.0828, |
| "step": 620032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.983975306005191e-05, |
| "loss": 4.0927, |
| "step": 620544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.983136711254139e-05, |
| "loss": 4.1026, |
| "step": 621056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.982298116503087e-05, |
| "loss": 4.0971, |
| "step": 621568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.981459521752035e-05, |
| "loss": 4.0868, |
| "step": 622080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9806242027617286e-05, |
| "loss": 4.0901, |
| "step": 622592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9797856080106766e-05, |
| "loss": 4.091, |
| "step": 623104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9789470132596246e-05, |
| "loss": 4.0863, |
| "step": 623616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9781084185085726e-05, |
| "loss": 4.086, |
| "step": 624128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9772698237575205e-05, |
| "loss": 4.0752, |
| "step": 624640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9764312290064685e-05, |
| "loss": 4.0834, |
| "step": 625152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.9755926342554165e-05, |
| "loss": 4.0786, |
| "step": 625664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9747540395043645e-05, |
| "loss": 4.08, |
| "step": 626176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.973917082633686e-05, |
| "loss": 4.0918, |
| "step": 626688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.973078487882634e-05, |
| "loss": 4.091, |
| "step": 627200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.972239893131582e-05, |
| "loss": 4.0872, |
| "step": 627712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9714012983805294e-05, |
| "loss": 4.0834, |
| "step": 628224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9705627036294774e-05, |
| "loss": 4.0927, |
| "step": 628736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9697241088784254e-05, |
| "loss": 4.0845, |
| "step": 629248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.968887152007747e-05, |
| "loss": 4.0758, |
| "step": 629760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.968048557256694e-05, |
| "loss": 4.0822, |
| "step": 630272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.967209962505642e-05, |
| "loss": 4.0731, |
| "step": 630784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.96637136775459e-05, |
| "loss": 4.079, |
| "step": 631296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.965534410883912e-05, |
| "loss": 4.0884, |
| "step": 631808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.96469581613286e-05, |
| "loss": 4.0774, |
| "step": 632320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.963857221381808e-05, |
| "loss": 4.0834, |
| "step": 632832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.963018626630756e-05, |
| "loss": 4.0928, |
| "step": 633344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.962180031879704e-05, |
| "loss": 4.0858, |
| "step": 633856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.961341437128652e-05, |
| "loss": 4.0904, |
| "step": 634368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9605028423776e-05, |
| "loss": 4.0796, |
| "step": 634880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.959664247626548e-05, |
| "loss": 4.0535, |
| "step": 635392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.958825652875496e-05, |
| "loss": 4.096, |
| "step": 635904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.957988696004817e-05, |
| "loss": 4.0806, |
| "step": 636416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.957150101253765e-05, |
| "loss": 4.0858, |
| "step": 636928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.956311506502713e-05, |
| "loss": 4.0814, |
| "step": 637440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.955472911751661e-05, |
| "loss": 4.0698, |
| "step": 637952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.954635954880982e-05, |
| "loss": 4.0667, |
| "step": 638464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.95379736012993e-05, |
| "loss": 4.0762, |
| "step": 638976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.952958765378878e-05, |
| "loss": 4.0701, |
| "step": 639488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9521201706278263e-05, |
| "loss": 4.07, |
| "step": 640000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.95128485163752e-05, |
| "loss": 4.0924, |
| "step": 640512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.950446256886468e-05, |
| "loss": 4.0702, |
| "step": 641024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.949607662135416e-05, |
| "loss": 4.0643, |
| "step": 641536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.948769067384364e-05, |
| "loss": 4.0889, |
| "step": 642048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.947930472633312e-05, |
| "loss": 4.058, |
| "step": 642560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.94709187788226e-05, |
| "loss": 4.0658, |
| "step": 643072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.946253283131208e-05, |
| "loss": 4.0791, |
| "step": 643584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.945414688380156e-05, |
| "loss": 4.0783, |
| "step": 644096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.944577731509477e-05, |
| "loss": 4.0663, |
| "step": 644608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9437407746387986e-05, |
| "loss": 4.0691, |
| "step": 645120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9429038177681195e-05, |
| "loss": 4.0639, |
| "step": 645632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9420652230170675e-05, |
| "loss": 4.0772, |
| "step": 646144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9412266282660155e-05, |
| "loss": 4.0761, |
| "step": 646656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9403880335149635e-05, |
| "loss": 4.0739, |
| "step": 647168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9395494387639115e-05, |
| "loss": 4.0768, |
| "step": 647680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9387108440128595e-05, |
| "loss": 4.0801, |
| "step": 648192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9378722492618075e-05, |
| "loss": 4.0861, |
| "step": 648704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9370336545107555e-05, |
| "loss": 4.0644, |
| "step": 649216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9361950597597035e-05, |
| "loss": 4.0784, |
| "step": 649728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9353564650086515e-05, |
| "loss": 4.0801, |
| "step": 650240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9345178702575995e-05, |
| "loss": 4.0546, |
| "step": 650752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9336792755065475e-05, |
| "loss": 4.0782, |
| "step": 651264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.9328423186358684e-05, |
| "loss": 4.076, |
| "step": 651776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.932003723884817e-05, |
| "loss": 4.0847, |
| "step": 652288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.931166767014138e-05, |
| "loss": 4.0783, |
| "step": 652800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.930328172263086e-05, |
| "loss": 4.0615, |
| "step": 653312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.929489577512034e-05, |
| "loss": 4.0576, |
| "step": 653824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.928650982760982e-05, |
| "loss": 4.0713, |
| "step": 654336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.92781238800993e-05, |
| "loss": 4.08, |
| "step": 654848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.926975431139251e-05, |
| "loss": 4.0703, |
| "step": 655360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.926136836388199e-05, |
| "loss": 4.0658, |
| "step": 655872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.925298241637147e-05, |
| "loss": 4.0694, |
| "step": 656384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.924459646886095e-05, |
| "loss": 4.0778, |
| "step": 656896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.923621052135042e-05, |
| "loss": 4.0655, |
| "step": 657408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.922782457383991e-05, |
| "loss": 4.0687, |
| "step": 657920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.921943862632939e-05, |
| "loss": 4.0735, |
| "step": 658432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.921105267881887e-05, |
| "loss": 4.0797, |
| "step": 658944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.920268311011208e-05, |
| "loss": 4.0711, |
| "step": 659456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.919429716260156e-05, |
| "loss": 4.062, |
| "step": 659968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.918591121509104e-05, |
| "loss": 4.0647, |
| "step": 660480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.917754164638425e-05, |
| "loss": 4.0666, |
| "step": 660992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9169155698873727e-05, |
| "loss": 4.0792, |
| "step": 661504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9160769751363206e-05, |
| "loss": 4.0775, |
| "step": 662016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9152383803852686e-05, |
| "loss": 4.0692, |
| "step": 662528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9143997856342166e-05, |
| "loss": 4.0656, |
| "step": 663040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9135611908831646e-05, |
| "loss": 4.061, |
| "step": 663552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9127225961321126e-05, |
| "loss": 4.0659, |
| "step": 664064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.911885639261434e-05, |
| "loss": 4.0663, |
| "step": 664576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.911047044510382e-05, |
| "loss": 4.0537, |
| "step": 665088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.91020844975933e-05, |
| "loss": 4.0753, |
| "step": 665600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.909369855008278e-05, |
| "loss": 4.0616, |
| "step": 666112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.908531260257226e-05, |
| "loss": 4.0636, |
| "step": 666624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.907694303386547e-05, |
| "loss": 4.0623, |
| "step": 667136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.906855708635495e-05, |
| "loss": 4.0582, |
| "step": 667648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.906017113884443e-05, |
| "loss": 4.0748, |
| "step": 668160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.905178519133391e-05, |
| "loss": 4.0591, |
| "step": 668672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.904339924382339e-05, |
| "loss": 4.0847, |
| "step": 669184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.903501329631287e-05, |
| "loss": 4.0748, |
| "step": 669696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.902662734880235e-05, |
| "loss": 4.0673, |
| "step": 670208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.901825778009556e-05, |
| "loss": 4.071, |
| "step": 670720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.900987183258504e-05, |
| "loss": 4.0641, |
| "step": 671232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.9001485885074527e-05, |
| "loss": 4.0662, |
| "step": 671744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8993099937564007e-05, |
| "loss": 4.0729, |
| "step": 672256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8984713990053487e-05, |
| "loss": 4.0644, |
| "step": 672768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8976344421346696e-05, |
| "loss": 4.0704, |
| "step": 673280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8967958473836176e-05, |
| "loss": 4.0602, |
| "step": 673792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8959572526325656e-05, |
| "loss": 4.068, |
| "step": 674304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8951186578815135e-05, |
| "loss": 4.0665, |
| "step": 674816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8942800631304615e-05, |
| "loss": 4.0552, |
| "step": 675328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.893441468379409e-05, |
| "loss": 4.0711, |
| "step": 675840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8926045115087304e-05, |
| "loss": 4.0577, |
| "step": 676352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8917659167576784e-05, |
| "loss": 4.0791, |
| "step": 676864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8909273220066264e-05, |
| "loss": 4.0658, |
| "step": 677376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8900887272555744e-05, |
| "loss": 4.0689, |
| "step": 677888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8892501325045224e-05, |
| "loss": 4.0553, |
| "step": 678400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8884115377534704e-05, |
| "loss": 4.0728, |
| "step": 678912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8875729430024184e-05, |
| "loss": 4.0628, |
| "step": 679424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8867343482513664e-05, |
| "loss": 4.0685, |
| "step": 679936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.885897391380687e-05, |
| "loss": 4.0644, |
| "step": 680448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.885058796629635e-05, |
| "loss": 4.0719, |
| "step": 680960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.884220201878583e-05, |
| "loss": 4.0658, |
| "step": 681472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.883381607127531e-05, |
| "loss": 4.0823, |
| "step": 681984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.882543012376479e-05, |
| "loss": 4.0557, |
| "step": 682496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.8817060555058e-05, |
| "loss": 4.0742, |
| "step": 683008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.880867460754748e-05, |
| "loss": 4.0677, |
| "step": 683520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.88003050388407e-05, |
| "loss": 4.0683, |
| "step": 684032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.879191909133018e-05, |
| "loss": 4.0594, |
| "step": 684544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.878353314381966e-05, |
| "loss": 4.0686, |
| "step": 685056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.877514719630914e-05, |
| "loss": 4.0655, |
| "step": 685568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.876676124879862e-05, |
| "loss": 4.0597, |
| "step": 686080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.87583753012881e-05, |
| "loss": 4.0683, |
| "step": 686592 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.091145992279053, |
| "eval_runtime": 296.5858, |
| "eval_samples_per_second": 1286.612, |
| "eval_steps_per_second": 40.208, |
| "step": 686880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.874998935377758e-05, |
| "loss": 4.0588, |
| "step": 687104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.874160340626706e-05, |
| "loss": 4.0519, |
| "step": 687616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.873321745875654e-05, |
| "loss": 4.0654, |
| "step": 688128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.872483151124602e-05, |
| "loss": 4.0749, |
| "step": 688640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.87164455637355e-05, |
| "loss": 4.0704, |
| "step": 689152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.870805961622497e-05, |
| "loss": 4.0567, |
| "step": 689664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.869967366871445e-05, |
| "loss": 4.0735, |
| "step": 690176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.869128772120393e-05, |
| "loss": 4.0532, |
| "step": 690688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.868290177369342e-05, |
| "loss": 4.0624, |
| "step": 691200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.86745158261829e-05, |
| "loss": 4.0739, |
| "step": 691712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.866612987867238e-05, |
| "loss": 4.0631, |
| "step": 692224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.865774393116186e-05, |
| "loss": 4.0678, |
| "step": 692736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8649374362455066e-05, |
| "loss": 4.0673, |
| "step": 693248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8640988414944546e-05, |
| "loss": 4.0565, |
| "step": 693760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8632602467434026e-05, |
| "loss": 4.0534, |
| "step": 694272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8624216519923506e-05, |
| "loss": 4.0525, |
| "step": 694784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8615830572412986e-05, |
| "loss": 4.0558, |
| "step": 695296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8607444624902466e-05, |
| "loss": 4.0596, |
| "step": 695808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8599058677391946e-05, |
| "loss": 4.0539, |
| "step": 696320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8590689108685155e-05, |
| "loss": 4.0663, |
| "step": 696832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8582303161174635e-05, |
| "loss": 4.073, |
| "step": 697344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8573917213664115e-05, |
| "loss": 4.0727, |
| "step": 697856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8565531266153595e-05, |
| "loss": 4.062, |
| "step": 698368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.855714531864308e-05, |
| "loss": 4.0592, |
| "step": 698880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.854875937113256e-05, |
| "loss": 4.067, |
| "step": 699392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.854037342362204e-05, |
| "loss": 4.0583, |
| "step": 699904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.8531987476111515e-05, |
| "loss": 4.0562, |
| "step": 700416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.852361790740473e-05, |
| "loss": 4.0543, |
| "step": 700928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.851523195989421e-05, |
| "loss": 4.055, |
| "step": 701440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.850684601238369e-05, |
| "loss": 4.0527, |
| "step": 701952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8498460064873164e-05, |
| "loss": 4.0545, |
| "step": 702464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8490074117362644e-05, |
| "loss": 4.0642, |
| "step": 702976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8481688169852124e-05, |
| "loss": 4.0693, |
| "step": 703488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8473302222341604e-05, |
| "loss": 4.0602, |
| "step": 704000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8464916274831084e-05, |
| "loss": 4.061, |
| "step": 704512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8456530327320564e-05, |
| "loss": 4.0577, |
| "step": 705024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.844816075861378e-05, |
| "loss": 4.0653, |
| "step": 705536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.843977481110326e-05, |
| "loss": 4.0488, |
| "step": 706048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.843138886359274e-05, |
| "loss": 4.0566, |
| "step": 706560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.842300291608222e-05, |
| "loss": 4.0454, |
| "step": 707072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.84146169685717e-05, |
| "loss": 4.057, |
| "step": 707584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.840623102106118e-05, |
| "loss": 4.0583, |
| "step": 708096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.839784507355066e-05, |
| "loss": 4.0532, |
| "step": 708608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.838945912604014e-05, |
| "loss": 4.058, |
| "step": 709120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.838108955733335e-05, |
| "loss": 4.064, |
| "step": 709632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.837270360982283e-05, |
| "loss": 4.0599, |
| "step": 710144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.836431766231231e-05, |
| "loss": 4.0645, |
| "step": 710656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.835593171480179e-05, |
| "loss": 4.0572, |
| "step": 711168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.834754576729127e-05, |
| "loss": 4.0266, |
| "step": 711680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.833915981978075e-05, |
| "loss": 4.0685, |
| "step": 712192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8330790251073964e-05, |
| "loss": 4.0555, |
| "step": 712704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8322404303563444e-05, |
| "loss": 4.0629, |
| "step": 713216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8314018356052924e-05, |
| "loss": 4.0563, |
| "step": 713728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8305632408542404e-05, |
| "loss": 4.0449, |
| "step": 714240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.829729559744307e-05, |
| "loss": 4.0413, |
| "step": 714752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.828890964993255e-05, |
| "loss": 4.0476, |
| "step": 715264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.828052370242203e-05, |
| "loss": 4.0484, |
| "step": 715776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.827213775491151e-05, |
| "loss": 4.0416, |
| "step": 716288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.826375180740099e-05, |
| "loss": 4.0663, |
| "step": 716800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.825536585989047e-05, |
| "loss": 4.0483, |
| "step": 717312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.824697991237996e-05, |
| "loss": 4.034, |
| "step": 717824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.823859396486944e-05, |
| "loss": 4.0664, |
| "step": 718336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.823020801735892e-05, |
| "loss": 4.0366, |
| "step": 718848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.82218220698484e-05, |
| "loss": 4.0395, |
| "step": 719360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.821343612233788e-05, |
| "loss": 4.0551, |
| "step": 719872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.820505017482735e-05, |
| "loss": 4.0496, |
| "step": 720384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8196680606120567e-05, |
| "loss": 4.0445, |
| "step": 720896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8188311037413776e-05, |
| "loss": 4.0442, |
| "step": 721408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8179925089903256e-05, |
| "loss": 4.0392, |
| "step": 721920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8171539142392736e-05, |
| "loss": 4.0515, |
| "step": 722432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8163153194882216e-05, |
| "loss": 4.0516, |
| "step": 722944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8154767247371695e-05, |
| "loss": 4.0522, |
| "step": 723456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8146381299861175e-05, |
| "loss": 4.05, |
| "step": 723968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8137995352350655e-05, |
| "loss": 4.0539, |
| "step": 724480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8129609404840135e-05, |
| "loss": 4.0641, |
| "step": 724992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.812123983613335e-05, |
| "loss": 4.0376, |
| "step": 725504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8112853888622824e-05, |
| "loss": 4.05, |
| "step": 726016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.810448431991604e-05, |
| "loss": 4.0589, |
| "step": 726528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.809609837240552e-05, |
| "loss": 4.0332, |
| "step": 727040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.8087712424894993e-05, |
| "loss": 4.0482, |
| "step": 727552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.807932647738447e-05, |
| "loss": 4.0552, |
| "step": 728064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.807095690867769e-05, |
| "loss": 4.0572, |
| "step": 728576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.806257096116717e-05, |
| "loss": 4.0584, |
| "step": 729088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.805418501365665e-05, |
| "loss": 4.0343, |
| "step": 729600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.804579906614613e-05, |
| "loss": 4.032, |
| "step": 730112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.803741311863561e-05, |
| "loss": 4.0487, |
| "step": 730624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.802902717112509e-05, |
| "loss": 4.0529, |
| "step": 731136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.802064122361457e-05, |
| "loss": 4.0416, |
| "step": 731648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.801227165490778e-05, |
| "loss": 4.046, |
| "step": 732160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.800388570739726e-05, |
| "loss": 4.0429, |
| "step": 732672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.799549975988674e-05, |
| "loss": 4.0523, |
| "step": 733184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.798711381237622e-05, |
| "loss": 4.0456, |
| "step": 733696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.79787278648657e-05, |
| "loss": 4.0431, |
| "step": 734208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.797034191735518e-05, |
| "loss": 4.0458, |
| "step": 734720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.796195596984466e-05, |
| "loss": 4.0551, |
| "step": 735232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.795357002233414e-05, |
| "loss": 4.05, |
| "step": 735744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.794518407482362e-05, |
| "loss": 4.039, |
| "step": 736256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.793681450611683e-05, |
| "loss": 4.0388, |
| "step": 736768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7928428558606314e-05, |
| "loss": 4.0434, |
| "step": 737280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7920042611095793e-05, |
| "loss": 4.0532, |
| "step": 737792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7911656663585273e-05, |
| "loss": 4.0511, |
| "step": 738304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7903270716074753e-05, |
| "loss": 4.0427, |
| "step": 738816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.789490114736796e-05, |
| "loss": 4.0446, |
| "step": 739328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.788651519985744e-05, |
| "loss": 4.0355, |
| "step": 739840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.787812925234692e-05, |
| "loss": 4.0433, |
| "step": 740352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.78697433048364e-05, |
| "loss": 4.0372, |
| "step": 740864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.786139011493334e-05, |
| "loss": 4.0327, |
| "step": 741376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.785300416742282e-05, |
| "loss": 4.0502, |
| "step": 741888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.78446182199123e-05, |
| "loss": 4.037, |
| "step": 742400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.783623227240178e-05, |
| "loss": 4.0389, |
| "step": 742912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.782784632489127e-05, |
| "loss": 4.0381, |
| "step": 743424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.781946037738075e-05, |
| "loss": 4.0349, |
| "step": 743936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7811090808673956e-05, |
| "loss": 4.0493, |
| "step": 744448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7802704861163436e-05, |
| "loss": 4.0379, |
| "step": 744960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7794318913652916e-05, |
| "loss": 4.059, |
| "step": 745472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7785932966142396e-05, |
| "loss": 4.0512, |
| "step": 745984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7777547018631876e-05, |
| "loss": 4.0443, |
| "step": 746496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7769177449925085e-05, |
| "loss": 4.0464, |
| "step": 747008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7760791502414565e-05, |
| "loss": 4.0424, |
| "step": 747520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7752405554904045e-05, |
| "loss": 4.0411, |
| "step": 748032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7744019607393525e-05, |
| "loss": 4.052, |
| "step": 748544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7735633659883005e-05, |
| "loss": 4.046, |
| "step": 749056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7727247712372485e-05, |
| "loss": 4.0431, |
| "step": 749568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7718861764861965e-05, |
| "loss": 4.0358, |
| "step": 750080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7710475817351445e-05, |
| "loss": 4.0449, |
| "step": 750592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7702089869840925e-05, |
| "loss": 4.0407, |
| "step": 751104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7693720301134134e-05, |
| "loss": 4.0335, |
| "step": 751616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.768535073242735e-05, |
| "loss": 4.0483, |
| "step": 752128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.767696478491683e-05, |
| "loss": 4.0264, |
| "step": 752640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.76685788374063e-05, |
| "loss": 4.0597, |
| "step": 753152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.766019288989578e-05, |
| "loss": 4.0393, |
| "step": 753664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7651823321189e-05, |
| "loss": 4.0458, |
| "step": 754176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.764343737367848e-05, |
| "loss": 4.0329, |
| "step": 754688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.763505142616796e-05, |
| "loss": 4.0502, |
| "step": 755200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.762666547865744e-05, |
| "loss": 4.0411, |
| "step": 755712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.761827953114692e-05, |
| "loss": 4.0456, |
| "step": 756224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.76098935836364e-05, |
| "loss": 4.0428, |
| "step": 756736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.760150763612588e-05, |
| "loss": 4.0444, |
| "step": 757248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.759313806741909e-05, |
| "loss": 4.0436, |
| "step": 757760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.758475211990857e-05, |
| "loss": 4.061, |
| "step": 758272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.757636617239805e-05, |
| "loss": 4.0313, |
| "step": 758784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.756798022488753e-05, |
| "loss": 4.0517, |
| "step": 759296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.755959427737701e-05, |
| "loss": 4.0474, |
| "step": 759808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.755120832986649e-05, |
| "loss": 4.0444, |
| "step": 760320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.754282238235597e-05, |
| "loss": 4.0347, |
| "step": 760832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.753443643484545e-05, |
| "loss": 4.0475, |
| "step": 761344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.7526066866138656e-05, |
| "loss": 4.0399, |
| "step": 761856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.751769729743187e-05, |
| "loss": 4.0383, |
| "step": 762368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.750931134992135e-05, |
| "loss": 4.0422, |
| "step": 762880 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.075286388397217, |
| "eval_runtime": 297.3176, |
| "eval_samples_per_second": 1283.446, |
| "eval_steps_per_second": 40.109, |
| "step": 763200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.750092540241083e-05, |
| "loss": 4.0338, |
| "step": 763392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.749253945490031e-05, |
| "loss": 4.0269, |
| "step": 763904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.748415350738979e-05, |
| "loss": 4.0427, |
| "step": 764416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.747576755987927e-05, |
| "loss": 4.0521, |
| "step": 764928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.746738161236875e-05, |
| "loss": 4.049, |
| "step": 765440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.745899566485823e-05, |
| "loss": 4.0356, |
| "step": 765952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.745062609615144e-05, |
| "loss": 4.0442, |
| "step": 766464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.744224014864092e-05, |
| "loss": 4.0339, |
| "step": 766976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.74338542011304e-05, |
| "loss": 4.0371, |
| "step": 767488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.742548463242361e-05, |
| "loss": 4.0504, |
| "step": 768000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.741709868491309e-05, |
| "loss": 4.0417, |
| "step": 768512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.740871273740258e-05, |
| "loss": 4.047, |
| "step": 769024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7400343168695786e-05, |
| "loss": 4.0457, |
| "step": 769536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7391957221185266e-05, |
| "loss": 4.0314, |
| "step": 770048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7383571273674746e-05, |
| "loss": 4.0305, |
| "step": 770560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7375185326164226e-05, |
| "loss": 4.0254, |
| "step": 771072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7366799378653706e-05, |
| "loss": 4.0383, |
| "step": 771584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7358413431143186e-05, |
| "loss": 4.0374, |
| "step": 772096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7350027483632666e-05, |
| "loss": 4.0283, |
| "step": 772608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.734164153612214e-05, |
| "loss": 4.0437, |
| "step": 773120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.733325558861162e-05, |
| "loss": 4.0503, |
| "step": 773632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7324886019904835e-05, |
| "loss": 4.0462, |
| "step": 774144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7316500072394315e-05, |
| "loss": 4.0425, |
| "step": 774656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7308114124883794e-05, |
| "loss": 4.04, |
| "step": 775168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7299728177373274e-05, |
| "loss": 4.044, |
| "step": 775680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7291342229862754e-05, |
| "loss": 4.0401, |
| "step": 776192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7282972661155963e-05, |
| "loss": 4.0302, |
| "step": 776704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.7274586713645443e-05, |
| "loss": 4.0281, |
| "step": 777216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.726620076613492e-05, |
| "loss": 4.0358, |
| "step": 777728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.72578148186244e-05, |
| "loss": 4.0317, |
| "step": 778240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.724942887111388e-05, |
| "loss": 4.033, |
| "step": 778752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.724104292360336e-05, |
| "loss": 4.0397, |
| "step": 779264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.723265697609284e-05, |
| "loss": 4.0427, |
| "step": 779776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.722427102858232e-05, |
| "loss": 4.0437, |
| "step": 780288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.721590145987553e-05, |
| "loss": 4.0313, |
| "step": 780800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.720753189116875e-05, |
| "loss": 4.0414, |
| "step": 781312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.719914594365823e-05, |
| "loss": 4.0454, |
| "step": 781824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.719075999614771e-05, |
| "loss": 4.0211, |
| "step": 782336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.718237404863719e-05, |
| "loss": 4.0352, |
| "step": 782848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.717398810112667e-05, |
| "loss": 4.0257, |
| "step": 783360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.716560215361615e-05, |
| "loss": 4.0348, |
| "step": 783872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.715721620610563e-05, |
| "loss": 4.0329, |
| "step": 784384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.714883025859511e-05, |
| "loss": 4.0312, |
| "step": 784896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.714046068988832e-05, |
| "loss": 4.0374, |
| "step": 785408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7132091121181526e-05, |
| "loss": 4.0429, |
| "step": 785920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7123705173671006e-05, |
| "loss": 4.0402, |
| "step": 786432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7115319226160486e-05, |
| "loss": 4.0448, |
| "step": 786944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.7106933278649966e-05, |
| "loss": 4.0326, |
| "step": 787456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.709854733113945e-05, |
| "loss": 4.0076, |
| "step": 787968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.709017776243266e-05, |
| "loss": 4.0449, |
| "step": 788480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.708179181492214e-05, |
| "loss": 4.0339, |
| "step": 788992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.707340586741162e-05, |
| "loss": 4.0415, |
| "step": 789504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.70650199199011e-05, |
| "loss": 4.032, |
| "step": 790016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.705663397239058e-05, |
| "loss": 4.025, |
| "step": 790528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.704824802488006e-05, |
| "loss": 4.0191, |
| "step": 791040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.703987845617327e-05, |
| "loss": 4.0247, |
| "step": 791552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.703149250866275e-05, |
| "loss": 4.0296, |
| "step": 792064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.702310656115223e-05, |
| "loss": 4.0167, |
| "step": 792576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.701472061364171e-05, |
| "loss": 4.0447, |
| "step": 793088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.700633466613119e-05, |
| "loss": 4.0274, |
| "step": 793600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.699794871862067e-05, |
| "loss": 4.0142, |
| "step": 794112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.698956277111015e-05, |
| "loss": 4.0412, |
| "step": 794624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.698117682359963e-05, |
| "loss": 4.0169, |
| "step": 795136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6972807254892846e-05, |
| "loss": 4.0194, |
| "step": 795648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6964421307382326e-05, |
| "loss": 4.0336, |
| "step": 796160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.69560353598718e-05, |
| "loss": 4.0288, |
| "step": 796672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.694764941236128e-05, |
| "loss": 4.0223, |
| "step": 797184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6939279843654495e-05, |
| "loss": 4.0231, |
| "step": 797696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6930893896143975e-05, |
| "loss": 4.0204, |
| "step": 798208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6922524327437184e-05, |
| "loss": 4.0274, |
| "step": 798720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6914138379926664e-05, |
| "loss": 4.0352, |
| "step": 799232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.690576881121987e-05, |
| "loss": 4.0277, |
| "step": 799744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.689739924251309e-05, |
| "loss": 4.0289, |
| "step": 800256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.688901329500257e-05, |
| "loss": 4.028, |
| "step": 800768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.688062734749205e-05, |
| "loss": 4.0449, |
| "step": 801280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.687224139998153e-05, |
| "loss": 4.0193, |
| "step": 801792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.686385545247101e-05, |
| "loss": 4.0304, |
| "step": 802304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.685546950496049e-05, |
| "loss": 4.0318, |
| "step": 802816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.684708355744997e-05, |
| "loss": 4.017, |
| "step": 803328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.683869760993945e-05, |
| "loss": 4.0282, |
| "step": 803840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.683031166242892e-05, |
| "loss": 4.0334, |
| "step": 804352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.68219257149184e-05, |
| "loss": 4.0373, |
| "step": 804864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.681353976740788e-05, |
| "loss": 4.04, |
| "step": 805376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.680515381989736e-05, |
| "loss": 4.0103, |
| "step": 805888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.679676787238684e-05, |
| "loss": 4.012, |
| "step": 806400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6788414682483794e-05, |
| "loss": 4.0264, |
| "step": 806912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6780028734973274e-05, |
| "loss": 4.0324, |
| "step": 807424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.677164278746275e-05, |
| "loss": 4.0199, |
| "step": 807936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.676325683995223e-05, |
| "loss": 4.0247, |
| "step": 808448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.675487089244171e-05, |
| "loss": 4.0235, |
| "step": 808960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6746484944931187e-05, |
| "loss": 4.0315, |
| "step": 809472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6738098997420667e-05, |
| "loss": 4.0224, |
| "step": 809984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6729713049910146e-05, |
| "loss": 4.0218, |
| "step": 810496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6721327102399626e-05, |
| "loss": 4.0283, |
| "step": 811008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6712941154889106e-05, |
| "loss": 4.0338, |
| "step": 811520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6704555207378586e-05, |
| "loss": 4.0309, |
| "step": 812032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6696169259868066e-05, |
| "loss": 4.0172, |
| "step": 812544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6687799691161275e-05, |
| "loss": 4.0177, |
| "step": 813056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.667941374365076e-05, |
| "loss": 4.026, |
| "step": 813568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.667104417494397e-05, |
| "loss": 4.0313, |
| "step": 814080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.666265822743345e-05, |
| "loss": 4.0337, |
| "step": 814592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.665427227992293e-05, |
| "loss": 4.0209, |
| "step": 815104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.664588633241241e-05, |
| "loss": 4.0229, |
| "step": 815616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.663751676370562e-05, |
| "loss": 4.0166, |
| "step": 816128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.66291308161951e-05, |
| "loss": 4.0225, |
| "step": 816640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.662074486868458e-05, |
| "loss": 4.0165, |
| "step": 817152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.661235892117406e-05, |
| "loss": 4.0161, |
| "step": 817664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.660397297366354e-05, |
| "loss": 4.0286, |
| "step": 818176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.659558702615302e-05, |
| "loss": 4.0175, |
| "step": 818688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.65872010786425e-05, |
| "loss": 4.0172, |
| "step": 819200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.657881513113198e-05, |
| "loss": 4.0171, |
| "step": 819712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6570445562425196e-05, |
| "loss": 4.0163, |
| "step": 820224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6562059614914676e-05, |
| "loss": 4.0274, |
| "step": 820736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6553673667404156e-05, |
| "loss": 4.0225, |
| "step": 821248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6545287719893636e-05, |
| "loss": 4.0328, |
| "step": 821760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6536918151186845e-05, |
| "loss": 4.029, |
| "step": 822272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6528532203676325e-05, |
| "loss": 4.0265, |
| "step": 822784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6520162634969534e-05, |
| "loss": 4.0261, |
| "step": 823296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6511776687459014e-05, |
| "loss": 4.0206, |
| "step": 823808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.650340711875222e-05, |
| "loss": 4.0222, |
| "step": 824320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.64950211712417e-05, |
| "loss": 4.0304, |
| "step": 824832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.648663522373118e-05, |
| "loss": 4.0222, |
| "step": 825344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.647824927622067e-05, |
| "loss": 4.0269, |
| "step": 825856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.646986332871015e-05, |
| "loss": 4.0125, |
| "step": 826368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.646147738119963e-05, |
| "loss": 4.0272, |
| "step": 826880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.645309143368911e-05, |
| "loss": 4.0178, |
| "step": 827392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.644470548617858e-05, |
| "loss": 4.0154, |
| "step": 827904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.643631953866806e-05, |
| "loss": 4.0251, |
| "step": 828416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.642793359115754e-05, |
| "loss": 4.0065, |
| "step": 828928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.641954764364702e-05, |
| "loss": 4.0434, |
| "step": 829440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.64111616961365e-05, |
| "loss": 4.017, |
| "step": 829952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.640277574862598e-05, |
| "loss": 4.0275, |
| "step": 830464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.639438980111546e-05, |
| "loss": 4.0121, |
| "step": 830976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.638602023240867e-05, |
| "loss": 4.0273, |
| "step": 831488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.637763428489815e-05, |
| "loss": 4.0201, |
| "step": 832000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.636924833738764e-05, |
| "loss": 4.027, |
| "step": 832512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.636087876868085e-05, |
| "loss": 4.0221, |
| "step": 833024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.635249282117033e-05, |
| "loss": 4.0222, |
| "step": 833536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.634410687365981e-05, |
| "loss": 4.0234, |
| "step": 834048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.633572092614929e-05, |
| "loss": 4.0428, |
| "step": 834560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.632733497863877e-05, |
| "loss": 4.008, |
| "step": 835072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.631894903112825e-05, |
| "loss": 4.0352, |
| "step": 835584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.631056308361773e-05, |
| "loss": 4.029, |
| "step": 836096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.630217713610721e-05, |
| "loss": 4.0215, |
| "step": 836608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.629379118859669e-05, |
| "loss": 4.0154, |
| "step": 837120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6285421619889896e-05, |
| "loss": 4.0262, |
| "step": 837632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.6277035672379376e-05, |
| "loss": 4.0205, |
| "step": 838144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.626866610367259e-05, |
| "loss": 4.0221, |
| "step": 838656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.626028015616207e-05, |
| "loss": 4.0192, |
| "step": 839168 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.062204360961914, |
| "eval_runtime": 293.2429, |
| "eval_samples_per_second": 1301.28, |
| "eval_steps_per_second": 40.666, |
| "step": 839520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.625189420865155e-05, |
| "loss": 4.0161, |
| "step": 839680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.624350826114103e-05, |
| "loss": 4.0081, |
| "step": 840192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.623512231363051e-05, |
| "loss": 4.0203, |
| "step": 840704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.622673636611999e-05, |
| "loss": 4.0317, |
| "step": 841216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.621835041860947e-05, |
| "loss": 4.0298, |
| "step": 841728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6209964471098945e-05, |
| "loss": 4.0174, |
| "step": 842240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6201578523588425e-05, |
| "loss": 4.028, |
| "step": 842752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6193192576077905e-05, |
| "loss": 4.0164, |
| "step": 843264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6184806628567384e-05, |
| "loss": 4.0147, |
| "step": 843776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6176420681056864e-05, |
| "loss": 4.026, |
| "step": 844288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6168034733546344e-05, |
| "loss": 4.0278, |
| "step": 844800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6159648786035824e-05, |
| "loss": 4.028, |
| "step": 845312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6151262838525304e-05, |
| "loss": 4.0231, |
| "step": 845824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6142876891014784e-05, |
| "loss": 4.0132, |
| "step": 846336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.613449094350427e-05, |
| "loss": 4.015, |
| "step": 846848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.612610499599375e-05, |
| "loss": 4.0014, |
| "step": 847360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.611771904848323e-05, |
| "loss": 4.0204, |
| "step": 847872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.610934947977644e-05, |
| "loss": 4.0179, |
| "step": 848384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.610096353226592e-05, |
| "loss": 4.0092, |
| "step": 848896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.60925775847554e-05, |
| "loss": 4.0231, |
| "step": 849408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.608419163724488e-05, |
| "loss": 4.0314, |
| "step": 849920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.607580568973436e-05, |
| "loss": 4.0262, |
| "step": 850432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.606741974222383e-05, |
| "loss": 4.0249, |
| "step": 850944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.605903379471331e-05, |
| "loss": 4.0198, |
| "step": 851456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.605064784720279e-05, |
| "loss": 4.0236, |
| "step": 851968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.604227827849601e-05, |
| "loss": 4.0201, |
| "step": 852480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.603389233098549e-05, |
| "loss": 4.0106, |
| "step": 852992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.602550638347497e-05, |
| "loss": 4.0076, |
| "step": 853504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.601712043596445e-05, |
| "loss": 4.0178, |
| "step": 854016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.600875086725766e-05, |
| "loss": 4.0124, |
| "step": 854528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.600036491974714e-05, |
| "loss": 4.0175, |
| "step": 855040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.599197897223662e-05, |
| "loss": 4.0181, |
| "step": 855552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.59835930247261e-05, |
| "loss": 4.0189, |
| "step": 856064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.597522345601931e-05, |
| "loss": 4.028, |
| "step": 856576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.596683750850879e-05, |
| "loss": 4.0104, |
| "step": 857088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5958467939802e-05, |
| "loss": 4.0215, |
| "step": 857600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5950081992291476e-05, |
| "loss": 4.0249, |
| "step": 858112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.594169604478096e-05, |
| "loss": 4.0016, |
| "step": 858624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.593332647607418e-05, |
| "loss": 4.0188, |
| "step": 859136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.592494052856366e-05, |
| "loss": 4.009, |
| "step": 859648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.591655458105313e-05, |
| "loss": 4.0113, |
| "step": 860160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.590816863354261e-05, |
| "loss": 4.0161, |
| "step": 860672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.589978268603209e-05, |
| "loss": 4.0133, |
| "step": 861184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.589139673852157e-05, |
| "loss": 4.0176, |
| "step": 861696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.588301079101105e-05, |
| "loss": 4.0235, |
| "step": 862208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.587462484350053e-05, |
| "loss": 4.0219, |
| "step": 862720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.586623889599001e-05, |
| "loss": 4.0241, |
| "step": 863232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.585785294847949e-05, |
| "loss": 4.0142, |
| "step": 863744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.584946700096897e-05, |
| "loss": 3.9888, |
| "step": 864256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.584108105345845e-05, |
| "loss": 4.0248, |
| "step": 864768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5832727863555396e-05, |
| "loss": 4.0127, |
| "step": 865280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5824341916044876e-05, |
| "loss": 4.0241, |
| "step": 865792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5815955968534356e-05, |
| "loss": 4.0131, |
| "step": 866304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5807570021023836e-05, |
| "loss": 4.0026, |
| "step": 866816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5799184073513316e-05, |
| "loss": 4.0073, |
| "step": 867328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5790798126002796e-05, |
| "loss": 4.0006, |
| "step": 867840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5782412178492276e-05, |
| "loss": 4.0122, |
| "step": 868352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5774042609785485e-05, |
| "loss": 3.9934, |
| "step": 868864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5765656662274965e-05, |
| "loss": 4.0311, |
| "step": 869376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5757270714764445e-05, |
| "loss": 4.0111, |
| "step": 869888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5748884767253925e-05, |
| "loss": 3.9913, |
| "step": 870400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5740515198547134e-05, |
| "loss": 4.0199, |
| "step": 870912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5732129251036614e-05, |
| "loss": 3.9995, |
| "step": 871424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5723743303526094e-05, |
| "loss": 4.0023, |
| "step": 871936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.571535735601558e-05, |
| "loss": 4.0097, |
| "step": 872448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.570698778730879e-05, |
| "loss": 4.0104, |
| "step": 872960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569860183979827e-05, |
| "loss": 4.0038, |
| "step": 873472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569021589228775e-05, |
| "loss": 4.0086, |
| "step": 873984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.568182994477723e-05, |
| "loss": 3.9977, |
| "step": 874496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.567344399726671e-05, |
| "loss": 4.0084, |
| "step": 875008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.566505804975619e-05, |
| "loss": 4.0159, |
| "step": 875520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.565667210224566e-05, |
| "loss": 4.0083, |
| "step": 876032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.564828615473514e-05, |
| "loss": 4.015, |
| "step": 876544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.563990020722462e-05, |
| "loss": 4.0046, |
| "step": 877056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.56315142597141e-05, |
| "loss": 4.0298, |
| "step": 877568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.562314469100732e-05, |
| "loss": 3.9983, |
| "step": 878080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.56147587434968e-05, |
| "loss": 4.0142, |
| "step": 878592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.560637279598628e-05, |
| "loss": 4.0047, |
| "step": 879104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5598003227279494e-05, |
| "loss": 4.0062, |
| "step": 879616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.558961727976897e-05, |
| "loss": 4.0054, |
| "step": 880128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.558123133225845e-05, |
| "loss": 4.0155, |
| "step": 880640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.557284538474793e-05, |
| "loss": 4.0175, |
| "step": 881152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.556445943723741e-05, |
| "loss": 4.019, |
| "step": 881664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.555607348972689e-05, |
| "loss": 3.9923, |
| "step": 882176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.554768754221637e-05, |
| "loss": 3.9937, |
| "step": 882688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.553930159470585e-05, |
| "loss": 4.006, |
| "step": 883200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5530932025999056e-05, |
| "loss": 4.0142, |
| "step": 883712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5522546078488536e-05, |
| "loss": 4.001, |
| "step": 884224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.551417650978175e-05, |
| "loss": 4.0042, |
| "step": 884736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.550579056227123e-05, |
| "loss": 4.0089, |
| "step": 885248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.549740461476071e-05, |
| "loss": 4.0077, |
| "step": 885760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.548901866725019e-05, |
| "loss": 4.0072, |
| "step": 886272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.548063271973967e-05, |
| "loss": 4.0046, |
| "step": 886784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.547226315103288e-05, |
| "loss": 4.0097, |
| "step": 887296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.546387720352236e-05, |
| "loss": 4.0122, |
| "step": 887808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.545549125601184e-05, |
| "loss": 4.016, |
| "step": 888320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.544710530850132e-05, |
| "loss": 3.9962, |
| "step": 888832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.54387193609908e-05, |
| "loss": 3.9984, |
| "step": 889344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.543033341348028e-05, |
| "loss": 4.0086, |
| "step": 889856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.542196384477349e-05, |
| "loss": 4.0111, |
| "step": 890368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.541357789726297e-05, |
| "loss": 4.0167, |
| "step": 890880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5405191949752456e-05, |
| "loss": 4.0034, |
| "step": 891392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5396806002241936e-05, |
| "loss": 4.0081, |
| "step": 891904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5388420054731416e-05, |
| "loss": 3.9983, |
| "step": 892416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5380034107220896e-05, |
| "loss": 4.0032, |
| "step": 892928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5371648159710376e-05, |
| "loss": 4.0, |
| "step": 893440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5363278591003585e-05, |
| "loss": 3.9973, |
| "step": 893952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5354892643493065e-05, |
| "loss": 4.0059, |
| "step": 894464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5346506695982545e-05, |
| "loss": 4.0015, |
| "step": 894976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5338120748472025e-05, |
| "loss": 3.9986, |
| "step": 895488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.53297348009615e-05, |
| "loss": 3.9998, |
| "step": 896000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.532134885345098e-05, |
| "loss": 3.9966, |
| "step": 896512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5312979284744194e-05, |
| "loss": 4.0101, |
| "step": 897024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5304593337233674e-05, |
| "loss": 4.0037, |
| "step": 897536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5296207389723154e-05, |
| "loss": 4.013, |
| "step": 898048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5287821442212634e-05, |
| "loss": 4.0066, |
| "step": 898560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5279435494702114e-05, |
| "loss": 4.0125, |
| "step": 899072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.527106592599532e-05, |
| "loss": 4.0091, |
| "step": 899584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.52626799784848e-05, |
| "loss": 3.9967, |
| "step": 900096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.525429403097428e-05, |
| "loss": 4.0097, |
| "step": 900608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.524590808346376e-05, |
| "loss": 4.0065, |
| "step": 901120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.523752213595324e-05, |
| "loss": 4.0034, |
| "step": 901632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.522913618844272e-05, |
| "loss": 4.0128, |
| "step": 902144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.52207502409322e-05, |
| "loss": 3.9994, |
| "step": 902656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.521236429342168e-05, |
| "loss": 4.0046, |
| "step": 903168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.520397834591116e-05, |
| "loss": 3.9991, |
| "step": 903680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.519559239840064e-05, |
| "loss": 4.0001, |
| "step": 904192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.518722282969386e-05, |
| "loss": 4.0033, |
| "step": 904704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.517883688218334e-05, |
| "loss": 3.9941, |
| "step": 905216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.517045093467282e-05, |
| "loss": 4.0219, |
| "step": 905728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.51620649871623e-05, |
| "loss": 4.0043, |
| "step": 906240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.515369541845551e-05, |
| "loss": 4.0093, |
| "step": 906752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.514530947094499e-05, |
| "loss": 3.9958, |
| "step": 907264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.513692352343447e-05, |
| "loss": 4.0089, |
| "step": 907776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.512853757592395e-05, |
| "loss": 4.0056, |
| "step": 908288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.512015162841343e-05, |
| "loss": 4.0099, |
| "step": 908800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5111782059706636e-05, |
| "loss": 4.0034, |
| "step": 909312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5103396112196116e-05, |
| "loss": 4.0028, |
| "step": 909824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5095010164685596e-05, |
| "loss": 4.0035, |
| "step": 910336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5086624217175076e-05, |
| "loss": 4.0256, |
| "step": 910848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.507825464846829e-05, |
| "loss": 3.9957, |
| "step": 911360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.506986870095777e-05, |
| "loss": 4.0164, |
| "step": 911872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.506148275344725e-05, |
| "loss": 4.0115, |
| "step": 912384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.505309680593673e-05, |
| "loss": 4.001, |
| "step": 912896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.504471085842621e-05, |
| "loss": 4.005, |
| "step": 913408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5036324910915685e-05, |
| "loss": 4.0036, |
| "step": 913920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5027938963405165e-05, |
| "loss": 4.0025, |
| "step": 914432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.501956939469838e-05, |
| "loss": 4.0033, |
| "step": 914944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.501118344718786e-05, |
| "loss": 4.0024, |
| "step": 915456 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.050577640533447, |
| "eval_runtime": 287.2056, |
| "eval_samples_per_second": 1328.634, |
| "eval_steps_per_second": 41.521, |
| "step": 915840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.5002797499677334e-05, |
| "loss": 4.0017, |
| "step": 915968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4994411552166814e-05, |
| "loss": 3.9868, |
| "step": 916480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4986025604656294e-05, |
| "loss": 4.0009, |
| "step": 916992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4977672414753246e-05, |
| "loss": 4.019, |
| "step": 917504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4969286467242726e-05, |
| "loss": 4.0082, |
| "step": 918016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4960900519732206e-05, |
| "loss": 4.0008, |
| "step": 918528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4952514572221686e-05, |
| "loss": 4.0154, |
| "step": 919040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.494412862471116e-05, |
| "loss": 3.9952, |
| "step": 919552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4935759056004375e-05, |
| "loss": 3.9963, |
| "step": 920064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4927373108493855e-05, |
| "loss": 4.0112, |
| "step": 920576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4918987160983335e-05, |
| "loss": 4.0104, |
| "step": 921088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.491060121347281e-05, |
| "loss": 4.0069, |
| "step": 921600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4902231644766024e-05, |
| "loss": 4.0124, |
| "step": 922112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4893845697255504e-05, |
| "loss": 3.9931, |
| "step": 922624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4885459749744984e-05, |
| "loss": 3.995, |
| "step": 923136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4877073802234464e-05, |
| "loss": 3.9869, |
| "step": 923648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4868687854723944e-05, |
| "loss": 4.0029, |
| "step": 924160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4860301907213424e-05, |
| "loss": 4.0024, |
| "step": 924672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4851915959702903e-05, |
| "loss": 3.9923, |
| "step": 925184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4843530012192383e-05, |
| "loss": 3.998, |
| "step": 925696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4835144064681863e-05, |
| "loss": 4.021, |
| "step": 926208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.482675811717134e-05, |
| "loss": 4.0037, |
| "step": 926720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.481837216966082e-05, |
| "loss": 4.0079, |
| "step": 927232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.48099862221503e-05, |
| "loss": 4.0045, |
| "step": 927744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.480161665344351e-05, |
| "loss": 4.0083, |
| "step": 928256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.479324708473672e-05, |
| "loss": 4.0017, |
| "step": 928768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.47848611372262e-05, |
| "loss": 3.9965, |
| "step": 929280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.477647518971569e-05, |
| "loss": 3.9888, |
| "step": 929792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.476808924220517e-05, |
| "loss": 4.0023, |
| "step": 930304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.475970329469465e-05, |
| "loss": 3.9913, |
| "step": 930816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.475131734718413e-05, |
| "loss": 4.0015, |
| "step": 931328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.474293139967361e-05, |
| "loss": 3.9989, |
| "step": 931840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.473454545216309e-05, |
| "loss": 4.0087, |
| "step": 932352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.47261758834563e-05, |
| "loss": 4.007, |
| "step": 932864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.471778993594578e-05, |
| "loss": 3.9983, |
| "step": 933376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4709420367238986e-05, |
| "loss": 4.006, |
| "step": 933888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4701034419728466e-05, |
| "loss": 4.0079, |
| "step": 934400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4692648472217946e-05, |
| "loss": 3.9825, |
| "step": 934912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4684262524707426e-05, |
| "loss": 3.9996, |
| "step": 935424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4675876577196906e-05, |
| "loss": 3.9942, |
| "step": 935936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4667490629686386e-05, |
| "loss": 3.9905, |
| "step": 936448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4659104682175866e-05, |
| "loss": 4.0021, |
| "step": 936960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.465073511346908e-05, |
| "loss": 3.9985, |
| "step": 937472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.464234916595856e-05, |
| "loss": 4.0001, |
| "step": 937984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.463396321844804e-05, |
| "loss": 4.0041, |
| "step": 938496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.462557727093752e-05, |
| "loss": 4.0054, |
| "step": 939008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4617191323426995e-05, |
| "loss": 4.0068, |
| "step": 939520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4608805375916475e-05, |
| "loss": 4.0013, |
| "step": 940032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.460043580720969e-05, |
| "loss": 3.9728, |
| "step": 940544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.459204985969917e-05, |
| "loss": 4.0036, |
| "step": 941056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4583663912188644e-05, |
| "loss": 4.0002, |
| "step": 941568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4575277964678124e-05, |
| "loss": 4.0072, |
| "step": 942080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4566892017167604e-05, |
| "loss": 3.9976, |
| "step": 942592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.455850606965709e-05, |
| "loss": 3.9897, |
| "step": 943104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.455012012214657e-05, |
| "loss": 3.9853, |
| "step": 943616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.454173417463605e-05, |
| "loss": 3.9868, |
| "step": 944128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.453334822712553e-05, |
| "loss": 3.9946, |
| "step": 944640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.452499503722247e-05, |
| "loss": 3.9769, |
| "step": 945152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.451660908971195e-05, |
| "loss": 4.0137, |
| "step": 945664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.450822314220143e-05, |
| "loss": 3.9961, |
| "step": 946176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.449983719469091e-05, |
| "loss": 3.977, |
| "step": 946688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.449145124718039e-05, |
| "loss": 4.0023, |
| "step": 947200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.448306529966987e-05, |
| "loss": 3.9848, |
| "step": 947712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.447467935215935e-05, |
| "loss": 3.9848, |
| "step": 948224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.446629340464883e-05, |
| "loss": 3.9936, |
| "step": 948736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.445790745713831e-05, |
| "loss": 3.9955, |
| "step": 949248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4449537888431524e-05, |
| "loss": 3.9822, |
| "step": 949760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.444116831972473e-05, |
| "loss": 3.9975, |
| "step": 950272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.443278237221421e-05, |
| "loss": 3.9815, |
| "step": 950784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.442441280350742e-05, |
| "loss": 3.9907, |
| "step": 951296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.44160268559969e-05, |
| "loss": 3.9997, |
| "step": 951808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.440764090848638e-05, |
| "loss": 3.9957, |
| "step": 952320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.439925496097586e-05, |
| "loss": 3.995, |
| "step": 952832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.439086901346534e-05, |
| "loss": 3.9905, |
| "step": 953344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.438248306595482e-05, |
| "loss": 4.0148, |
| "step": 953856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.43740971184443e-05, |
| "loss": 3.9805, |
| "step": 954368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.436571117093378e-05, |
| "loss": 3.9982, |
| "step": 954880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.435732522342326e-05, |
| "loss": 3.9877, |
| "step": 955392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434897203352021e-05, |
| "loss": 3.9905, |
| "step": 955904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434058608600969e-05, |
| "loss": 3.9926, |
| "step": 956416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.433220013849917e-05, |
| "loss": 3.9935, |
| "step": 956928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.432381419098865e-05, |
| "loss": 4.0052, |
| "step": 957440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4315428243478127e-05, |
| "loss": 4.0035, |
| "step": 957952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4307042295967607e-05, |
| "loss": 3.9781, |
| "step": 958464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4298672727260816e-05, |
| "loss": 3.979, |
| "step": 958976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4290286779750296e-05, |
| "loss": 3.9829, |
| "step": 959488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4281900832239776e-05, |
| "loss": 3.9991, |
| "step": 960000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4273514884729256e-05, |
| "loss": 3.9872, |
| "step": 960512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4265128937218735e-05, |
| "loss": 3.9853, |
| "step": 961024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4256742989708215e-05, |
| "loss": 3.9986, |
| "step": 961536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4248357042197695e-05, |
| "loss": 3.988, |
| "step": 962048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.423997109468718e-05, |
| "loss": 3.9933, |
| "step": 962560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4231585147176655e-05, |
| "loss": 3.9854, |
| "step": 963072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4223199199666135e-05, |
| "loss": 3.9952, |
| "step": 963584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4214813252155615e-05, |
| "loss": 3.9959, |
| "step": 964096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4206427304645095e-05, |
| "loss": 3.9989, |
| "step": 964608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4198057735938304e-05, |
| "loss": 3.9811, |
| "step": 965120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.418968816723152e-05, |
| "loss": 3.9826, |
| "step": 965632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4181302219721e-05, |
| "loss": 3.9928, |
| "step": 966144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.417291627221048e-05, |
| "loss": 3.9921, |
| "step": 966656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.416453032469995e-05, |
| "loss": 4.006, |
| "step": 967168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.415614437718943e-05, |
| "loss": 3.9856, |
| "step": 967680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.414775842967892e-05, |
| "loss": 3.9892, |
| "step": 968192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.41393724821684e-05, |
| "loss": 3.9862, |
| "step": 968704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.413098653465788e-05, |
| "loss": 3.9813, |
| "step": 969216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.412261696595109e-05, |
| "loss": 3.988, |
| "step": 969728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.411423101844057e-05, |
| "loss": 3.9868, |
| "step": 970240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.410584507093005e-05, |
| "loss": 3.9891, |
| "step": 970752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.409745912341953e-05, |
| "loss": 3.9849, |
| "step": 971264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.408908955471274e-05, |
| "loss": 3.9817, |
| "step": 971776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.408070360720222e-05, |
| "loss": 3.9857, |
| "step": 972288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.40723176596917e-05, |
| "loss": 3.9785, |
| "step": 972800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.406394809098491e-05, |
| "loss": 3.9928, |
| "step": 973312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.405556214347439e-05, |
| "loss": 3.9927, |
| "step": 973824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4047176195963874e-05, |
| "loss": 3.9924, |
| "step": 974336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4038790248453354e-05, |
| "loss": 3.996, |
| "step": 974848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4030404300942833e-05, |
| "loss": 3.995, |
| "step": 975360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.402203473223604e-05, |
| "loss": 3.992, |
| "step": 975872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.401364878472552e-05, |
| "loss": 3.9818, |
| "step": 976384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4005262837215e-05, |
| "loss": 3.995, |
| "step": 976896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.399687688970448e-05, |
| "loss": 3.9892, |
| "step": 977408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398849094219396e-05, |
| "loss": 3.9875, |
| "step": 977920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398010499468344e-05, |
| "loss": 3.9969, |
| "step": 978432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.397171904717292e-05, |
| "loss": 3.9827, |
| "step": 978944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.39633330996624e-05, |
| "loss": 3.9911, |
| "step": 979456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.395496353095561e-05, |
| "loss": 3.9808, |
| "step": 979968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.394657758344509e-05, |
| "loss": 3.9862, |
| "step": 980480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.393820801473831e-05, |
| "loss": 3.9898, |
| "step": 980992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.392982206722779e-05, |
| "loss": 3.9807, |
| "step": 981504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.392143611971727e-05, |
| "loss": 4.0008, |
| "step": 982016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.391305017220675e-05, |
| "loss": 3.9909, |
| "step": 982528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.390466422469623e-05, |
| "loss": 3.9914, |
| "step": 983040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.389627827718571e-05, |
| "loss": 3.9848, |
| "step": 983552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388789232967519e-05, |
| "loss": 3.9902, |
| "step": 984064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.387950638216467e-05, |
| "loss": 3.9937, |
| "step": 984576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.387112043465414e-05, |
| "loss": 3.9945, |
| "step": 985088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3862750865947356e-05, |
| "loss": 3.9897, |
| "step": 985600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3854364918436836e-05, |
| "loss": 3.9857, |
| "step": 986112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.384597897092631e-05, |
| "loss": 3.9912, |
| "step": 986624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383759302341579e-05, |
| "loss": 4.0064, |
| "step": 987136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3829223454709005e-05, |
| "loss": 3.9869, |
| "step": 987648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.382083750719849e-05, |
| "loss": 3.995, |
| "step": 988160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3812451559687965e-05, |
| "loss": 3.9991, |
| "step": 988672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3804065612177445e-05, |
| "loss": 3.9879, |
| "step": 989184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.379569604347066e-05, |
| "loss": 3.9897, |
| "step": 989696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.378732647476387e-05, |
| "loss": 3.9866, |
| "step": 990208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.377894052725335e-05, |
| "loss": 3.9882, |
| "step": 990720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.377055457974283e-05, |
| "loss": 3.9926, |
| "step": 991232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.376216863223231e-05, |
| "loss": 3.9838, |
| "step": 991744 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.041832447052002, |
| "eval_runtime": 287.974, |
| "eval_samples_per_second": 1325.088, |
| "eval_steps_per_second": 41.41, |
| "step": 992160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.375378268472178e-05, |
| "loss": 4.0028, |
| "step": 992256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.374539673721126e-05, |
| "loss": 3.9708, |
| "step": 992768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.373701078970074e-05, |
| "loss": 3.9857, |
| "step": 993280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.372862484219023e-05, |
| "loss": 4.0051, |
| "step": 993792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.372023889467971e-05, |
| "loss": 3.9917, |
| "step": 994304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.371185294716919e-05, |
| "loss": 3.9877, |
| "step": 994816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.370346699965867e-05, |
| "loss": 3.9926, |
| "step": 995328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.369508105214815e-05, |
| "loss": 3.986, |
| "step": 995840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.368669510463763e-05, |
| "loss": 3.9783, |
| "step": 996352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367830915712711e-05, |
| "loss": 3.9989, |
| "step": 996864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.366992320961659e-05, |
| "loss": 3.99, |
| "step": 997376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.366153726210607e-05, |
| "loss": 3.9941, |
| "step": 997888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.365316769339928e-05, |
| "loss": 3.9969, |
| "step": 998400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.364478174588876e-05, |
| "loss": 3.9775, |
| "step": 998912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.363639579837824e-05, |
| "loss": 3.9822, |
| "step": 999424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.362800985086772e-05, |
| "loss": 3.9705, |
| "step": 999936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.36196239033572e-05, |
| "loss": 3.989, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.361123795584668e-05, |
| "loss": 3.9861, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.360285200833616e-05, |
| "loss": 3.9792, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.359446606082564e-05, |
| "loss": 3.9833, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3586096492118854e-05, |
| "loss": 4.0079, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.357771054460833e-05, |
| "loss": 3.987, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.356932459709781e-05, |
| "loss": 3.9921, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.356093864958729e-05, |
| "loss": 3.991, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.355255270207677e-05, |
| "loss": 3.9947, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.354416675456625e-05, |
| "loss": 3.9857, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.353578080705573e-05, |
| "loss": 3.9815, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3527411238348936e-05, |
| "loss": 3.9807, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3519025290838416e-05, |
| "loss": 3.9811, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3510639343327896e-05, |
| "loss": 3.9781, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3502253395817376e-05, |
| "loss": 3.9867, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.349386744830686e-05, |
| "loss": 3.9811, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.348548150079634e-05, |
| "loss": 3.9977, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.347709555328582e-05, |
| "loss": 3.9898, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.34687096057753e-05, |
| "loss": 3.9854, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.346034003706851e-05, |
| "loss": 3.9948, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.345195408955799e-05, |
| "loss": 3.9921, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.344356814204747e-05, |
| "loss": 3.9669, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.343518219453695e-05, |
| "loss": 3.9862, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.342679624702643e-05, |
| "loss": 3.9785, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.341842667831964e-05, |
| "loss": 3.9766, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.341004073080912e-05, |
| "loss": 3.9885, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.34016547832986e-05, |
| "loss": 3.9862, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.339326883578808e-05, |
| "loss": 3.9821, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.338488288827756e-05, |
| "loss": 3.9925, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.337649694076704e-05, |
| "loss": 3.9884, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.336811099325652e-05, |
| "loss": 3.9903, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3359725045746e-05, |
| "loss": 3.9874, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3351355477039216e-05, |
| "loss": 3.9609, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3342985908332425e-05, |
| "loss": 3.9849, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3334599960821905e-05, |
| "loss": 3.9907, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3326214013311385e-05, |
| "loss": 3.989, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.331782806580086e-05, |
| "loss": 3.981, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3309458497094074e-05, |
| "loss": 3.976, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.330108892838728e-05, |
| "loss": 3.9717, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.329270298087677e-05, |
| "loss": 3.9692, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.328431703336625e-05, |
| "loss": 3.984, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.327593108585573e-05, |
| "loss": 3.9643, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.326754513834521e-05, |
| "loss": 3.9921, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.325915919083469e-05, |
| "loss": 3.9842, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.325077324332416e-05, |
| "loss": 3.9656, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.324238729581364e-05, |
| "loss": 3.9857, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.323400134830312e-05, |
| "loss": 3.9677, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.32256154007926e-05, |
| "loss": 3.9718, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.321724583208581e-05, |
| "loss": 3.975, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320885988457529e-05, |
| "loss": 3.9853, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320047393706477e-05, |
| "loss": 3.9687, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.319208798955425e-05, |
| "loss": 3.9805, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.318370204204374e-05, |
| "loss": 3.9656, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.317533247333695e-05, |
| "loss": 3.9767, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.316694652582643e-05, |
| "loss": 3.9841, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.315856057831591e-05, |
| "loss": 3.9826, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.315017463080539e-05, |
| "loss": 3.9786, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.314178868329487e-05, |
| "loss": 3.9798, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.313340273578435e-05, |
| "loss": 3.9966, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.312501678827383e-05, |
| "loss": 3.9655, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3116647219567036e-05, |
| "loss": 3.9861, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3108261272056516e-05, |
| "loss": 3.9765, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3099875324545996e-05, |
| "loss": 3.971, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3091489377035476e-05, |
| "loss": 3.9747, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3083103429524956e-05, |
| "loss": 3.9791, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.307473386081817e-05, |
| "loss": 3.9883, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.306634791330765e-05, |
| "loss": 3.9916, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.305797834460086e-05, |
| "loss": 3.9667, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.304959239709034e-05, |
| "loss": 3.9632, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.304120644957982e-05, |
| "loss": 3.9689, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.30328205020693e-05, |
| "loss": 3.9834, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.302443455455878e-05, |
| "loss": 3.973, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.301604860704826e-05, |
| "loss": 3.971, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.300766265953774e-05, |
| "loss": 3.9826, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.299927671202722e-05, |
| "loss": 3.9721, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.299092352212416e-05, |
| "loss": 3.9818, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2982537574613646e-05, |
| "loss": 3.9677, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2974151627103126e-05, |
| "loss": 3.9822, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2965765679592606e-05, |
| "loss": 3.9761, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2957379732082085e-05, |
| "loss": 3.9904, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2949010163375295e-05, |
| "loss": 3.9638, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2940624215864775e-05, |
| "loss": 3.9687, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2932238268354254e-05, |
| "loss": 3.982, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2923852320843734e-05, |
| "loss": 3.9771, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2915466373333214e-05, |
| "loss": 3.9873, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2907080425822694e-05, |
| "loss": 3.9716, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2898710857115903e-05, |
| "loss": 3.974, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2890324909605383e-05, |
| "loss": 3.9708, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.288193896209486e-05, |
| "loss": 3.9725, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.287355301458434e-05, |
| "loss": 3.9707, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.286518344587756e-05, |
| "loss": 3.9699, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.285679749836704e-05, |
| "loss": 3.9748, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.284841155085652e-05, |
| "loss": 3.969, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.284002560334599e-05, |
| "loss": 3.9695, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.283163965583547e-05, |
| "loss": 3.9712, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.282325370832495e-05, |
| "loss": 3.9617, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.281488413961817e-05, |
| "loss": 3.9833, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.280649819210764e-05, |
| "loss": 3.9736, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.279811224459712e-05, |
| "loss": 3.9783, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.27897262970866e-05, |
| "loss": 3.984, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.278134034957608e-05, |
| "loss": 3.9813, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.27729707808693e-05, |
| "loss": 3.9752, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.276458483335878e-05, |
| "loss": 3.9694, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.275619888584826e-05, |
| "loss": 3.9806, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.274781293833774e-05, |
| "loss": 3.9762, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2739443369630946e-05, |
| "loss": 3.9705, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2731057422120426e-05, |
| "loss": 3.9821, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2722671474609906e-05, |
| "loss": 3.971, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2714285527099386e-05, |
| "loss": 3.9763, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2705899579588866e-05, |
| "loss": 3.9672, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2697513632078346e-05, |
| "loss": 3.9677, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2689144063371555e-05, |
| "loss": 3.9778, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2680758115861035e-05, |
| "loss": 3.9666, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2672372168350515e-05, |
| "loss": 3.9809, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.266398622084e-05, |
| "loss": 3.983, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.265560027332948e-05, |
| "loss": 3.9744, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.264721432581896e-05, |
| "loss": 3.9679, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.263882837830844e-05, |
| "loss": 3.981, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.263044243079792e-05, |
| "loss": 3.9757, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.26220564832874e-05, |
| "loss": 3.9793, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.261368691458061e-05, |
| "loss": 3.9722, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.260531734587382e-05, |
| "loss": 3.9706, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.25969313983633e-05, |
| "loss": 3.9811, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.258854545085278e-05, |
| "loss": 3.9925, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.258015950334226e-05, |
| "loss": 3.9686, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.257177355583174e-05, |
| "loss": 3.9804, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.256338760832122e-05, |
| "loss": 3.9833, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.25550016608107e-05, |
| "loss": 3.9728, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2546615713300186e-05, |
| "loss": 3.977, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2538246144593395e-05, |
| "loss": 3.9717, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2529860197082875e-05, |
| "loss": 3.9714, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2521490628376084e-05, |
| "loss": 3.985, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2513104680865564e-05, |
| "loss": 3.9649, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.033089637756348, |
| "eval_runtime": 285.2648, |
| "eval_samples_per_second": 1337.673, |
| "eval_steps_per_second": 41.803, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2504718733355044e-05, |
| "loss": 3.974, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2496332785844524e-05, |
| "loss": 3.9612, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2487946838334004e-05, |
| "loss": 3.9697, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.247956089082348e-05, |
| "loss": 3.9866, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.247117494331296e-05, |
| "loss": 3.9825, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.246278899580244e-05, |
| "loss": 3.9718, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.245441942709565e-05, |
| "loss": 3.9793, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.244603347958513e-05, |
| "loss": 3.9693, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.243764753207461e-05, |
| "loss": 3.9653, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.242926158456409e-05, |
| "loss": 3.9833, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.24208920158573e-05, |
| "loss": 3.9772, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.241250606834678e-05, |
| "loss": 3.9814, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.240413649964e-05, |
| "loss": 3.9843, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.239575055212948e-05, |
| "loss": 3.9648, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.238736460461895e-05, |
| "loss": 3.9658, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.237897865710843e-05, |
| "loss": 3.9587, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.237059270959791e-05, |
| "loss": 3.9766, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.236220676208739e-05, |
| "loss": 3.9697, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.235382081457688e-05, |
| "loss": 3.967, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.234543486706636e-05, |
| "loss": 3.9715, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.233704891955584e-05, |
| "loss": 3.9892, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2328679350849046e-05, |
| "loss": 3.9795, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2320309782142255e-05, |
| "loss": 3.9736, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2311923834631735e-05, |
| "loss": 3.9775, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2303537887121215e-05, |
| "loss": 3.9791, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2295151939610695e-05, |
| "loss": 3.9739, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2286765992100175e-05, |
| "loss": 3.9656, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2278396423393384e-05, |
| "loss": 3.9666, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2270010475882864e-05, |
| "loss": 3.9635, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2261624528372344e-05, |
| "loss": 3.9684, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2253238580861824e-05, |
| "loss": 3.9705, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.224485263335131e-05, |
| "loss": 3.9699, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.223648306464452e-05, |
| "loss": 3.9764, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2228097117134e-05, |
| "loss": 3.9774, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.221971116962348e-05, |
| "loss": 3.9718, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.221132522211296e-05, |
| "loss": 3.9783, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.220293927460244e-05, |
| "loss": 3.9792, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.219455332709192e-05, |
| "loss": 3.9534, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.218618375838513e-05, |
| "loss": 3.9777, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.217779781087461e-05, |
| "loss": 3.9566, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.216941186336409e-05, |
| "loss": 3.9664, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.216102591585357e-05, |
| "loss": 3.9702, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.215263996834305e-05, |
| "loss": 3.9787, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.214425402083253e-05, |
| "loss": 3.967, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2135884452125745e-05, |
| "loss": 3.9744, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2127498504615225e-05, |
| "loss": 3.9775, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2119112557104705e-05, |
| "loss": 3.9735, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2110726609594184e-05, |
| "loss": 3.9773, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2102357040887394e-05, |
| "loss": 3.9467, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2093971093376874e-05, |
| "loss": 3.9692, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2085585145866353e-05, |
| "loss": 3.9774, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2077199198355833e-05, |
| "loss": 3.9746, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.206881325084531e-05, |
| "loss": 3.9687, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.206044368213852e-05, |
| "loss": 3.9596, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2052057734628e-05, |
| "loss": 3.9638, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.204367178711748e-05, |
| "loss": 3.951, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.203528583960696e-05, |
| "loss": 3.9767, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.202689989209644e-05, |
| "loss": 3.9479, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.201851394458592e-05, |
| "loss": 3.9755, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.201014437587914e-05, |
| "loss": 3.9706, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.200175842836861e-05, |
| "loss": 3.9541, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.199337248085809e-05, |
| "loss": 3.9712, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.198498653334757e-05, |
| "loss": 3.9556, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.197660058583705e-05, |
| "loss": 3.9598, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.196821463832653e-05, |
| "loss": 3.9613, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.195982869081601e-05, |
| "loss": 3.9718, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.195144274330549e-05, |
| "loss": 3.9556, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.19430731745987e-05, |
| "loss": 3.9674, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.193468722708819e-05, |
| "loss": 3.9534, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.192630127957767e-05, |
| "loss": 3.961, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.191791533206715e-05, |
| "loss": 3.973, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1909545763360356e-05, |
| "loss": 3.9712, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1901159815849836e-05, |
| "loss": 3.9631, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1892773868339316e-05, |
| "loss": 3.9688, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1884387920828796e-05, |
| "loss": 3.9797, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1876001973318276e-05, |
| "loss": 3.9562, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1867632404611485e-05, |
| "loss": 3.9724, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1859246457100965e-05, |
| "loss": 3.9599, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1850860509590445e-05, |
| "loss": 3.9627, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1842490940883654e-05, |
| "loss": 3.9601, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.183410499337314e-05, |
| "loss": 3.9663, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.182571904586262e-05, |
| "loss": 3.978, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.18173330983521e-05, |
| "loss": 3.9763, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.180894715084158e-05, |
| "loss": 3.9622, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.180056120333106e-05, |
| "loss": 3.9447, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.179217525582054e-05, |
| "loss": 3.9563, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.178380568711375e-05, |
| "loss": 3.9697, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.177541973960323e-05, |
| "loss": 3.9617, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.176705017089644e-05, |
| "loss": 3.9592, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.175866422338592e-05, |
| "loss": 3.9699, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.17502782758754e-05, |
| "loss": 3.9634, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.174189232836488e-05, |
| "loss": 3.964, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.173350638085436e-05, |
| "loss": 3.9577, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.172512043334384e-05, |
| "loss": 3.9728, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.171673448583332e-05, |
| "loss": 3.9592, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.17083485383228e-05, |
| "loss": 3.9757, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.169996259081228e-05, |
| "loss": 3.9537, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.169157664330176e-05, |
| "loss": 3.9569, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.168319069579124e-05, |
| "loss": 3.9713, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.167482112708445e-05, |
| "loss": 3.9635, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.166643517957393e-05, |
| "loss": 3.974, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.165804923206341e-05, |
| "loss": 3.9617, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.164966328455289e-05, |
| "loss": 3.9618, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.164127733704237e-05, |
| "loss": 3.9579, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1632907768335576e-05, |
| "loss": 3.9635, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1624521820825056e-05, |
| "loss": 3.955, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.161613587331454e-05, |
| "loss": 3.9608, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.160774992580402e-05, |
| "loss": 3.9621, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.15993639782935e-05, |
| "loss": 3.9565, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.159099440958671e-05, |
| "loss": 3.9562, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.158260846207619e-05, |
| "loss": 3.9637, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.157422251456567e-05, |
| "loss": 3.9489, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.156583656705515e-05, |
| "loss": 3.9684, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.155745061954463e-05, |
| "loss": 3.961, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.154908105083784e-05, |
| "loss": 3.9674, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.154069510332732e-05, |
| "loss": 3.9707, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.15323091558168e-05, |
| "loss": 3.9715, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.152393958711001e-05, |
| "loss": 3.9636, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1515553639599496e-05, |
| "loss": 3.9592, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1507167692088976e-05, |
| "loss": 3.9649, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1498781744578456e-05, |
| "loss": 3.9635, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1490395797067936e-05, |
| "loss": 3.9548, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1482009849557416e-05, |
| "loss": 3.9741, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1473623902046896e-05, |
| "loss": 3.9586, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1465237954536376e-05, |
| "loss": 3.9659, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1456852007025856e-05, |
| "loss": 3.9564, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1448466059515336e-05, |
| "loss": 3.9549, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1440096490808545e-05, |
| "loss": 3.9691, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1431710543298025e-05, |
| "loss": 3.9564, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1423324595787505e-05, |
| "loss": 3.9614, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.141493864827698e-05, |
| "loss": 3.9718, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1406552700766465e-05, |
| "loss": 3.9604, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1398166753255945e-05, |
| "loss": 3.9574, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1389780805745425e-05, |
| "loss": 3.9674, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1381394858234905e-05, |
| "loss": 3.9645, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1373025289528114e-05, |
| "loss": 3.9668, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1364639342017594e-05, |
| "loss": 3.957, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1356253394507074e-05, |
| "loss": 3.9603, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1347867446996554e-05, |
| "loss": 3.9742, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1339481499486034e-05, |
| "loss": 3.9765, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.133111193077924e-05, |
| "loss": 3.9609, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.132272598326872e-05, |
| "loss": 3.964, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.13143400357582e-05, |
| "loss": 3.9705, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.130595408824768e-05, |
| "loss": 3.9638, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.12975845195409e-05, |
| "loss": 3.9641, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.128919857203038e-05, |
| "loss": 3.9608, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.128081262451986e-05, |
| "loss": 3.9589, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.127242667700934e-05, |
| "loss": 3.9732, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.126405710830255e-05, |
| "loss": 3.9532, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.026190757751465, |
| "eval_runtime": 286.2894, |
| "eval_samples_per_second": 1332.885, |
| "eval_steps_per_second": 41.654, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.125567116079203e-05, |
| "loss": 3.9427, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.124728521328151e-05, |
| "loss": 3.9537, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.123889926577099e-05, |
| "loss": 3.9551, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.123051331826047e-05, |
| "loss": 3.9768, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.122212737074995e-05, |
| "loss": 3.9733, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.121374142323943e-05, |
| "loss": 3.96, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.120535547572891e-05, |
| "loss": 3.9664, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.119696952821839e-05, |
| "loss": 3.9591, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.118858358070787e-05, |
| "loss": 3.9582, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.118019763319735e-05, |
| "loss": 3.9687, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.117181168568683e-05, |
| "loss": 3.9626, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.116342573817631e-05, |
| "loss": 3.9681, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.115505616946952e-05, |
| "loss": 3.9731, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1146670221958996e-05, |
| "loss": 3.9544, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1138284274448476e-05, |
| "loss": 3.9538, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1129898326937956e-05, |
| "loss": 3.9472, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1121512379427436e-05, |
| "loss": 3.9625, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1113126431916916e-05, |
| "loss": 3.9611, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1104740484406396e-05, |
| "loss": 3.9541, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1096354536895876e-05, |
| "loss": 3.9595, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1087968589385356e-05, |
| "loss": 3.9762, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1079582641874836e-05, |
| "loss": 3.9631, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1071196694364316e-05, |
| "loss": 3.9669, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1062810746853796e-05, |
| "loss": 3.962, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.105442479934328e-05, |
| "loss": 3.9725, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.104607160944022e-05, |
| "loss": 3.9598, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.10376856619297e-05, |
| "loss": 3.9582, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.102929971441918e-05, |
| "loss": 3.9548, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.102091376690866e-05, |
| "loss": 3.9524, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.101252781939814e-05, |
| "loss": 3.9581, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.100414187188762e-05, |
| "loss": 3.9565, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.099577230318083e-05, |
| "loss": 3.9615, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098738635567031e-05, |
| "loss": 3.9649, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097900040815979e-05, |
| "loss": 3.9637, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097061446064927e-05, |
| "loss": 3.9653, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0962244891942485e-05, |
| "loss": 3.963, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0953858944431965e-05, |
| "loss": 3.9669, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0945472996921445e-05, |
| "loss": 3.9445, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0937087049410925e-05, |
| "loss": 3.9675, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0928701101900405e-05, |
| "loss": 3.941, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0920315154389885e-05, |
| "loss": 3.956, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.091192920687936e-05, |
| "loss": 3.9592, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0903559638172574e-05, |
| "loss": 3.9657, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0895173690662054e-05, |
| "loss": 3.9537, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.088678774315153e-05, |
| "loss": 3.966, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.087840179564101e-05, |
| "loss": 3.9648, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.087003222693422e-05, |
| "loss": 3.9636, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.08616462794237e-05, |
| "loss": 3.9633, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.085326033191318e-05, |
| "loss": 3.9394, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.084487438440266e-05, |
| "loss": 3.9554, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.083648843689214e-05, |
| "loss": 3.9673, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082810248938162e-05, |
| "loss": 3.9608, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.08197165418711e-05, |
| "loss": 3.9576, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.081134697316431e-05, |
| "loss": 3.9478, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.080296102565379e-05, |
| "loss": 3.9533, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.079457507814327e-05, |
| "loss": 3.9385, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.078618913063275e-05, |
| "loss": 3.968, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.077780318312223e-05, |
| "loss": 3.9358, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076941723561171e-05, |
| "loss": 3.9622, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076103128810119e-05, |
| "loss": 3.9589, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.075264534059067e-05, |
| "loss": 3.9482, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.074427577188389e-05, |
| "loss": 3.9548, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.073588982437337e-05, |
| "loss": 3.9462, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.072750387686285e-05, |
| "loss": 3.9477, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071911792935233e-05, |
| "loss": 3.9435, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071073198184181e-05, |
| "loss": 3.9656, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.070234603433129e-05, |
| "loss": 3.949, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0693976465624496e-05, |
| "loss": 3.948, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0685590518113976e-05, |
| "loss": 3.9427, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0677204570603456e-05, |
| "loss": 3.9512, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0668818623092936e-05, |
| "loss": 3.9646, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0660449054386145e-05, |
| "loss": 3.9576, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0652063106875625e-05, |
| "loss": 3.9497, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0643677159365105e-05, |
| "loss": 3.96, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.063530759065832e-05, |
| "loss": 3.968, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06269216431478e-05, |
| "loss": 3.9434, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061853569563728e-05, |
| "loss": 3.9611, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061014974812676e-05, |
| "loss": 3.9501, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.060176380061624e-05, |
| "loss": 3.9516, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.059337785310572e-05, |
| "loss": 3.9484, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.058500828439893e-05, |
| "loss": 3.9564, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057662233688841e-05, |
| "loss": 3.9654, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.056823638937789e-05, |
| "loss": 3.9648, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.055985044186736e-05, |
| "loss": 3.9514, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.055146449435684e-05, |
| "loss": 3.9293, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.054307854684633e-05, |
| "loss": 3.9511, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.053469259933581e-05, |
| "loss": 3.9552, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.052632303062902e-05, |
| "loss": 3.9555, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.05179370831185e-05, |
| "loss": 3.9415, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0509567514411715e-05, |
| "loss": 3.959, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0501181566901188e-05, |
| "loss": 3.9521, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0492795619390668e-05, |
| "loss": 3.9547, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0484409671880148e-05, |
| "loss": 3.9442, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0476023724369628e-05, |
| "loss": 3.9608, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.046763777685911e-05, |
| "loss": 3.9465, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.045926820815232e-05, |
| "loss": 3.9649, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.04508822606418e-05, |
| "loss": 3.9484, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.044249631313128e-05, |
| "loss": 3.9417, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.043411036562076e-05, |
| "loss": 3.9559, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.042572441811024e-05, |
| "loss": 3.9532, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.041733847059972e-05, |
| "loss": 3.9603, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.04089525230892e-05, |
| "loss": 3.9508, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0400566575578683e-05, |
| "loss": 3.9503, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0392180628068163e-05, |
| "loss": 3.9449, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0383794680557643e-05, |
| "loss": 3.9546, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0375408733047123e-05, |
| "loss": 3.9407, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0367039164340332e-05, |
| "loss": 3.9528, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0358653216829812e-05, |
| "loss": 3.9506, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0350267269319292e-05, |
| "loss": 3.949, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0341881321808775e-05, |
| "loss": 3.9392, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0333495374298255e-05, |
| "loss": 3.9517, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.032510942678773e-05, |
| "loss": 3.9413, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.031672347927721e-05, |
| "loss": 3.9563, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.030833753176669e-05, |
| "loss": 3.9504, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0299967963059904e-05, |
| "loss": 3.9523, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.029158201554938e-05, |
| "loss": 3.9612, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.028319606803886e-05, |
| "loss": 3.9598, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0274826499332077e-05, |
| "loss": 3.9559, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.026644055182155e-05, |
| "loss": 3.9449, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.025805460431103e-05, |
| "loss": 3.9565, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0249668656800513e-05, |
| "loss": 3.9537, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0241282709289993e-05, |
| "loss": 3.9443, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0232896761779473e-05, |
| "loss": 3.9588, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0224510814268953e-05, |
| "loss": 3.9492, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0216124866758433e-05, |
| "loss": 3.9558, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0207738919247913e-05, |
| "loss": 3.9448, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0199369350541122e-05, |
| "loss": 3.947, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0190983403030605e-05, |
| "loss": 3.952, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0182597455520085e-05, |
| "loss": 3.946, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0174211508009565e-05, |
| "loss": 3.9461, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0165841939302774e-05, |
| "loss": 3.965, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0157455991792254e-05, |
| "loss": 3.9537, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0149070044281734e-05, |
| "loss": 3.9451, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0140684096771214e-05, |
| "loss": 3.9553, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0132314528064427e-05, |
| "loss": 3.9541, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0123928580553907e-05, |
| "loss": 3.9555, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0115542633043387e-05, |
| "loss": 3.951, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0107156685532867e-05, |
| "loss": 3.9485, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0098787116826076e-05, |
| "loss": 3.9608, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0090417548119288e-05, |
| "loss": 3.9648, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0082031600608768e-05, |
| "loss": 3.9508, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0073645653098248e-05, |
| "loss": 3.9528, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0065259705587728e-05, |
| "loss": 3.9629, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0056873758077208e-05, |
| "loss": 3.9475, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0048487810566688e-05, |
| "loss": 3.9541, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0040101863056168e-05, |
| "loss": 3.9485, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.003171591554565e-05, |
| "loss": 3.9489, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.002334634683886e-05, |
| "loss": 3.9603, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.001496039932834e-05, |
| "loss": 3.9436, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.000657445181782e-05, |
| "loss": 3.9473, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.020456314086914, |
| "eval_runtime": 280.3059, |
| "eval_samples_per_second": 1361.338, |
| "eval_steps_per_second": 42.543, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.99981885043073e-05, |
| "loss": 3.9407, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9989818935600513e-05, |
| "loss": 3.9426, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9981432988089993e-05, |
| "loss": 3.9642, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9973047040579473e-05, |
| "loss": 3.9625, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9964661093068953e-05, |
| "loss": 3.9492, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9956275145558432e-05, |
| "loss": 3.9541, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9947889198047912e-05, |
| "loss": 3.948, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.993950325053739e-05, |
| "loss": 3.9407, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.993111730302687e-05, |
| "loss": 3.9616, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9922747734320085e-05, |
| "loss": 3.9499, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9914378165613294e-05, |
| "loss": 3.959, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9906008596906503e-05, |
| "loss": 3.9571, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9897639028199716e-05, |
| "loss": 3.9502, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9889253080689195e-05, |
| "loss": 3.9434, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9880867133178675e-05, |
| "loss": 3.9339, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9872481185668155e-05, |
| "loss": 3.9468, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9864095238157635e-05, |
| "loss": 3.9489, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9855709290647115e-05, |
| "loss": 3.9441, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9847323343136595e-05, |
| "loss": 3.9483, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9838937395626075e-05, |
| "loss": 3.9689, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9830551448115555e-05, |
| "loss": 3.9523, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.982216550060504e-05, |
| "loss": 3.9544, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.981377955309451e-05, |
| "loss": 3.9493, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.980539360558399e-05, |
| "loss": 3.964, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.979700765807347e-05, |
| "loss": 3.9418, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.978862171056295e-05, |
| "loss": 3.9478, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.978023576305243e-05, |
| "loss": 3.9422, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9771866194345644e-05, |
| "loss": 3.9444, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9763480246835124e-05, |
| "loss": 3.9458, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9755094299324604e-05, |
| "loss": 3.9443, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9746708351814084e-05, |
| "loss": 3.9461, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9738322404303564e-05, |
| "loss": 3.9557, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.972996921440051e-05, |
| "loss": 3.9484, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9721583266889985e-05, |
| "loss": 3.9575, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9713197319379465e-05, |
| "loss": 3.9498, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9704811371868945e-05, |
| "loss": 3.9571, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9696441803162154e-05, |
| "loss": 3.9329, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9688055855651638e-05, |
| "loss": 3.9573, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9679669908141118e-05, |
| "loss": 3.9314, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9671283960630598e-05, |
| "loss": 3.9481, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9662898013120078e-05, |
| "loss": 3.9444, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9654512065609558e-05, |
| "loss": 3.9541, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9646142496902767e-05, |
| "loss": 3.9393, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9637756549392247e-05, |
| "loss": 3.9576, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.962937060188173e-05, |
| "loss": 3.9504, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.962098465437121e-05, |
| "loss": 3.9518, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.961259870686069e-05, |
| "loss": 3.9532, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.96042291381539e-05, |
| "loss": 3.9283, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9595859569447108e-05, |
| "loss": 3.9478, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.958747362193659e-05, |
| "loss": 3.9553, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.957908767442607e-05, |
| "loss": 3.9497, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.957070172691555e-05, |
| "loss": 3.9492, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.956231577940503e-05, |
| "loss": 3.938, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.955392983189451e-05, |
| "loss": 3.9428, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.954554388438399e-05, |
| "loss": 3.9282, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.953715793687347e-05, |
| "loss": 3.9565, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9528788368166684e-05, |
| "loss": 3.9253, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9520402420656164e-05, |
| "loss": 3.9534, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9512016473145644e-05, |
| "loss": 3.9484, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9503630525635123e-05, |
| "loss": 3.9352, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9495244578124603e-05, |
| "loss": 3.9411, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9486858630614083e-05, |
| "loss": 3.9393, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9478472683103563e-05, |
| "loss": 3.9372, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9470086735593043e-05, |
| "loss": 3.933, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9461717166886256e-05, |
| "loss": 3.9533, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9453347598179465e-05, |
| "loss": 3.939, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9444961650668945e-05, |
| "loss": 3.9362, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9436575703158425e-05, |
| "loss": 3.9336, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9428189755647905e-05, |
| "loss": 3.9391, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9419820186941117e-05, |
| "loss": 3.9527, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9411434239430597e-05, |
| "loss": 3.9491, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9403048291920077e-05, |
| "loss": 3.9385, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9394662344409557e-05, |
| "loss": 3.9505, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9386292775702766e-05, |
| "loss": 3.9541, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9377906828192246e-05, |
| "loss": 3.9333, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.936952088068173e-05, |
| "loss": 3.9492, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.936113493317121e-05, |
| "loss": 3.9425, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.935274898566069e-05, |
| "loss": 3.942, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.934436303815017e-05, |
| "loss": 3.9388, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9335977090639643e-05, |
| "loss": 3.9421, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9327591143129123e-05, |
| "loss": 3.9516, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9319205195618606e-05, |
| "loss": 3.9599, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9310835626911815e-05, |
| "loss": 3.9363, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9302449679401295e-05, |
| "loss": 3.9185, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.929408011069451e-05, |
| "loss": 3.9402, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.928569416318399e-05, |
| "loss": 3.9431, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9277308215673467e-05, |
| "loss": 3.9446, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9268922268162947e-05, |
| "loss": 3.9366, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9260569078259892e-05, |
| "loss": 3.9467, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9252183130749372e-05, |
| "loss": 3.944, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9243797183238852e-05, |
| "loss": 3.9404, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9235411235728332e-05, |
| "loss": 3.9346, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9227025288217812e-05, |
| "loss": 3.9511, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.921863934070729e-05, |
| "loss": 3.9361, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.921025339319677e-05, |
| "loss": 3.9527, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.920186744568625e-05, |
| "loss": 3.9422, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.919348149817573e-05, |
| "loss": 3.9289, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.918509555066521e-05, |
| "loss": 3.9428, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.917670960315469e-05, |
| "loss": 3.9436, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.91683400344479e-05, |
| "loss": 3.9522, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.915995408693738e-05, |
| "loss": 3.9405, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.915156813942686e-05, |
| "loss": 3.9414, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.914318219191634e-05, |
| "loss": 3.9342, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.913481262320955e-05, |
| "loss": 3.9456, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.912642667569903e-05, |
| "loss": 3.9284, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.911804072818851e-05, |
| "loss": 3.9385, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9109654780677993e-05, |
| "loss": 3.9416, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9101268833167473e-05, |
| "loss": 3.9402, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9092882885656953e-05, |
| "loss": 3.9312, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9084496938146433e-05, |
| "loss": 3.9384, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9076110990635913e-05, |
| "loss": 3.9356, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9067741421929122e-05, |
| "loss": 3.9445, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9059355474418602e-05, |
| "loss": 3.9358, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9050969526908085e-05, |
| "loss": 3.9382, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9042583579397565e-05, |
| "loss": 3.9522, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9034214010690774e-05, |
| "loss": 3.9492, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9025844441983984e-05, |
| "loss": 3.9423, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9017458494473463e-05, |
| "loss": 3.9341, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9009072546962947e-05, |
| "loss": 3.9443, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9000686599452427e-05, |
| "loss": 3.9399, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8992317030745636e-05, |
| "loss": 3.9364, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8983931083235116e-05, |
| "loss": 3.9526, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8975545135724596e-05, |
| "loss": 3.9367, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8967159188214076e-05, |
| "loss": 3.9458, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8958773240703556e-05, |
| "loss": 3.9307, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.895038729319304e-05, |
| "loss": 3.9362, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8942017724486248e-05, |
| "loss": 3.9432, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8933631776975728e-05, |
| "loss": 3.9368, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8925245829465208e-05, |
| "loss": 3.9305, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8916859881954688e-05, |
| "loss": 3.957, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.89084903132479e-05, |
| "loss": 3.9431, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.890010436573738e-05, |
| "loss": 3.9399, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.889171841822686e-05, |
| "loss": 3.9391, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.888333247071634e-05, |
| "loss": 3.9393, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.887494652320582e-05, |
| "loss": 3.9461, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.88665605756953e-05, |
| "loss": 3.9446, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8858174628184777e-05, |
| "loss": 3.9382, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8849788680674257e-05, |
| "loss": 3.9531, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8841402733163737e-05, |
| "loss": 3.9482, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8833016785653217e-05, |
| "loss": 3.9447, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8824647216946426e-05, |
| "loss": 3.9431, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8816261269435906e-05, |
| "loss": 3.9477, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8807875321925386e-05, |
| "loss": 3.9418, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.879948937441487e-05, |
| "loss": 3.9443, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.879110342690435e-05, |
| "loss": 3.9413, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8782733858197558e-05, |
| "loss": 3.9408, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8774347910687038e-05, |
| "loss": 3.9497, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8765961963176518e-05, |
| "loss": 3.9345, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8757576015665998e-05, |
| "loss": 3.9366, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.0154266357421875, |
| "eval_runtime": 318.4816, |
| "eval_samples_per_second": 1198.157, |
| "eval_steps_per_second": 37.443, |
| "step": 1297440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8749190068155478e-05, |
| "loss": 3.9305, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.874080412064496e-05, |
| "loss": 3.9325, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.873241817313444e-05, |
| "loss": 3.9575, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.872403222562392e-05, |
| "loss": 3.9487, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.87156462781134e-05, |
| "loss": 3.9429, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.870726033060288e-05, |
| "loss": 3.9417, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.869887438309236e-05, |
| "loss": 3.9402, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8690488435581838e-05, |
| "loss": 3.932, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8682102488071318e-05, |
| "loss": 3.9463, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8673716540560798e-05, |
| "loss": 3.9441, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8665330593050277e-05, |
| "loss": 3.947, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8656944645539757e-05, |
| "loss": 3.9494, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8648575076832967e-05, |
| "loss": 3.9361, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8640189129322446e-05, |
| "loss": 3.9357, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8631803181811926e-05, |
| "loss": 3.9304, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.862341723430141e-05, |
| "loss": 3.9301, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.861503128679089e-05, |
| "loss": 3.9409, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.860664533928037e-05, |
| "loss": 3.932, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.859825939176985e-05, |
| "loss": 3.9401, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.858987344425933e-05, |
| "loss": 3.9608, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.858148749674881e-05, |
| "loss": 3.941, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.857310154923829e-05, |
| "loss": 3.9472, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.856471560172777e-05, |
| "loss": 3.9376, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.855632965421725e-05, |
| "loss": 3.9511, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8547943706706726e-05, |
| "loss": 3.9354, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8539574137999942e-05, |
| "loss": 3.9398, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8531188190489422e-05, |
| "loss": 3.9321, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8522802242978902e-05, |
| "loss": 3.9379, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.851441629546838e-05, |
| "loss": 3.9335, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8506046726761594e-05, |
| "loss": 3.9339, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8497677158054803e-05, |
| "loss": 3.9377, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8489291210544283e-05, |
| "loss": 3.9481, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8480905263033763e-05, |
| "loss": 3.938, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8472519315523243e-05, |
| "loss": 3.9451, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8464149746816456e-05, |
| "loss": 3.9382, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8455763799305936e-05, |
| "loss": 3.9487, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8447377851795416e-05, |
| "loss": 3.9265, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8438991904284896e-05, |
| "loss": 3.9437, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8430605956774375e-05, |
| "loss": 3.9225, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.842222000926385e-05, |
| "loss": 3.9429, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8413834061753332e-05, |
| "loss": 3.9326, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8405448114242812e-05, |
| "loss": 3.9467, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8397078545536028e-05, |
| "loss": 3.9295, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8388708976829237e-05, |
| "loss": 3.9463, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8380323029318717e-05, |
| "loss": 3.9426, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8371937081808197e-05, |
| "loss": 3.9414, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8363551134297673e-05, |
| "loss": 3.9482, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8355165186787153e-05, |
| "loss": 3.9207, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8346779239276633e-05, |
| "loss": 3.9318, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8338393291766113e-05, |
| "loss": 3.9429, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8330023723059322e-05, |
| "loss": 3.9411, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8321637775548802e-05, |
| "loss": 3.9347, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8313251828038286e-05, |
| "loss": 3.9336, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8304865880527766e-05, |
| "loss": 3.9313, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8296479933017246e-05, |
| "loss": 3.9239, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8288093985506726e-05, |
| "loss": 3.9388, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8279708037996205e-05, |
| "loss": 3.9206, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8271338469289415e-05, |
| "loss": 3.9359, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8262952521778895e-05, |
| "loss": 3.9435, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8254566574268378e-05, |
| "loss": 3.9261, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8246180626757858e-05, |
| "loss": 3.9275, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8237794679247338e-05, |
| "loss": 3.9334, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8229441489344276e-05, |
| "loss": 3.9226, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8221055541833756e-05, |
| "loss": 3.9247, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.821266959432324e-05, |
| "loss": 3.9449, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.820428364681272e-05, |
| "loss": 3.9309, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.81958976993022e-05, |
| "loss": 3.9274, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.818751175179168e-05, |
| "loss": 3.9243, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.817912580428116e-05, |
| "loss": 3.9277, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.817073985677064e-05, |
| "loss": 3.9405, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8162370288063848e-05, |
| "loss": 3.9362, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.815398434055333e-05, |
| "loss": 3.9302, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.814559839304281e-05, |
| "loss": 3.9421, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.813721244553229e-05, |
| "loss": 3.9428, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.812882649802177e-05, |
| "loss": 3.9276, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.812045692931498e-05, |
| "loss": 3.9369, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.811207098180446e-05, |
| "loss": 3.9329, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.810368503429394e-05, |
| "loss": 3.9325, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8095315465587153e-05, |
| "loss": 3.9292, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8086929518076633e-05, |
| "loss": 3.9309, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8078543570566113e-05, |
| "loss": 3.944, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8070157623055593e-05, |
| "loss": 3.9415, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8061771675545073e-05, |
| "loss": 3.9333, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8053385728034553e-05, |
| "loss": 3.91, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8044999780524033e-05, |
| "loss": 3.9293, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.803661383301351e-05, |
| "loss": 3.9341, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8028244264306725e-05, |
| "loss": 3.9348, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8019858316796205e-05, |
| "loss": 3.9316, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8011488748089414e-05, |
| "loss": 3.9316, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8003102800578894e-05, |
| "loss": 3.9348, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7994716853068374e-05, |
| "loss": 3.9287, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7986330905557857e-05, |
| "loss": 3.9231, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.797794495804733e-05, |
| "loss": 3.9409, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.796955901053681e-05, |
| "loss": 3.9232, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.796117306302629e-05, |
| "loss": 3.9438, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.795278711551577e-05, |
| "loss": 3.9365, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7944417546808983e-05, |
| "loss": 3.9181, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.79360479781022e-05, |
| "loss": 3.9334, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.792766203059168e-05, |
| "loss": 3.9324, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.791927608308116e-05, |
| "loss": 3.9428, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7910890135570632e-05, |
| "loss": 3.9343, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7902504188060112e-05, |
| "loss": 3.9312, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7894118240549595e-05, |
| "loss": 3.9231, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7885732293039075e-05, |
| "loss": 3.9374, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7877362724332284e-05, |
| "loss": 3.9182, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7868976776821764e-05, |
| "loss": 3.9273, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7860590829311244e-05, |
| "loss": 3.9329, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7852204881800724e-05, |
| "loss": 3.9309, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7843818934290204e-05, |
| "loss": 3.9197, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7835449365583417e-05, |
| "loss": 3.931, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7827063418072897e-05, |
| "loss": 3.9271, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7818677470562376e-05, |
| "loss": 3.93, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7810291523051856e-05, |
| "loss": 3.9297, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7801905575541336e-05, |
| "loss": 3.9244, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7793519628030816e-05, |
| "loss": 3.95, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7785133680520296e-05, |
| "loss": 3.9402, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.777676411181351e-05, |
| "loss": 3.9334, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.776837816430299e-05, |
| "loss": 3.9242, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.775999221679247e-05, |
| "loss": 3.9333, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.775160626928195e-05, |
| "loss": 3.9319, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.774322032177143e-05, |
| "loss": 3.9233, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.773483437426091e-05, |
| "loss": 3.9438, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.772644842675039e-05, |
| "loss": 3.9322, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7718062479239872e-05, |
| "loss": 3.9304, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7709676531729345e-05, |
| "loss": 3.9265, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.770132334182629e-05, |
| "loss": 3.929, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.769293739431577e-05, |
| "loss": 3.9291, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.768455144680525e-05, |
| "loss": 3.9306, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7676165499294733e-05, |
| "loss": 3.9171, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7667779551784213e-05, |
| "loss": 3.9468, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7659409983077422e-05, |
| "loss": 3.9398, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7651024035566902e-05, |
| "loss": 3.9304, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7642638088056382e-05, |
| "loss": 3.9289, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7634252140545862e-05, |
| "loss": 3.9252, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7625866193035342e-05, |
| "loss": 3.9389, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.761748024552482e-05, |
| "loss": 3.9367, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7609110676818035e-05, |
| "loss": 3.9273, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7600724729307515e-05, |
| "loss": 3.9444, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7592338781796995e-05, |
| "loss": 3.9357, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.758395283428647e-05, |
| "loss": 3.9375, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.757556688677595e-05, |
| "loss": 3.9325, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.756718093926543e-05, |
| "loss": 3.9396, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.755881137055864e-05, |
| "loss": 3.9334, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.755042542304812e-05, |
| "loss": 3.9314, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.75420394755376e-05, |
| "loss": 3.9322, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7533669906830816e-05, |
| "loss": 3.9343, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7525283959320292e-05, |
| "loss": 3.9366, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7516898011809772e-05, |
| "loss": 3.9246, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7508512064299252e-05, |
| "loss": 3.9278, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.011216640472412, |
| "eval_runtime": 289.5858, |
| "eval_samples_per_second": 1317.713, |
| "eval_steps_per_second": 41.18, |
| "step": 1373760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7500126116788732e-05, |
| "loss": 3.9221, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7491740169278212e-05, |
| "loss": 3.9217, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7483354221767692e-05, |
| "loss": 3.9482, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7474968274257172e-05, |
| "loss": 3.9406, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7466582326746652e-05, |
| "loss": 3.9329, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7458196379236135e-05, |
| "loss": 3.9299, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7449810431725615e-05, |
| "loss": 3.9329, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7441424484215095e-05, |
| "loss": 3.9205, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7433071294312034e-05, |
| "loss": 3.9401, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7424685346801514e-05, |
| "loss": 3.9346, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7416299399290997e-05, |
| "loss": 3.936, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7407913451780477e-05, |
| "loss": 3.9396, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7399543883073686e-05, |
| "loss": 3.9265, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7391157935563166e-05, |
| "loss": 3.9259, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7382771988052646e-05, |
| "loss": 3.9236, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7374386040542126e-05, |
| "loss": 3.9245, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7366000093031606e-05, |
| "loss": 3.9272, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.735761414552109e-05, |
| "loss": 3.9224, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.734922819801057e-05, |
| "loss": 3.93, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.734084225050005e-05, |
| "loss": 3.9553, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7332472681793258e-05, |
| "loss": 3.9324, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7324086734282738e-05, |
| "loss": 3.9338, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7315700786772218e-05, |
| "loss": 3.9338, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7307314839261698e-05, |
| "loss": 3.9399, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.729892889175118e-05, |
| "loss": 3.9254, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7290542944240655e-05, |
| "loss": 3.9333, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7282156996730134e-05, |
| "loss": 3.922, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7273771049219614e-05, |
| "loss": 3.927, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7265401480512827e-05, |
| "loss": 3.9229, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7257015533002307e-05, |
| "loss": 3.9248, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7248629585491787e-05, |
| "loss": 3.9266, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7240243637981267e-05, |
| "loss": 3.9399, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7231857690470747e-05, |
| "loss": 3.9309, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7223488121763956e-05, |
| "loss": 3.9386, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7215118553057172e-05, |
| "loss": 3.923, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.720673260554665e-05, |
| "loss": 3.9444, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7198346658036128e-05, |
| "loss": 3.9137, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7189960710525608e-05, |
| "loss": 3.9368, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7181574763015088e-05, |
| "loss": 3.9143, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7173188815504568e-05, |
| "loss": 3.932, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.716481924679778e-05, |
| "loss": 3.9223, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.715643329928726e-05, |
| "loss": 3.9399, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7148063730580476e-05, |
| "loss": 3.9223, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.713967778306995e-05, |
| "loss": 3.9318, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.713129183555943e-05, |
| "loss": 3.9343, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.712290588804891e-05, |
| "loss": 3.9369, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.711451994053839e-05, |
| "loss": 3.9363, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7106133993027873e-05, |
| "loss": 3.9163, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7097748045517353e-05, |
| "loss": 3.9181, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7089362098006833e-05, |
| "loss": 3.9361, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7080976150496313e-05, |
| "loss": 3.9329, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7072590202985793e-05, |
| "loss": 3.9233, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7064204255475273e-05, |
| "loss": 3.925, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7055834686768482e-05, |
| "loss": 3.9244, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7047448739257965e-05, |
| "loss": 3.9121, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7039062791747445e-05, |
| "loss": 3.9266, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7030676844236925e-05, |
| "loss": 3.9141, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7022290896726405e-05, |
| "loss": 3.9265, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7013904949215885e-05, |
| "loss": 3.936, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7005519001705365e-05, |
| "loss": 3.9175, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6997149432998574e-05, |
| "loss": 3.92, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6988763485488054e-05, |
| "loss": 3.9237, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6980377537977537e-05, |
| "loss": 3.9135, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.697199159046701e-05, |
| "loss": 3.9119, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.696360564295649e-05, |
| "loss": 3.9347, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.695521969544597e-05, |
| "loss": 3.9223, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.694683374793545e-05, |
| "loss": 3.9174, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.693844780042493e-05, |
| "loss": 3.9154, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6930078231718143e-05, |
| "loss": 3.922, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6921692284207623e-05, |
| "loss": 3.9313, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6913306336697103e-05, |
| "loss": 3.9295, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6904920389186583e-05, |
| "loss": 3.9199, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.689655082047979e-05, |
| "loss": 3.9325, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6888164872969275e-05, |
| "loss": 3.9304, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6879778925458755e-05, |
| "loss": 3.921, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6871392977948235e-05, |
| "loss": 3.9258, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6863007030437715e-05, |
| "loss": 3.9283, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6854637461730924e-05, |
| "loss": 3.9243, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6846251514220404e-05, |
| "loss": 3.9229, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6837865566709884e-05, |
| "loss": 3.922, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6829479619199367e-05, |
| "loss": 3.9316, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6821093671688847e-05, |
| "loss": 3.934, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6812724102982056e-05, |
| "loss": 3.9211, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6804338155471536e-05, |
| "loss": 3.908, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6795952207961016e-05, |
| "loss": 3.9167, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6787566260450496e-05, |
| "loss": 3.9244, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.677919669174371e-05, |
| "loss": 3.9275, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.677081074423319e-05, |
| "loss": 3.9213, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6762441175526398e-05, |
| "loss": 3.9231, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6754055228015878e-05, |
| "loss": 3.9295, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6745669280505358e-05, |
| "loss": 3.9152, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6737283332994838e-05, |
| "loss": 3.9176, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.672889738548432e-05, |
| "loss": 3.9299, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.67205114379738e-05, |
| "loss": 3.9184, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.671214186926701e-05, |
| "loss": 3.9364, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.670375592175649e-05, |
| "loss": 3.9237, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.669536997424597e-05, |
| "loss": 3.9097, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.668698402673545e-05, |
| "loss": 3.9251, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.667859807922493e-05, |
| "loss": 3.9221, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6670212131714413e-05, |
| "loss": 3.9355, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6661826184203893e-05, |
| "loss": 3.9295, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6653440236693373e-05, |
| "loss": 3.9193, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6645070667986582e-05, |
| "loss": 3.9113, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6636684720476062e-05, |
| "loss": 3.9293, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6628315151769275e-05, |
| "loss": 3.9104, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6619929204258755e-05, |
| "loss": 3.922, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6611543256748234e-05, |
| "loss": 3.9222, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6603157309237714e-05, |
| "loss": 3.9192, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6594771361727194e-05, |
| "loss": 3.9086, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6586385414216674e-05, |
| "loss": 3.9286, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.657799946670615e-05, |
| "loss": 3.9155, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.656961351919563e-05, |
| "loss": 3.9185, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.656122757168511e-05, |
| "loss": 3.922, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.655285800297832e-05, |
| "loss": 3.9155, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.65444720554678e-05, |
| "loss": 3.9388, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.653608610795728e-05, |
| "loss": 3.9286, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.652770016044676e-05, |
| "loss": 3.9279, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6519330591739972e-05, |
| "loss": 3.915, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6510944644229452e-05, |
| "loss": 3.9227, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6502575075522668e-05, |
| "loss": 3.9268, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6494189128012148e-05, |
| "loss": 3.9114, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.648580318050162e-05, |
| "loss": 3.9312, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6477417232991105e-05, |
| "loss": 3.9243, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.646904766428432e-05, |
| "loss": 3.9191, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6460661716773794e-05, |
| "loss": 3.9217, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6452275769263274e-05, |
| "loss": 3.9204, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6443889821752754e-05, |
| "loss": 3.9144, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6435503874242233e-05, |
| "loss": 3.925, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6427117926731713e-05, |
| "loss": 3.9108, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6418731979221197e-05, |
| "loss": 3.9332, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6410346031710677e-05, |
| "loss": 3.9335, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6401960084200157e-05, |
| "loss": 3.9188, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6393590515493366e-05, |
| "loss": 3.9197, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6385204567982846e-05, |
| "loss": 3.9183, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6376818620472326e-05, |
| "loss": 3.9269, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6368449051765538e-05, |
| "loss": 3.9253, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6360063104255018e-05, |
| "loss": 3.9216, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6351677156744498e-05, |
| "loss": 3.932, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6343291209233978e-05, |
| "loss": 3.9271, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6334905261723458e-05, |
| "loss": 3.9293, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6326519314212938e-05, |
| "loss": 3.9194, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6318149745506147e-05, |
| "loss": 3.9362, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.630976379799563e-05, |
| "loss": 3.9243, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.630137785048511e-05, |
| "loss": 3.9192, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.629299190297459e-05, |
| "loss": 3.9258, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.62846223342678e-05, |
| "loss": 3.9275, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.627623638675728e-05, |
| "loss": 3.9244, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.626785043924676e-05, |
| "loss": 3.914, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.625946449173624e-05, |
| "loss": 3.9154, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.0067596435546875, |
| "eval_runtime": 286.5064, |
| "eval_samples_per_second": 1331.876, |
| "eval_steps_per_second": 41.622, |
| "step": 1450080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6251078544225723e-05, |
| "loss": 3.9131, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6242692596715203e-05, |
| "loss": 3.913, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6234306649204683e-05, |
| "loss": 3.9352, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6225920701694156e-05, |
| "loss": 3.9316, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6217534754183636e-05, |
| "loss": 3.9316, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6209148806673116e-05, |
| "loss": 3.9153, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.62007628591626e-05, |
| "loss": 3.9263, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.619237691165208e-05, |
| "loss": 3.911, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.618399096414156e-05, |
| "loss": 3.9317, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.617560501663104e-05, |
| "loss": 3.9257, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.616721906912052e-05, |
| "loss": 3.9232, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.615883312161e-05, |
| "loss": 3.9334, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6150463552903208e-05, |
| "loss": 3.915, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.614207760539269e-05, |
| "loss": 3.9222, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.613369165788217e-05, |
| "loss": 3.9114, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.612530571037165e-05, |
| "loss": 3.9204, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.611691976286113e-05, |
| "loss": 3.9152, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.610853381535061e-05, |
| "loss": 3.9162, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.610014786784009e-05, |
| "loss": 3.9183, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.609176192032957e-05, |
| "loss": 3.9426, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6083392351622783e-05, |
| "loss": 3.9225, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6075006404112263e-05, |
| "loss": 3.9272, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6066620456601743e-05, |
| "loss": 3.9241, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6058234509091223e-05, |
| "loss": 3.9329, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6049848561580696e-05, |
| "loss": 3.9166, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6041478992873912e-05, |
| "loss": 3.9232, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6033093045363392e-05, |
| "loss": 3.914, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.602470709785287e-05, |
| "loss": 3.9137, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.601632115034235e-05, |
| "loss": 3.9139, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.600793520283183e-05, |
| "loss": 3.9209, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.599954925532131e-05, |
| "loss": 3.9174, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.599116330781079e-05, |
| "loss": 3.9289, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.598277736030027e-05, |
| "loss": 3.9227, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.597440779159348e-05, |
| "loss": 3.9272, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5966038222886697e-05, |
| "loss": 3.9178, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.595765227537617e-05, |
| "loss": 3.9295, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.594926632786565e-05, |
| "loss": 3.9115, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.594088038035513e-05, |
| "loss": 3.9254, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5932494432844613e-05, |
| "loss": 3.9042, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5924124864137823e-05, |
| "loss": 3.9201, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5915738916627302e-05, |
| "loss": 3.9158, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.590736934792052e-05, |
| "loss": 3.9317, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.589898340040999e-05, |
| "loss": 3.9149, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5890597452899475e-05, |
| "loss": 3.9238, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5882211505388955e-05, |
| "loss": 3.9255, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5873825557878435e-05, |
| "loss": 3.9293, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5865439610367915e-05, |
| "loss": 3.9221, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5857070041661124e-05, |
| "loss": 3.9117, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5848684094150604e-05, |
| "loss": 3.9052, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5840314525443816e-05, |
| "loss": 3.928, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5831928577933296e-05, |
| "loss": 3.9263, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5823542630422776e-05, |
| "loss": 3.9184, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5815156682912256e-05, |
| "loss": 3.9123, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5806770735401736e-05, |
| "loss": 3.9185, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5798384787891216e-05, |
| "loss": 3.901, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5789998840380696e-05, |
| "loss": 3.9234, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5781612892870176e-05, |
| "loss": 3.9028, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5773226945359656e-05, |
| "loss": 3.9192, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.576484099784914e-05, |
| "loss": 3.9248, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.575645505033862e-05, |
| "loss": 3.9123, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.57480691028281e-05, |
| "loss": 3.9075, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.573968315531758e-05, |
| "loss": 3.9182, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5731297207807052e-05, |
| "loss": 3.9033, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5722911260296532e-05, |
| "loss": 3.9094, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5714541691589748e-05, |
| "loss": 3.9222, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.570615574407923e-05, |
| "loss": 3.9172, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5697769796568705e-05, |
| "loss": 3.9092, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5689383849058185e-05, |
| "loss": 3.9039, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5680997901547665e-05, |
| "loss": 3.9148, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5672611954037145e-05, |
| "loss": 3.9171, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5664226006526624e-05, |
| "loss": 3.921, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5655840059016108e-05, |
| "loss": 3.9152, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5647486869113053e-05, |
| "loss": 3.9214, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5639100921602526e-05, |
| "loss": 3.9239, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5630714974092006e-05, |
| "loss": 3.9117, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5622329026581486e-05, |
| "loss": 3.9151, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.561394307907097e-05, |
| "loss": 3.9194, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.560555713156045e-05, |
| "loss": 3.9177, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.559717118404993e-05, |
| "loss": 3.9145, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5588801615343138e-05, |
| "loss": 3.9135, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5580415667832618e-05, |
| "loss": 3.9204, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5572029720322098e-05, |
| "loss": 3.927, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5563643772811578e-05, |
| "loss": 3.9182, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.555525782530106e-05, |
| "loss": 3.9021, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.554687187779054e-05, |
| "loss": 3.9039, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.553848593028002e-05, |
| "loss": 3.9133, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.553011636157323e-05, |
| "loss": 3.9209, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.552173041406271e-05, |
| "loss": 3.9167, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5513360845355923e-05, |
| "loss": 3.9083, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5504974897845403e-05, |
| "loss": 3.9211, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5496588950334883e-05, |
| "loss": 3.9115, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5488203002824363e-05, |
| "loss": 3.9096, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5479817055313843e-05, |
| "loss": 3.9146, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5471431107803323e-05, |
| "loss": 3.9199, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5463045160292803e-05, |
| "loss": 3.9277, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5454659212782283e-05, |
| "loss": 3.9089, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5446273265271763e-05, |
| "loss": 3.8997, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5437887317761243e-05, |
| "loss": 3.9192, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.542950137025072e-05, |
| "loss": 3.9117, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.54211154227402e-05, |
| "loss": 3.9268, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5412762232837144e-05, |
| "loss": 3.9221, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5404376285326624e-05, |
| "loss": 3.911, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5395990337816107e-05, |
| "loss": 3.9046, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5387604390305587e-05, |
| "loss": 3.9216, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5379218442795067e-05, |
| "loss": 3.9016, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.537083249528454e-05, |
| "loss": 3.9141, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5362462926577756e-05, |
| "loss": 3.9099, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5354076979067236e-05, |
| "loss": 3.9161, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5345691031556716e-05, |
| "loss": 3.9001, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5337305084046193e-05, |
| "loss": 3.9194, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5328919136535673e-05, |
| "loss": 3.9074, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5320533189025153e-05, |
| "loss": 3.9086, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5312163620318362e-05, |
| "loss": 3.9175, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5303777672807845e-05, |
| "loss": 3.9078, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5295391725297325e-05, |
| "loss": 3.9304, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5287005777786805e-05, |
| "loss": 3.9241, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5278619830276285e-05, |
| "loss": 3.9176, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5270233882765765e-05, |
| "loss": 3.9089, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5261847935255245e-05, |
| "loss": 3.9151, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5253461987744725e-05, |
| "loss": 3.9146, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5245092419037934e-05, |
| "loss": 3.9071, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5236722850331146e-05, |
| "loss": 3.9219, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5228336902820626e-05, |
| "loss": 3.9153, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5219950955310106e-05, |
| "loss": 3.9122, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5211565007799586e-05, |
| "loss": 3.9116, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5203179060289066e-05, |
| "loss": 3.9148, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5194793112778546e-05, |
| "loss": 3.9072, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5186407165268026e-05, |
| "loss": 3.9193, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.517802121775751e-05, |
| "loss": 3.9029, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.516965164905072e-05, |
| "loss": 3.9223, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5161282080343928e-05, |
| "loss": 3.9198, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.515291251163714e-05, |
| "loss": 3.9182, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.514452656412662e-05, |
| "loss": 3.9105, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.51361406166161e-05, |
| "loss": 3.9115, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.512775466910558e-05, |
| "loss": 3.9205, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.511936872159506e-05, |
| "loss": 3.9196, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.511098277408454e-05, |
| "loss": 3.9095, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.510259682657402e-05, |
| "loss": 3.9218, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.50942108790635e-05, |
| "loss": 3.9182, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.508582493155298e-05, |
| "loss": 3.9233, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5077438984042463e-05, |
| "loss": 3.914, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5069053036531943e-05, |
| "loss": 3.9273, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5060667089021423e-05, |
| "loss": 3.9197, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5052281141510903e-05, |
| "loss": 3.9118, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5043911572804112e-05, |
| "loss": 3.9165, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5035542004097325e-05, |
| "loss": 3.9218, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5027156056586805e-05, |
| "loss": 3.9135, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5018770109076285e-05, |
| "loss": 3.9044, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5010384161565765e-05, |
| "loss": 3.9157, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.002975940704346, |
| "eval_runtime": 293.1724, |
| "eval_samples_per_second": 1301.592, |
| "eval_steps_per_second": 40.676, |
| "step": 1526400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.5001998214055244e-05, |
| "loss": 3.9098, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4993628645348454e-05, |
| "loss": 3.9049, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4985242697837934e-05, |
| "loss": 3.9245, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4976856750327414e-05, |
| "loss": 3.9245, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4968470802816893e-05, |
| "loss": 3.9217, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4960084855306373e-05, |
| "loss": 3.9134, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4951715286599583e-05, |
| "loss": 3.9191, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4943329339089062e-05, |
| "loss": 3.9002, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4934943391578542e-05, |
| "loss": 3.924, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4926557444068026e-05, |
| "loss": 3.9219, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4918171496557506e-05, |
| "loss": 3.9163, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4909818306654447e-05, |
| "loss": 3.9238, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4901448737947656e-05, |
| "loss": 3.91, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.489306279043714e-05, |
| "loss": 3.915, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.488467684292662e-05, |
| "loss": 3.9056, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.48762908954161e-05, |
| "loss": 3.9084, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.486790494790558e-05, |
| "loss": 3.9071, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4859519000395056e-05, |
| "loss": 3.9079, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4851133052884536e-05, |
| "loss": 3.9109, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4842747105374016e-05, |
| "loss": 3.933, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4834361157863496e-05, |
| "loss": 3.9178, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.482597521035298e-05, |
| "loss": 3.9232, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.481760564164619e-05, |
| "loss": 3.9162, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.480921969413567e-05, |
| "loss": 3.9226, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.480083374662515e-05, |
| "loss": 3.9106, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.479244779911463e-05, |
| "loss": 3.9163, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.478406185160411e-05, |
| "loss": 3.9114, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.477567590409359e-05, |
| "loss": 3.9049, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4767289956583068e-05, |
| "loss": 3.9045, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4758904009072548e-05, |
| "loss": 3.9133, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.475053444036576e-05, |
| "loss": 3.9113, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4742148492855237e-05, |
| "loss": 3.9206, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4733762545344717e-05, |
| "loss": 3.9166, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.47253765978342e-05, |
| "loss": 3.9189, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4717007029127413e-05, |
| "loss": 3.9097, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.470862108161689e-05, |
| "loss": 3.9244, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.470023513410637e-05, |
| "loss": 3.9056, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.469184918659585e-05, |
| "loss": 3.9147, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.468346323908533e-05, |
| "loss": 3.8982, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.467507729157481e-05, |
| "loss": 3.9135, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.466669134406429e-05, |
| "loss": 3.9103, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.465830539655377e-05, |
| "loss": 3.9232, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.464991944904325e-05, |
| "loss": 3.9087, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.464153350153273e-05, |
| "loss": 3.9151, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4633163932825942e-05, |
| "loss": 3.9185, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.462477798531542e-05, |
| "loss": 3.919, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.46163920378049e-05, |
| "loss": 3.9161, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4608022469098114e-05, |
| "loss": 3.9042, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.459963652158759e-05, |
| "loss": 3.8958, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4591266952880803e-05, |
| "loss": 3.9229, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4582881005370283e-05, |
| "loss": 3.9167, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4574495057859763e-05, |
| "loss": 3.911, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4566109110349243e-05, |
| "loss": 3.9093, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4557723162838723e-05, |
| "loss": 3.9092, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4549337215328203e-05, |
| "loss": 3.8941, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4540951267817683e-05, |
| "loss": 3.9092, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4532565320307163e-05, |
| "loss": 3.901, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4524179372796643e-05, |
| "loss": 3.9083, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4515793425286123e-05, |
| "loss": 3.9199, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4507407477775603e-05, |
| "loss": 3.9054, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4499037909068815e-05, |
| "loss": 3.8971, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4490651961558295e-05, |
| "loss": 3.9152, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4482266014047772e-05, |
| "loss": 3.8931, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4473880066537252e-05, |
| "loss": 3.904, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.446549411902673e-05, |
| "loss": 3.9122, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.445710817151621e-05, |
| "loss": 3.9129, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4448738602809424e-05, |
| "loss": 3.9018, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4440369034102637e-05, |
| "loss": 3.8992, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4431983086592117e-05, |
| "loss": 3.9029, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4423597139081597e-05, |
| "loss": 3.9112, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4415211191571073e-05, |
| "loss": 3.9113, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4406825244060556e-05, |
| "loss": 3.9081, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4398439296550036e-05, |
| "loss": 3.9149, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4390053349039516e-05, |
| "loss": 3.9206, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4381667401528996e-05, |
| "loss": 3.9078, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4373281454018476e-05, |
| "loss": 3.9005, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4364911885311685e-05, |
| "loss": 3.9175, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4356525937801165e-05, |
| "loss": 3.9099, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.434813999029065e-05, |
| "loss": 3.903, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.433975404278013e-05, |
| "loss": 3.9067, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4331368095269605e-05, |
| "loss": 3.9135, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4322998526562818e-05, |
| "loss": 3.9222, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4314612579052298e-05, |
| "loss": 3.9068, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4306226631541778e-05, |
| "loss": 3.8925, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.429785706283499e-05, |
| "loss": 3.904, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.428947111532447e-05, |
| "loss": 3.8988, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.428108516781395e-05, |
| "loss": 3.9123, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4272699220303427e-05, |
| "loss": 3.9093, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.426432965159664e-05, |
| "loss": 3.9002, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.425594370408612e-05, |
| "loss": 3.9154, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.424757413537933e-05, |
| "loss": 3.9029, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.423918818786881e-05, |
| "loss": 3.9057, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.423080224035829e-05, |
| "loss": 3.903, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.422241629284777e-05, |
| "loss": 3.9104, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.421403034533725e-05, |
| "loss": 3.9209, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4205644397826728e-05, |
| "loss": 3.9099, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.419725845031621e-05, |
| "loss": 3.8898, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.418887250280569e-05, |
| "loss": 3.9087, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.418048655529517e-05, |
| "loss": 3.9062, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.417211698658838e-05, |
| "loss": 3.9127, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.416373103907786e-05, |
| "loss": 3.9192, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.415534509156734e-05, |
| "loss": 3.9059, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.414695914405682e-05, |
| "loss": 3.8987, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4138573196546303e-05, |
| "loss": 3.9137, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4130187249035783e-05, |
| "loss": 3.8897, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.412180130152526e-05, |
| "loss": 3.9082, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4113431732818472e-05, |
| "loss": 3.9014, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4105045785307952e-05, |
| "loss": 3.9113, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4096659837797432e-05, |
| "loss": 3.8956, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4088273890286912e-05, |
| "loss": 3.9127, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4079887942776392e-05, |
| "loss": 3.8979, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4071501995265872e-05, |
| "loss": 3.9015, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.406313242655908e-05, |
| "loss": 3.911, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.405474647904856e-05, |
| "loss": 3.9048, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.404636053153804e-05, |
| "loss": 3.92, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.403797458402752e-05, |
| "loss": 3.9141, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4029588636517004e-05, |
| "loss": 3.906, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4021202689006484e-05, |
| "loss": 3.9086, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4012816741495964e-05, |
| "loss": 3.9014, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.400443079398544e-05, |
| "loss": 3.9113, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3996061225278653e-05, |
| "loss": 3.8995, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3987691656571866e-05, |
| "loss": 3.9169, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3979305709061346e-05, |
| "loss": 3.9032, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3970919761550826e-05, |
| "loss": 3.9019, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3962533814040306e-05, |
| "loss": 3.9035, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3954164245333515e-05, |
| "loss": 3.9107, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3945778297822995e-05, |
| "loss": 3.9004, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3937392350312475e-05, |
| "loss": 3.9109, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3929006402801958e-05, |
| "loss": 3.8982, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3920620455291438e-05, |
| "loss": 3.914, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3912234507780915e-05, |
| "loss": 3.9111, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3903864939074127e-05, |
| "loss": 3.9115, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3895478991563607e-05, |
| "loss": 3.9033, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3887093044053087e-05, |
| "loss": 3.9022, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3878707096542567e-05, |
| "loss": 3.9174, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3870321149032047e-05, |
| "loss": 3.9088, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.386195158032526e-05, |
| "loss": 3.9031, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3853565632814736e-05, |
| "loss": 3.9159, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3845179685304216e-05, |
| "loss": 3.9103, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3836793737793696e-05, |
| "loss": 3.9187, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3828407790283176e-05, |
| "loss": 3.9085, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.382003822157639e-05, |
| "loss": 3.9147, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.381165227406587e-05, |
| "loss": 3.9121, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.380326632655535e-05, |
| "loss": 3.9061, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3794880379044828e-05, |
| "loss": 3.9101, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3786494431534308e-05, |
| "loss": 3.9115, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3778108484023788e-05, |
| "loss": 3.909, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3769722536513268e-05, |
| "loss": 3.8964, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3761336589002748e-05, |
| "loss": 3.91, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.9997189044952393, |
| "eval_runtime": 297.2732, |
| "eval_samples_per_second": 1283.638, |
| "eval_steps_per_second": 40.115, |
| "step": 1602720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.375296702029596e-05, |
| "loss": 3.8999, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.374458107278544e-05, |
| "loss": 3.8975, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3736195125274917e-05, |
| "loss": 3.917, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3727809177764397e-05, |
| "loss": 3.9165, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.371942323025388e-05, |
| "loss": 3.9128, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.371103728274336e-05, |
| "loss": 3.9077, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.370265133523284e-05, |
| "loss": 3.9076, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.369426538772232e-05, |
| "loss": 3.8954, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.36858794402118e-05, |
| "loss": 3.9119, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3677493492701277e-05, |
| "loss": 3.9164, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3669107545190757e-05, |
| "loss": 3.9078, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3660721597680237e-05, |
| "loss": 3.9147, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.365235202897345e-05, |
| "loss": 3.9042, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.364396608146293e-05, |
| "loss": 3.9081, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.363558013395241e-05, |
| "loss": 3.8993, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.362719418644189e-05, |
| "loss": 3.9012, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.361880823893137e-05, |
| "loss": 3.9003, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.361042229142085e-05, |
| "loss": 3.8998, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.360203634391033e-05, |
| "loss": 3.9058, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.359365039639981e-05, |
| "loss": 3.9192, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.358526444888929e-05, |
| "loss": 3.9149, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.357687850137877e-05, |
| "loss": 3.9148, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.356849255386825e-05, |
| "loss": 3.906, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3560122985161458e-05, |
| "loss": 3.9169, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3551737037650938e-05, |
| "loss": 3.9008, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.354335109014042e-05, |
| "loss": 3.9125, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.35349651426299e-05, |
| "loss": 3.9027, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.352657919511938e-05, |
| "loss": 3.8963, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.351820962641259e-05, |
| "loss": 3.9, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.350982367890207e-05, |
| "loss": 3.9027, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.350143773139155e-05, |
| "loss": 3.9071, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.349305178388103e-05, |
| "loss": 3.9099, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3484665836370513e-05, |
| "loss": 3.914, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.347627988885999e-05, |
| "loss": 3.9114, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3467910320153202e-05, |
| "loss": 3.9022, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3459524372642682e-05, |
| "loss": 3.9134, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.345113842513216e-05, |
| "loss": 3.9033, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3442752477621642e-05, |
| "loss": 3.9003, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3434366530111122e-05, |
| "loss": 3.8963, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3425980582600602e-05, |
| "loss": 3.902, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3417594635090082e-05, |
| "loss": 3.9074, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3409208687579562e-05, |
| "loss": 3.9127, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3400822740069042e-05, |
| "loss": 3.9028, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.339243679255852e-05, |
| "loss": 3.9064, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.338406722385173e-05, |
| "loss": 3.9125, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3375681276341214e-05, |
| "loss": 3.911, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3367295328830694e-05, |
| "loss": 3.9106, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.335890938132017e-05, |
| "loss": 3.8975, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.335052343380965e-05, |
| "loss": 3.8848, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3342153865102863e-05, |
| "loss": 3.9175, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3333767917592343e-05, |
| "loss": 3.9099, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3325381970081823e-05, |
| "loss": 3.9052, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3316996022571303e-05, |
| "loss": 3.9028, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3308626453864516e-05, |
| "loss": 3.8967, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3300240506353992e-05, |
| "loss": 3.8898, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3291854558843472e-05, |
| "loss": 3.8998, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3283468611332952e-05, |
| "loss": 3.8917, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3275099042626168e-05, |
| "loss": 3.9023, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3266713095115645e-05, |
| "loss": 3.9118, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3258327147605125e-05, |
| "loss": 3.8972, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3249941200094605e-05, |
| "loss": 3.8906, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3241571631387814e-05, |
| "loss": 3.9091, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3233185683877297e-05, |
| "loss": 3.8888, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.322481611517051e-05, |
| "loss": 3.8938, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.321643016765999e-05, |
| "loss": 3.9065, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3208044220149466e-05, |
| "loss": 3.9049, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3199658272638946e-05, |
| "loss": 3.895, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3191272325128426e-05, |
| "loss": 3.8908, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3182886377617906e-05, |
| "loss": 3.8941, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.317450043010739e-05, |
| "loss": 3.9018, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.316611448259687e-05, |
| "loss": 3.9082, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.315772853508635e-05, |
| "loss": 3.8995, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3149342587575826e-05, |
| "loss": 3.9063, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3140956640065306e-05, |
| "loss": 3.9121, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3132570692554786e-05, |
| "loss": 3.908, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3124201123847998e-05, |
| "loss": 3.8903, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3115815176337478e-05, |
| "loss": 3.9076, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3107429228826958e-05, |
| "loss": 3.907, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3099043281316438e-05, |
| "loss": 3.8945, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3090673712609647e-05, |
| "loss": 3.9056, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3082287765099127e-05, |
| "loss": 3.9025, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3073901817588607e-05, |
| "loss": 3.9147, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.306551587007809e-05, |
| "loss": 3.9029, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3057162680175032e-05, |
| "loss": 3.8822, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3048776732664512e-05, |
| "loss": 3.8982, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.304040716395772e-05, |
| "loss": 3.8943, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3032021216447204e-05, |
| "loss": 3.9024, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3023635268936684e-05, |
| "loss": 3.9029, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3015265700229893e-05, |
| "loss": 3.8958, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3006879752719373e-05, |
| "loss": 3.9063, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2998493805208853e-05, |
| "loss": 3.8995, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2990107857698333e-05, |
| "loss": 3.8932, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2981721910187813e-05, |
| "loss": 3.901, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2973335962677293e-05, |
| "loss": 3.9065, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2964950015166773e-05, |
| "loss": 3.9126, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2956564067656253e-05, |
| "loss": 3.9004, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2948178120145733e-05, |
| "loss": 3.8859, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2939792172635213e-05, |
| "loss": 3.9007, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2931406225124693e-05, |
| "loss": 3.8995, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2923036656417905e-05, |
| "loss": 3.9069, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2914650708907385e-05, |
| "loss": 3.9113, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2906264761396865e-05, |
| "loss": 3.8927, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2897878813886345e-05, |
| "loss": 3.894, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2889492866375825e-05, |
| "loss": 3.9075, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2881106918865302e-05, |
| "loss": 3.8861, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2872720971354782e-05, |
| "loss": 3.9031, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2864335023844262e-05, |
| "loss": 3.8895, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2855949076333745e-05, |
| "loss": 3.9051, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2847563128823225e-05, |
| "loss": 3.8883, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2839193560116434e-05, |
| "loss": 3.9027, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2830807612605914e-05, |
| "loss": 3.8977, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2822421665095394e-05, |
| "loss": 3.8919, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2814035717584874e-05, |
| "loss": 3.9037, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2805649770074354e-05, |
| "loss": 3.8956, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2797263822563834e-05, |
| "loss": 3.9149, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2788877875053314e-05, |
| "loss": 3.9124, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2780508306346526e-05, |
| "loss": 3.8976, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2772122358836006e-05, |
| "loss": 3.9053, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2763736411325483e-05, |
| "loss": 3.8949, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2755350463814963e-05, |
| "loss": 3.9028, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2746964516304446e-05, |
| "loss": 3.8975, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2738578568793926e-05, |
| "loss": 3.9065, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2730192621283406e-05, |
| "loss": 3.8987, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2721806673772886e-05, |
| "loss": 3.8946, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2713437105066095e-05, |
| "loss": 3.9018, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2705051157555575e-05, |
| "loss": 3.9015, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2696665210045055e-05, |
| "loss": 3.8904, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2688295641338268e-05, |
| "loss": 3.9045, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2679909693827748e-05, |
| "loss": 3.8899, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2671523746317227e-05, |
| "loss": 3.9149, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2663137798806707e-05, |
| "loss": 3.8977, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2654751851296187e-05, |
| "loss": 3.9091, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.26463822825894e-05, |
| "loss": 3.8951, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.263799633507888e-05, |
| "loss": 3.8993, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.262961038756836e-05, |
| "loss": 3.9083, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2621224440057836e-05, |
| "loss": 3.9003, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2612838492547316e-05, |
| "loss": 3.898, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.260446892384053e-05, |
| "loss": 3.9096, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.259608297633001e-05, |
| "loss": 3.9053, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.258769702881949e-05, |
| "loss": 3.911, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.257931108130897e-05, |
| "loss": 3.8975, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.257092513379845e-05, |
| "loss": 3.9105, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.256255556509166e-05, |
| "loss": 3.902, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2554169617581138e-05, |
| "loss": 3.9002, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.254578367007062e-05, |
| "loss": 3.9041, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.25373977225601e-05, |
| "loss": 3.9025, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.252901177504958e-05, |
| "loss": 3.9086, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.252064220634279e-05, |
| "loss": 3.8825, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.251225625883227e-05, |
| "loss": 3.9074, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.996596097946167, |
| "eval_runtime": 294.2951, |
| "eval_samples_per_second": 1296.627, |
| "eval_steps_per_second": 40.521, |
| "step": 1679040 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 6.937826424542801e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |