| { |
| "best_metric": 4.014286041259766, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/2/checkpoints/checkpoint-1755360", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 1755360, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.8206, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 7.554, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 7.0624, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 7.0013, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 6.9543, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 6.9143, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 6.7423, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 6.6408, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 6.5414, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 6.4563, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 6.3913, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 6.3289, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 6.261, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989101543997071e-05, |
| "loss": 6.1967, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988262949246018e-05, |
| "loss": 6.1493, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987424354494966e-05, |
| "loss": 6.0967, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986585759743914e-05, |
| "loss": 6.0539, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985747164992862e-05, |
| "loss": 6.0146, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.98490857024181e-05, |
| "loss": 5.9802, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984069975490758e-05, |
| "loss": 5.9423, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983231380739706e-05, |
| "loss": 5.9104, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 5.8715, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 5.8492, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 5.8079, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 5.7899, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790384069844466e-05, |
| "loss": 5.7587, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 5.7358, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 5.7155, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 5.6849, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 5.6593, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 5.6479, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 5.6353, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 5.6146, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 5.5901, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 5.5845, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970657373115046e-05, |
| "loss": 5.5581, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969820416244367e-05, |
| "loss": 5.5454, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 5.5124, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 5.5149, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 5.493, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 5.4842, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965627442489107e-05, |
| "loss": 5.4728, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964790485618428e-05, |
| "loss": 5.4662, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963951890867376e-05, |
| "loss": 5.4354, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 5.4295, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 5.4243, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96143610661422e-05, |
| "loss": 5.409, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960597511863168e-05, |
| "loss": 5.4109, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959758917112116e-05, |
| "loss": 5.3885, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958920322361064e-05, |
| "loss": 5.3679, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958083365490385e-05, |
| "loss": 5.3762, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957244770739333e-05, |
| "loss": 5.3544, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956406175988281e-05, |
| "loss": 5.3493, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955567581237229e-05, |
| "loss": 5.3211, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954728986486177e-05, |
| "loss": 5.3322, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9538920296154976e-05, |
| "loss": 5.3092, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530534348644456e-05, |
| "loss": 5.3357, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522148401133936e-05, |
| "loss": 5.2975, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951376245362342e-05, |
| "loss": 5.2987, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95053765061129e-05, |
| "loss": 5.2902, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949699055860238e-05, |
| "loss": 5.2708, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948862098989559e-05, |
| "loss": 5.2577, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948023504238507e-05, |
| "loss": 5.2586, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947184909487455e-05, |
| "loss": 5.2407, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 5.2347, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945510995746097e-05, |
| "loss": 5.2516, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 5.2333, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 5.218, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942995211492941e-05, |
| "loss": 5.1964, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942156616741889e-05, |
| "loss": 5.1919, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941318021990837e-05, |
| "loss": 5.2002, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404794272397856e-05, |
| "loss": 5.1976, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396408324887336e-05, |
| "loss": 5.1903, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388022377376816e-05, |
| "loss": 5.1874, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379636429866296e-05, |
| "loss": 5.1781, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371250482355776e-05, |
| "loss": 5.171, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362880913648985e-05, |
| "loss": 5.1569, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354494966138465e-05, |
| "loss": 5.1484, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346109018627945e-05, |
| "loss": 5.1412, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337739449921154e-05, |
| "loss": 5.1299, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329353502410634e-05, |
| "loss": 5.137, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9320967554900114e-05, |
| "loss": 5.1269, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 5.13, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9304195659879074e-05, |
| "loss": 5.1094, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9295809712368554e-05, |
| "loss": 5.0915, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9287423764858034e-05, |
| "loss": 5.1058, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9279037817347514e-05, |
| "loss": 5.1011, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9270651869836994e-05, |
| "loss": 5.0976, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9262265922326474e-05, |
| "loss": 5.0828, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9253879974815954e-05, |
| "loss": 5.0861, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9245494027305433e-05, |
| "loss": 5.0727, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923712445859864e-05, |
| "loss": 5.0668, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922873851108812e-05, |
| "loss": 5.0653, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.92203525635776e-05, |
| "loss": 5.0611, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921196661606708e-05, |
| "loss": 5.0627, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920358066855656e-05, |
| "loss": 5.0566, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919519472104604e-05, |
| "loss": 5.0542, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918680877353552e-05, |
| "loss": 5.0321, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917843920482874e-05, |
| "loss": 5.0285, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917005325731822e-05, |
| "loss": 5.0269, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91616673098077e-05, |
| "loss": 5.0268, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.915328136229718e-05, |
| "loss": 5.0191, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914491179359039e-05, |
| "loss": 5.012, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913652584607987e-05, |
| "loss": 5.0031, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128156277373076e-05, |
| "loss": 5.0107, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119770329862556e-05, |
| "loss": 4.9842, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111400761155765e-05, |
| "loss": 4.9914, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9103014813645245e-05, |
| "loss": 4.9879, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909462886613473e-05, |
| "loss": 4.98, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908624291862421e-05, |
| "loss": 4.9758, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907785697111369e-05, |
| "loss": 4.9802, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906947102360317e-05, |
| "loss": 4.967, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906108507609265e-05, |
| "loss": 4.9692, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905269912858213e-05, |
| "loss": 4.9623, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904432955987534e-05, |
| "loss": 4.9503, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903594361236482e-05, |
| "loss": 4.9566, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90275576648543e-05, |
| "loss": 4.9542, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901918809614751e-05, |
| "loss": 4.9492, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901080214863699e-05, |
| "loss": 4.9355, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.900241620112647e-05, |
| "loss": 4.9415, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899403025361595e-05, |
| "loss": 4.9435, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.898564430610543e-05, |
| "loss": 4.9307, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.897725835859491e-05, |
| "loss": 4.9178, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968872411084396e-05, |
| "loss": 4.9269, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.896048646357387e-05, |
| "loss": 4.9257, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.895210051606335e-05, |
| "loss": 4.9131, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.894371456855283e-05, |
| "loss": 4.908, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.893532862104231e-05, |
| "loss": 4.8988, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892695905233552e-05, |
| "loss": 4.9032, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918573104825e-05, |
| "loss": 4.8938, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891018715731448e-05, |
| "loss": 4.9078, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890180120980396e-05, |
| "loss": 4.8976, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889341526229344e-05, |
| "loss": 4.8796, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888502931478292e-05, |
| "loss": 4.887, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8876659746076134e-05, |
| "loss": 4.885, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8868273798565614e-05, |
| "loss": 4.8804, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8859887851055094e-05, |
| "loss": 4.884, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8851501903544574e-05, |
| "loss": 4.8704, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8843115956034054e-05, |
| "loss": 4.873, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8834730008523534e-05, |
| "loss": 4.8786, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8826344061013014e-05, |
| "loss": 4.8619, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8817958113502494e-05, |
| "loss": 4.8579, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8809572165991974e-05, |
| "loss": 4.8597, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880120259728518e-05, |
| "loss": 4.8461, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879281664977466e-05, |
| "loss": 4.8452, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878443070226414e-05, |
| "loss": 4.8503, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877604475475362e-05, |
| "loss": 4.8354, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87676588072431e-05, |
| "loss": 4.8393, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875927285973258e-05, |
| "loss": 4.8436, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875088691222206e-05, |
| "loss": 4.8395, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.797691345214844, |
| "eval_runtime": 298.3239, |
| "eval_samples_per_second": 1279.116, |
| "eval_steps_per_second": 39.973, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874251734351528e-05, |
| "loss": 4.8214, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873413139600475e-05, |
| "loss": 4.8208, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.872574544849423e-05, |
| "loss": 4.8377, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.871735950098371e-05, |
| "loss": 4.8212, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.870897355347319e-05, |
| "loss": 4.8322, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.870058760596267e-05, |
| "loss": 4.8043, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.869220165845215e-05, |
| "loss": 4.8221, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.868383208974536e-05, |
| "loss": 4.8045, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8675462521038576e-05, |
| "loss": 4.7989, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8667076573528056e-05, |
| "loss": 4.8059, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865870700482127e-05, |
| "loss": 4.8087, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865032105731075e-05, |
| "loss": 4.8093, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864195148860396e-05, |
| "loss": 4.7914, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.863356554109344e-05, |
| "loss": 4.7907, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862517959358292e-05, |
| "loss": 4.783, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.86167936460724e-05, |
| "loss": 4.7841, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8608407698561874e-05, |
| "loss": 4.7822, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8600021751051354e-05, |
| "loss": 4.7827, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8591635803540834e-05, |
| "loss": 4.777, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8583249856030314e-05, |
| "loss": 4.7977, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8574863908519794e-05, |
| "loss": 4.7717, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8566477961009274e-05, |
| "loss": 4.7818, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8558092013498754e-05, |
| "loss": 4.7678, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.854970606598824e-05, |
| "loss": 4.7768, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.854132011847772e-05, |
| "loss": 4.7645, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.853295054977093e-05, |
| "loss": 4.7636, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.852456460226041e-05, |
| "loss": 4.7584, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851617865474989e-05, |
| "loss": 4.7553, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.850779270723937e-05, |
| "loss": 4.7384, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849940675972885e-05, |
| "loss": 4.754, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849102081221833e-05, |
| "loss": 4.7511, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.848263486470781e-05, |
| "loss": 4.7543, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847424891719729e-05, |
| "loss": 4.7446, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84658793484905e-05, |
| "loss": 4.747, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845750977978371e-05, |
| "loss": 4.7382, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8449123832273194e-05, |
| "loss": 4.7346, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8440737884762674e-05, |
| "loss": 4.7193, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8432351937252154e-05, |
| "loss": 4.7271, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8423965989741634e-05, |
| "loss": 4.7247, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8415580042231114e-05, |
| "loss": 4.7233, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840719409472059e-05, |
| "loss": 4.7271, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839880814721007e-05, |
| "loss": 4.7266, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839043857850328e-05, |
| "loss": 4.7149, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838205263099276e-05, |
| "loss": 4.7205, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8373666683482236e-05, |
| "loss": 4.7189, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8365280735971716e-05, |
| "loss": 4.7052, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8356894788461196e-05, |
| "loss": 4.7188, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8348508840950676e-05, |
| "loss": 4.706, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834013927224389e-05, |
| "loss": 4.6933, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.833175332473337e-05, |
| "loss": 4.7115, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.832336737722285e-05, |
| "loss": 4.6952, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.831498142971233e-05, |
| "loss": 4.6934, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.830659548220181e-05, |
| "loss": 4.682, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.829820953469129e-05, |
| "loss": 4.6918, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.82898399659845e-05, |
| "loss": 4.6794, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.828145401847398e-05, |
| "loss": 4.7121, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.827306807096346e-05, |
| "loss": 4.6811, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.826468212345294e-05, |
| "loss": 4.6934, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.825631255474615e-05, |
| "loss": 4.6897, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.824792660723563e-05, |
| "loss": 4.6764, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823954065972511e-05, |
| "loss": 4.6671, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.82311547122146e-05, |
| "loss": 4.6787, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8222768764704077e-05, |
| "loss": 4.6643, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8214399195997286e-05, |
| "loss": 4.6631, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8206013248486766e-05, |
| "loss": 4.6812, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8197627300976246e-05, |
| "loss": 4.6768, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8189241353465726e-05, |
| "loss": 4.6593, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8180855405955206e-05, |
| "loss": 4.654, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8172469458444685e-05, |
| "loss": 4.6533, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8164083510934165e-05, |
| "loss": 4.6649, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8155697563423645e-05, |
| "loss": 4.6636, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8147311615913125e-05, |
| "loss": 4.6621, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8138942047206334e-05, |
| "loss": 4.6618, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8130556099695814e-05, |
| "loss": 4.6632, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8122170152185294e-05, |
| "loss": 4.6612, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8113784204674774e-05, |
| "loss": 4.6458, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810541463596799e-05, |
| "loss": 4.6493, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809702868845747e-05, |
| "loss": 4.6463, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808864274094695e-05, |
| "loss": 4.6329, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808025679343642e-05, |
| "loss": 4.6478, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80718708459259e-05, |
| "loss": 4.6465, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806348489841538e-05, |
| "loss": 4.6491, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.80551153297086e-05, |
| "loss": 4.6288, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804672938219807e-05, |
| "loss": 4.6202, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803835981349129e-05, |
| "loss": 4.6377, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802997386598077e-05, |
| "loss": 4.633, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802158791847025e-05, |
| "loss": 4.638, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801320197095973e-05, |
| "loss": 4.6308, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.800481602344921e-05, |
| "loss": 4.6325, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7996446454742424e-05, |
| "loss": 4.6266, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79880605072319e-05, |
| "loss": 4.6188, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797967455972138e-05, |
| "loss": 4.6232, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797128861221086e-05, |
| "loss": 4.6226, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.796290266470034e-05, |
| "loss": 4.6279, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795451671718982e-05, |
| "loss": 4.6292, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79461307696793e-05, |
| "loss": 4.6255, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7937761200972506e-05, |
| "loss": 4.6116, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7929375253461986e-05, |
| "loss": 4.6107, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792098930595147e-05, |
| "loss": 4.6093, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791260335844095e-05, |
| "loss": 4.6193, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790421741093043e-05, |
| "loss": 4.6058, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.789583146341991e-05, |
| "loss": 4.6046, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.788744551590939e-05, |
| "loss": 4.5983, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787905956839887e-05, |
| "loss": 4.6122, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787067362088835e-05, |
| "loss": 4.586, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.786230405218156e-05, |
| "loss": 4.5988, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.785391810467104e-05, |
| "loss": 4.6, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.784553215716052e-05, |
| "loss": 4.5937, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.783714620965e-05, |
| "loss": 4.5916, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782876026213948e-05, |
| "loss": 4.602, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782037431462896e-05, |
| "loss": 4.5867, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.781200474592217e-05, |
| "loss": 4.591, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.780361879841165e-05, |
| "loss": 4.5942, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7795249229704866e-05, |
| "loss": 4.5823, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7786863282194346e-05, |
| "loss": 4.5913, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7778477334683826e-05, |
| "loss": 4.5965, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7770091387173306e-05, |
| "loss": 4.5882, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7761705439662786e-05, |
| "loss": 4.5755, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.775331949215226e-05, |
| "loss": 4.5883, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7744949923445475e-05, |
| "loss": 4.5865, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7736563975934955e-05, |
| "loss": 4.5784, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7728178028424435e-05, |
| "loss": 4.5726, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771979208091391e-05, |
| "loss": 4.576, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771140613340339e-05, |
| "loss": 4.5879, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7703020185892875e-05, |
| "loss": 4.5755, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7694634238382355e-05, |
| "loss": 4.5716, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7686248290871835e-05, |
| "loss": 4.5619, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7677862343361315e-05, |
| "loss": 4.5685, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7669476395850795e-05, |
| "loss": 4.5636, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7661106827144004e-05, |
| "loss": 4.5792, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7652720879633484e-05, |
| "loss": 4.575, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7644334932122964e-05, |
| "loss": 4.5551, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.763596536341617e-05, |
| "loss": 4.5618, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762757941590565e-05, |
| "loss": 4.5655, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761920984719886e-05, |
| "loss": 4.564, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761082389968834e-05, |
| "loss": 4.5707, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.760243795217783e-05, |
| "loss": 4.5566, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.759405200466731e-05, |
| "loss": 4.5598, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758566605715679e-05, |
| "loss": 4.5671, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757728010964627e-05, |
| "loss": 4.5533, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756889416213575e-05, |
| "loss": 4.5545, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756050821462523e-05, |
| "loss": 4.5596, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.755213864591844e-05, |
| "loss": 4.544, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754375269840792e-05, |
| "loss": 4.5479, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75353667508974e-05, |
| "loss": 4.5547, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.752698080338688e-05, |
| "loss": 4.5406, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751859485587636e-05, |
| "loss": 4.5472, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751020890836584e-05, |
| "loss": 4.5504, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.750182296085532e-05, |
| "loss": 4.5474, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.510871410369873, |
| "eval_runtime": 296.5965, |
| "eval_samples_per_second": 1286.566, |
| "eval_steps_per_second": 40.206, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74934370133448e-05, |
| "loss": 4.5325, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748505106583428e-05, |
| "loss": 4.5329, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747666511832376e-05, |
| "loss": 4.5536, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746827917081324e-05, |
| "loss": 4.532, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745989322330272e-05, |
| "loss": 4.5484, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74515072757922e-05, |
| "loss": 4.5299, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.744312132828168e-05, |
| "loss": 4.5444, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.743473538077116e-05, |
| "loss": 4.5256, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426349433260637e-05, |
| "loss": 4.5355, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7417963485750117e-05, |
| "loss": 4.5314, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7409577538239597e-05, |
| "loss": 4.5356, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401191590729076e-05, |
| "loss": 4.5416, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7392838400826015e-05, |
| "loss": 4.5214, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384452453315495e-05, |
| "loss": 4.5249, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7376066505804975e-05, |
| "loss": 4.5194, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736768055829446e-05, |
| "loss": 4.5174, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735929461078394e-05, |
| "loss": 4.5201, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735090866327342e-05, |
| "loss": 4.5245, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73425227157629e-05, |
| "loss": 4.5174, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733413676825238e-05, |
| "loss": 4.5415, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732575082074186e-05, |
| "loss": 4.5209, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7317364873231334e-05, |
| "loss": 4.5259, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7308978925720814e-05, |
| "loss": 4.5174, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7300592978210294e-05, |
| "loss": 4.5269, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7292207030699774e-05, |
| "loss": 4.5163, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728383746199298e-05, |
| "loss": 4.5107, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727545151448246e-05, |
| "loss": 4.5148, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726706556697194e-05, |
| "loss": 4.5087, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725869599826516e-05, |
| "loss": 4.4927, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725031005075464e-05, |
| "loss": 4.5096, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724192410324412e-05, |
| "loss": 4.515, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.72335381557336e-05, |
| "loss": 4.5109, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722515220822308e-05, |
| "loss": 4.5076, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721676626071256e-05, |
| "loss": 4.5082, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720838031320204e-05, |
| "loss": 4.5017, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719999436569152e-05, |
| "loss": 4.4965, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7191608418181e-05, |
| "loss": 4.4929, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718323884947421e-05, |
| "loss": 4.4912, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717485290196369e-05, |
| "loss": 4.4925, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716646695445317e-05, |
| "loss": 4.493, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715808100694265e-05, |
| "loss": 4.4996, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714969505943213e-05, |
| "loss": 4.5, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7141325490725343e-05, |
| "loss": 4.4905, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7132939543214823e-05, |
| "loss": 4.4949, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7124553595704303e-05, |
| "loss": 4.4985, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711616764819378e-05, |
| "loss": 4.4804, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710779807948699e-05, |
| "loss": 4.4952, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709941213197647e-05, |
| "loss": 4.4883, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709102618446595e-05, |
| "loss": 4.4699, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708264023695543e-05, |
| "loss": 4.4901, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707425428944491e-05, |
| "loss": 4.4821, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706586834193439e-05, |
| "loss": 4.4765, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7057482394423865e-05, |
| "loss": 4.4729, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704909644691335e-05, |
| "loss": 4.4769, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704072687820657e-05, |
| "loss": 4.4643, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.703235730949978e-05, |
| "loss": 4.4936, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702397136198926e-05, |
| "loss": 4.4736, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701558541447874e-05, |
| "loss": 4.4809, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700719946696822e-05, |
| "loss": 4.4835, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69988135194577e-05, |
| "loss": 4.4683, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699042757194717e-05, |
| "loss": 4.46, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6982058003240386e-05, |
| "loss": 4.4751, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6973672055729866e-05, |
| "loss": 4.4614, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696528610821934e-05, |
| "loss": 4.4566, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695690016070882e-05, |
| "loss": 4.4767, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6948514213198306e-05, |
| "loss": 4.4742, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6940128265687786e-05, |
| "loss": 4.4579, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6931742318177266e-05, |
| "loss": 4.4532, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6923372749470475e-05, |
| "loss": 4.4559, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6914986801959955e-05, |
| "loss": 4.4631, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6906600854449435e-05, |
| "loss": 4.4666, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6898214906938915e-05, |
| "loss": 4.4658, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6889828959428395e-05, |
| "loss": 4.4672, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6881443011917875e-05, |
| "loss": 4.4654, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6873057064407355e-05, |
| "loss": 4.469, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6864687495700564e-05, |
| "loss": 4.4535, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856301548190044e-05, |
| "loss": 4.4567, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684793197948325e-05, |
| "loss": 4.4591, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683954603197274e-05, |
| "loss": 4.44, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683116008446222e-05, |
| "loss": 4.4567, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68227741369517e-05, |
| "loss": 4.4616, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681438818944118e-05, |
| "loss": 4.4598, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680600224193066e-05, |
| "loss": 4.4453, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679761629442014e-05, |
| "loss": 4.4337, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678923034690962e-05, |
| "loss": 4.4522, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.67808443993991e-05, |
| "loss": 4.4412, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677245845188858e-05, |
| "loss": 4.4573, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676408888318179e-05, |
| "loss": 4.4477, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675570293567127e-05, |
| "loss": 4.4503, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674731698816075e-05, |
| "loss": 4.4446, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673893104065023e-05, |
| "loss": 4.4341, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673056147194344e-05, |
| "loss": 4.4425, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722175524432924e-05, |
| "loss": 4.4407, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713789576922404e-05, |
| "loss": 4.4521, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705403629411884e-05, |
| "loss": 4.4543, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.669701768190136e-05, |
| "loss": 4.4458, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668863173439084e-05, |
| "loss": 4.4317, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668024578688032e-05, |
| "loss": 4.4367, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66718598393698e-05, |
| "loss": 4.4363, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.666347389185928e-05, |
| "loss": 4.4435, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665512070195622e-05, |
| "loss": 4.432, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664675113324943e-05, |
| "loss": 4.4337, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663836518573891e-05, |
| "loss": 4.4241, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662997923822839e-05, |
| "loss": 4.444, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662159329071788e-05, |
| "loss": 4.4153, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661320734320736e-05, |
| "loss": 4.4299, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660482139569683e-05, |
| "loss": 4.4233, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659643544818631e-05, |
| "loss": 4.428, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658804950067579e-05, |
| "loss": 4.423, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657966355316527e-05, |
| "loss": 4.4336, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657129398445848e-05, |
| "loss": 4.4261, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656290803694796e-05, |
| "loss": 4.4212, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655452208943744e-05, |
| "loss": 4.4295, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654613614192692e-05, |
| "loss": 4.4142, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65377501944164e-05, |
| "loss": 4.4331, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652936424690588e-05, |
| "loss": 4.4374, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6520994678199095e-05, |
| "loss": 4.4212, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6512608730688575e-05, |
| "loss": 4.4171, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6504222783178055e-05, |
| "loss": 4.4229, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6495836835667535e-05, |
| "loss": 4.4249, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6487450888157015e-05, |
| "loss": 4.417, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479064940646495e-05, |
| "loss": 4.4145, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6470678993135975e-05, |
| "loss": 4.4125, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462293045625455e-05, |
| "loss": 4.4302, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6453923476918664e-05, |
| "loss": 4.4198, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6445537529408144e-05, |
| "loss": 4.4142, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6437151581897624e-05, |
| "loss": 4.4031, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642878201319083e-05, |
| "loss": 4.4142, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642039606568031e-05, |
| "loss": 4.4049, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64120101181698e-05, |
| "loss": 4.4239, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640362417065928e-05, |
| "loss": 4.4196, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639523822314876e-05, |
| "loss": 4.4054, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638685227563824e-05, |
| "loss": 4.402, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637846632812772e-05, |
| "loss": 4.4119, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637008038061719e-05, |
| "loss": 4.4086, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636169443310667e-05, |
| "loss": 4.4213, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635332486439989e-05, |
| "loss": 4.4008, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634493891688936e-05, |
| "loss": 4.4126, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633655296937884e-05, |
| "loss": 4.41, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632816702186832e-05, |
| "loss": 4.4075, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631979745316154e-05, |
| "loss": 4.4074, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631141150565102e-05, |
| "loss": 4.408, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.63030255581405e-05, |
| "loss": 4.3974, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629463961062998e-05, |
| "loss": 4.4028, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628627004192319e-05, |
| "loss": 4.4062, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6277884094412666e-05, |
| "loss": 4.3987, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6269498146902146e-05, |
| "loss": 4.3952, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6261112199391626e-05, |
| "loss": 4.4041, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6252742630684835e-05, |
| "loss": 4.4015, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.372279167175293, |
| "eval_runtime": 293.5676, |
| "eval_samples_per_second": 1299.84, |
| "eval_steps_per_second": 40.621, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6244356683174315e-05, |
| "loss": 4.3951, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6235970735663795e-05, |
| "loss": 4.3849, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6227584788153275e-05, |
| "loss": 4.4101, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6219215219446484e-05, |
| "loss": 4.3895, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621082927193597e-05, |
| "loss": 4.4094, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620244332442545e-05, |
| "loss": 4.3834, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619405737691493e-05, |
| "loss": 4.4034, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.618567142940441e-05, |
| "loss": 4.3876, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617728548189389e-05, |
| "loss": 4.3915, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61689159131871e-05, |
| "loss": 4.394, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616052996567658e-05, |
| "loss": 4.3945, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615214401816606e-05, |
| "loss": 4.4011, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614377444945927e-05, |
| "loss": 4.3847, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.613538850194875e-05, |
| "loss": 4.3834, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.612700255443823e-05, |
| "loss": 4.3875, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611861660692771e-05, |
| "loss": 4.3737, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611023065941719e-05, |
| "loss": 4.3844, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.610184471190667e-05, |
| "loss": 4.3894, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6093458764396156e-05, |
| "loss": 4.3802, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6085072816885636e-05, |
| "loss": 4.4027, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6076686869375116e-05, |
| "loss": 4.3894, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6068317300668325e-05, |
| "loss": 4.395, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6059931353157805e-05, |
| "loss": 4.3826, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6051545405647285e-05, |
| "loss": 4.3887, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6043159458136764e-05, |
| "loss": 4.383, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6034789889429974e-05, |
| "loss": 4.3789, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6026403941919454e-05, |
| "loss": 4.3822, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6018017994408933e-05, |
| "loss": 4.3742, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6009632046898413e-05, |
| "loss": 4.369, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600126247819162e-05, |
| "loss": 4.3731, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.599287653068111e-05, |
| "loss": 4.3829, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598449058317059e-05, |
| "loss": 4.3821, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597610463566007e-05, |
| "loss": 4.3789, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.596771868814955e-05, |
| "loss": 4.3818, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595934911944276e-05, |
| "loss": 4.3658, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595096317193224e-05, |
| "loss": 4.3694, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.594257722442172e-05, |
| "loss": 4.3693, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.59341912769112e-05, |
| "loss": 4.3606, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.592580532940067e-05, |
| "loss": 4.3675, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.591741938189015e-05, |
| "loss": 4.3642, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590903343437963e-05, |
| "loss": 4.3758, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.590066386567285e-05, |
| "loss": 4.3748, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.589227791816233e-05, |
| "loss": 4.3656, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.588389197065181e-05, |
| "loss": 4.3657, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.587550602314129e-05, |
| "loss": 4.3757, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.586712007563077e-05, |
| "loss": 4.3581, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5858750506923976e-05, |
| "loss": 4.3714, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5850364559413456e-05, |
| "loss": 4.3627, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5841978611902936e-05, |
| "loss": 4.347, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5833609043196145e-05, |
| "loss": 4.3708, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5825223095685625e-05, |
| "loss": 4.3583, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5816837148175105e-05, |
| "loss": 4.3583, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5808451200664585e-05, |
| "loss": 4.3486, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5800065253154065e-05, |
| "loss": 4.3563, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5791679305643545e-05, |
| "loss": 4.3447, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5783293358133025e-05, |
| "loss": 4.3666, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577490741062251e-05, |
| "loss": 4.356, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.576652146311199e-05, |
| "loss": 4.3594, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575813551560147e-05, |
| "loss": 4.365, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574974956809095e-05, |
| "loss": 4.3507, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574136362058043e-05, |
| "loss": 4.3388, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.573301043067737e-05, |
| "loss": 4.3605, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.572462448316685e-05, |
| "loss": 4.3439, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.571623853565633e-05, |
| "loss": 4.3409, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.570785258814581e-05, |
| "loss": 4.3588, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569946664063529e-05, |
| "loss": 4.3559, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569108069312477e-05, |
| "loss": 4.3434, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.568269474561425e-05, |
| "loss": 4.3416, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5674325176907465e-05, |
| "loss": 4.334, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5665939229396945e-05, |
| "loss": 4.3451, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5657553281886425e-05, |
| "loss": 4.3529, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5649167334375905e-05, |
| "loss": 4.3534, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5640781386865385e-05, |
| "loss": 4.3523, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.563239543935486e-05, |
| "loss": 4.3473, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.562400949184434e-05, |
| "loss": 4.3611, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5615639923137554e-05, |
| "loss": 4.3373, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5607253975627034e-05, |
| "loss": 4.3471, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559886802811651e-05, |
| "loss": 4.3443, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559048208060599e-05, |
| "loss": 4.3265, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.558209613309547e-05, |
| "loss": 4.3505, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.557372656438868e-05, |
| "loss": 4.3485, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556534061687816e-05, |
| "loss": 4.3444, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.555695466936764e-05, |
| "loss": 4.3363, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554856872185712e-05, |
| "loss": 4.3276, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.55401827743466e-05, |
| "loss": 4.3404, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.553179682683608e-05, |
| "loss": 4.3376, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.552341087932556e-05, |
| "loss": 4.3449, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.551504131061877e-05, |
| "loss": 4.3348, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.550667174191198e-05, |
| "loss": 4.345, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549828579440146e-05, |
| "loss": 4.332, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548989984689094e-05, |
| "loss": 4.3325, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548151389938042e-05, |
| "loss": 4.3317, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.54731279518699e-05, |
| "loss": 4.329, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.546474200435939e-05, |
| "loss": 4.3477, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.545635605684887e-05, |
| "loss": 4.3438, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.544797010933835e-05, |
| "loss": 4.3448, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543958416182783e-05, |
| "loss": 4.3238, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5431214593121036e-05, |
| "loss": 4.3315, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5422828645610516e-05, |
| "loss": 4.3325, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5414442698099996e-05, |
| "loss": 4.3359, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5406056750589476e-05, |
| "loss": 4.3293, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5397670803078956e-05, |
| "loss": 4.3263, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5389284855568436e-05, |
| "loss": 4.3188, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5380898908057916e-05, |
| "loss": 4.3418, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5372512960547396e-05, |
| "loss": 4.3121, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5364143391840605e-05, |
| "loss": 4.3247, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535577382313382e-05, |
| "loss": 4.3214, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.53473878756233e-05, |
| "loss": 4.3244, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533900192811278e-05, |
| "loss": 4.32, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533061598060226e-05, |
| "loss": 4.3331, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.532223003309174e-05, |
| "loss": 4.3222, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.531384408558122e-05, |
| "loss": 4.3153, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.530547451687443e-05, |
| "loss": 4.3321, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.529710494816764e-05, |
| "loss": 4.3146, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528871900065712e-05, |
| "loss": 4.3291, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.52803330531466e-05, |
| "loss": 4.3373, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.527194710563608e-05, |
| "loss": 4.3241, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526356115812556e-05, |
| "loss": 4.3181, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.525517521061504e-05, |
| "loss": 4.3174, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.524678926310452e-05, |
| "loss": 4.3223, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5238403315594e-05, |
| "loss": 4.3191, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.523001736808348e-05, |
| "loss": 4.3211, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221647799376694e-05, |
| "loss": 4.3098, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.521326185186617e-05, |
| "loss": 4.3309, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.520487590435565e-05, |
| "loss": 4.323, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.519648995684513e-05, |
| "loss": 4.3183, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518810400933461e-05, |
| "loss": 4.3023, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517971806182409e-05, |
| "loss": 4.3164, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5171348493117297e-05, |
| "loss": 4.3096, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5162962545606777e-05, |
| "loss": 4.3218, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5154576598096256e-05, |
| "loss": 4.3264, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514619065058574e-05, |
| "loss": 4.3146, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513780470307522e-05, |
| "loss": 4.2995, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512943513436843e-05, |
| "loss": 4.3173, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512104918685791e-05, |
| "loss": 4.3115, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.511266323934739e-05, |
| "loss": 4.3268, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.510427729183687e-05, |
| "loss": 4.3028, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.509589134432635e-05, |
| "loss": 4.316, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.508750539681583e-05, |
| "loss": 4.3165, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507911944930531e-05, |
| "loss": 4.3165, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507073350179479e-05, |
| "loss": 4.3113, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5062363933088e-05, |
| "loss": 4.3134, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.505397798557748e-05, |
| "loss": 4.3031, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.50456084168707e-05, |
| "loss": 4.3114, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.503722246936018e-05, |
| "loss": 4.3107, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502883652184966e-05, |
| "loss": 4.3082, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502045057433914e-05, |
| "loss": 4.3, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.501206462682862e-05, |
| "loss": 4.3134, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.50036786793181e-05, |
| "loss": 4.3064, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.286992073059082, |
| "eval_runtime": 294.0694, |
| "eval_samples_per_second": 1297.622, |
| "eval_steps_per_second": 40.552, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4995292731807577e-05, |
| "loss": 4.2992, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4986906784297057e-05, |
| "loss": 4.2978, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497852083678653e-05, |
| "loss": 4.3161, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497013488927601e-05, |
| "loss": 4.3014, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496174894176549e-05, |
| "loss": 4.3123, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.495336299425497e-05, |
| "loss": 4.3002, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.494497704674445e-05, |
| "loss": 4.307, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.493659109923393e-05, |
| "loss": 4.3003, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492820515172341e-05, |
| "loss": 4.3005, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4919819204212896e-05, |
| "loss": 4.3036, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4911433256702376e-05, |
| "loss": 4.3054, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4903047309191856e-05, |
| "loss": 4.3112, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4894677740485065e-05, |
| "loss": 4.2973, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4886291792974545e-05, |
| "loss": 4.2953, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4877905845464025e-05, |
| "loss": 4.2969, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4869519897953505e-05, |
| "loss": 4.2857, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4861133950442985e-05, |
| "loss": 4.2961, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4852748002932465e-05, |
| "loss": 4.3002, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4844362055421945e-05, |
| "loss": 4.2923, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.483597610791142e-05, |
| "loss": 4.3143, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.48275901604009e-05, |
| "loss": 4.3023, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481920421289038e-05, |
| "loss": 4.3064, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4810818265379865e-05, |
| "loss": 4.295, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4802432317869345e-05, |
| "loss": 4.3028, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479407912796629e-05, |
| "loss": 4.293, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478569318045577e-05, |
| "loss": 4.2978, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477730723294524e-05, |
| "loss": 4.2943, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476892128543472e-05, |
| "loss": 4.29, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47605353379242e-05, |
| "loss": 4.286, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475214939041368e-05, |
| "loss": 4.2864, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474376344290316e-05, |
| "loss": 4.2938, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473537749539264e-05, |
| "loss": 4.2978, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472700792668585e-05, |
| "loss": 4.2939, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471862197917533e-05, |
| "loss": 4.2948, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4710268789272283e-05, |
| "loss": 4.2815, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4701882841761763e-05, |
| "loss": 4.2876, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4693496894251243e-05, |
| "loss": 4.2876, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4685110946740717e-05, |
| "loss": 4.2753, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4676724999230197e-05, |
| "loss": 4.2879, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4668339051719676e-05, |
| "loss": 4.278, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4659953104209156e-05, |
| "loss": 4.2925, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4651567156698636e-05, |
| "loss": 4.2917, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4643181209188116e-05, |
| "loss": 4.2858, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4634795261677596e-05, |
| "loss": 4.2806, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4626409314167076e-05, |
| "loss": 4.2956, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4618039745460285e-05, |
| "loss": 4.2751, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4609653797949765e-05, |
| "loss": 4.2908, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460126785043925e-05, |
| "loss": 4.2807, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459288190292873e-05, |
| "loss": 4.2663, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.458449595541821e-05, |
| "loss": 4.2911, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457611000790769e-05, |
| "loss": 4.2768, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.456772406039717e-05, |
| "loss": 4.2761, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455933811288665e-05, |
| "loss": 4.2718, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455096854417986e-05, |
| "loss": 4.2711, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.454258259666934e-05, |
| "loss": 4.2707, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.453421302796255e-05, |
| "loss": 4.278, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.452582708045203e-05, |
| "loss": 4.2777, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.451744113294151e-05, |
| "loss": 4.2779, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450905518543099e-05, |
| "loss": 4.2855, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450066923792047e-05, |
| "loss": 4.2742, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4492299669213686e-05, |
| "loss": 4.2577, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4483913721703166e-05, |
| "loss": 4.2812, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4475527774192646e-05, |
| "loss": 4.2638, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4467141826682126e-05, |
| "loss": 4.2599, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4458755879171606e-05, |
| "loss": 4.2824, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445036993166108e-05, |
| "loss": 4.278, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444198398415056e-05, |
| "loss": 4.2655, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443359803664004e-05, |
| "loss": 4.2654, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442521208912952e-05, |
| "loss": 4.2538, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4416858899226464e-05, |
| "loss": 4.2684, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4408472951715944e-05, |
| "loss": 4.2798, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4400087004205423e-05, |
| "loss": 4.2741, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4391701056694903e-05, |
| "loss": 4.2743, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438331510918438e-05, |
| "loss": 4.271, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437492916167386e-05, |
| "loss": 4.2876, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436654321416334e-05, |
| "loss": 4.2607, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435817364545655e-05, |
| "loss": 4.2735, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434978769794603e-05, |
| "loss": 4.2704, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434140175043551e-05, |
| "loss": 4.2457, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.433301580292499e-05, |
| "loss": 4.2768, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.432462985541447e-05, |
| "loss": 4.2712, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431624390790395e-05, |
| "loss": 4.2696, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430785796039343e-05, |
| "loss": 4.2637, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429947201288291e-05, |
| "loss": 4.2496, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429110244417613e-05, |
| "loss": 4.2642, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.428273287546934e-05, |
| "loss": 4.2627, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.427434692795882e-05, |
| "loss": 4.2719, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.42659609804483e-05, |
| "loss": 4.26, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.425757503293778e-05, |
| "loss": 4.2702, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424918908542726e-05, |
| "loss": 4.2582, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424080313791674e-05, |
| "loss": 4.2623, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.423241719040622e-05, |
| "loss": 4.2559, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.42240312428957e-05, |
| "loss": 4.2552, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.421564529538518e-05, |
| "loss": 4.2727, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4207275726678386e-05, |
| "loss": 4.269, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4198889779167866e-05, |
| "loss": 4.2746, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4190503831657346e-05, |
| "loss": 4.2466, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4182117884146826e-05, |
| "loss": 4.2581, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.417374831544004e-05, |
| "loss": 4.2604, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416537874673325e-05, |
| "loss": 4.2619, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415699279922273e-05, |
| "loss": 4.2589, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414860685171221e-05, |
| "loss": 4.2495, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414022090420169e-05, |
| "loss": 4.2528, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.413183495669117e-05, |
| "loss": 4.2689, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.412344900918065e-05, |
| "loss": 4.2393, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.411506306167013e-05, |
| "loss": 4.2533, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.410667711415961e-05, |
| "loss": 4.2462, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4098291166649083e-05, |
| "loss": 4.2584, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40899215979423e-05, |
| "loss": 4.2488, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4081552029235515e-05, |
| "loss": 4.2618, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4073166081724995e-05, |
| "loss": 4.2535, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4064780134214475e-05, |
| "loss": 4.245, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4056394186703955e-05, |
| "loss": 4.2604, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4048024617997164e-05, |
| "loss": 4.2461, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4039638670486644e-05, |
| "loss": 4.2609, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4031252722976124e-05, |
| "loss": 4.265, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4022866775465604e-05, |
| "loss": 4.2522, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4014480827955084e-05, |
| "loss": 4.2467, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400609488044456e-05, |
| "loss": 4.2508, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.399770893293404e-05, |
| "loss": 4.2495, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398932298542352e-05, |
| "loss": 4.2521, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398096979552047e-05, |
| "loss": 4.2509, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397258384800995e-05, |
| "loss": 4.2417, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396419790049943e-05, |
| "loss": 4.2607, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.395581195298891e-05, |
| "loss": 4.2513, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.394742600547839e-05, |
| "loss": 4.2522, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393904005796786e-05, |
| "loss": 4.2352, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393065411045734e-05, |
| "loss": 4.2474, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392226816294682e-05, |
| "loss": 4.2409, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.391389859424003e-05, |
| "loss": 4.2526, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.390551264672951e-05, |
| "loss": 4.2552, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.389712669921899e-05, |
| "loss": 4.2499, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388874075170847e-05, |
| "loss": 4.2289, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388035480419795e-05, |
| "loss": 4.2519, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.387196885668744e-05, |
| "loss": 4.2451, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.386358290917692e-05, |
| "loss": 4.2581, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3855213340470127e-05, |
| "loss": 4.2375, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3846827392959606e-05, |
| "loss": 4.2533, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3838441445449086e-05, |
| "loss": 4.2496, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3830071876742296e-05, |
| "loss": 4.2521, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3821685929231775e-05, |
| "loss": 4.2429, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3813316360524985e-05, |
| "loss": 4.2476, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3804930413014465e-05, |
| "loss": 4.2363, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3796544465503944e-05, |
| "loss": 4.2453, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3788158517993424e-05, |
| "loss": 4.2447, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3779772570482904e-05, |
| "loss": 4.242, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.377140300177612e-05, |
| "loss": 4.2358, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.37630170542656e-05, |
| "loss": 4.2452, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375463110675508e-05, |
| "loss": 4.245, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.22967004776001, |
| "eval_runtime": 293.6171, |
| "eval_samples_per_second": 1299.621, |
| "eval_steps_per_second": 40.614, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.374624515924456e-05, |
| "loss": 4.2396, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.373785921173404e-05, |
| "loss": 4.2307, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372947326422352e-05, |
| "loss": 4.2493, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3721087316713e-05, |
| "loss": 4.2374, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.371270136920248e-05, |
| "loss": 4.2455, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.370431542169196e-05, |
| "loss": 4.2383, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.369592947418144e-05, |
| "loss": 4.2391, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.368754352667092e-05, |
| "loss": 4.2398, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367917395796413e-05, |
| "loss": 4.2347, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367078801045361e-05, |
| "loss": 4.2386, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.366240206294309e-05, |
| "loss": 4.2453, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3654016115432576e-05, |
| "loss": 4.2421, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3645646546725785e-05, |
| "loss": 4.2338, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3637260599215265e-05, |
| "loss": 4.2308, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3628874651704745e-05, |
| "loss": 4.2363, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3620488704194225e-05, |
| "loss": 4.2194, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36121027566837e-05, |
| "loss": 4.233, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.360371680917318e-05, |
| "loss": 4.2339, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.359533086166266e-05, |
| "loss": 4.2336, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.358694491415214e-05, |
| "loss": 4.2512, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357857534544535e-05, |
| "loss": 4.2407, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357018939793483e-05, |
| "loss": 4.2435, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.356181982922804e-05, |
| "loss": 4.2334, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.355343388171752e-05, |
| "loss": 4.2383, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3545047934207e-05, |
| "loss": 4.2318, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.353666198669648e-05, |
| "loss": 4.2344, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352827603918596e-05, |
| "loss": 4.2303, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351989009167544e-05, |
| "loss": 4.229, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351150414416492e-05, |
| "loss": 4.2257, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.350313457545813e-05, |
| "loss": 4.2262, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.349474862794761e-05, |
| "loss": 4.2302, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.348636268043709e-05, |
| "loss": 4.2347, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347797673292657e-05, |
| "loss": 4.2305, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346959078541605e-05, |
| "loss": 4.2365, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346120483790553e-05, |
| "loss": 4.2199, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.345281889039501e-05, |
| "loss": 4.2277, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.344443294288449e-05, |
| "loss": 4.2264, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.343606337417771e-05, |
| "loss": 4.2155, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.342767742666719e-05, |
| "loss": 4.2296, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.341929147915667e-05, |
| "loss": 4.2134, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.341090553164615e-05, |
| "loss": 4.2316, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3402535962939356e-05, |
| "loss": 4.2307, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3394166394232565e-05, |
| "loss": 4.2294, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3385780446722045e-05, |
| "loss": 4.2209, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3377394499211525e-05, |
| "loss": 4.2365, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3369008551701005e-05, |
| "loss": 4.2134, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3360622604190485e-05, |
| "loss": 4.2334, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3352236656679965e-05, |
| "loss": 4.2207, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3343850709169445e-05, |
| "loss": 4.201, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.333548114046266e-05, |
| "loss": 4.2359, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.332709519295214e-05, |
| "loss": 4.2147, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331870924544162e-05, |
| "loss": 4.222, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.33103232979311e-05, |
| "loss": 4.2097, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.330193735042058e-05, |
| "loss": 4.2105, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3293551402910054e-05, |
| "loss": 4.2162, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3285165455399534e-05, |
| "loss": 4.2169, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3276779507889013e-05, |
| "loss": 4.2225, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326840993918223e-05, |
| "loss": 4.218, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32600239916717e-05, |
| "loss": 4.2313, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.325163804416118e-05, |
| "loss": 4.2138, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.324325209665067e-05, |
| "loss": 4.2031, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.323486614914015e-05, |
| "loss": 4.2225, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.322649658043336e-05, |
| "loss": 4.203, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321811063292284e-05, |
| "loss": 4.2038, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320972468541232e-05, |
| "loss": 4.2251, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32013387379018e-05, |
| "loss": 4.2219, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.319296916919501e-05, |
| "loss": 4.2069, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.318459960048822e-05, |
| "loss": 4.2108, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.31762136529777e-05, |
| "loss": 4.1985, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3167827705467176e-05, |
| "loss": 4.2096, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3159441757956656e-05, |
| "loss": 4.2209, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3151055810446136e-05, |
| "loss": 4.2182, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.314266986293562e-05, |
| "loss": 4.2178, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.31342839154251e-05, |
| "loss": 4.2147, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.312589796791458e-05, |
| "loss": 4.2313, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311751202040406e-05, |
| "loss": 4.2035, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310912607289354e-05, |
| "loss": 4.2166, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310075650418675e-05, |
| "loss": 4.2123, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.309237055667623e-05, |
| "loss": 4.1927, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.308398460916571e-05, |
| "loss": 4.2188, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.307561504045892e-05, |
| "loss": 4.2164, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.30672290929484e-05, |
| "loss": 4.2149, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.305884314543788e-05, |
| "loss": 4.2104, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.305045719792736e-05, |
| "loss": 4.1985, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.304207125041684e-05, |
| "loss": 4.2033, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.303368530290632e-05, |
| "loss": 4.208, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.30252993553958e-05, |
| "loss": 4.2158, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3016929786689016e-05, |
| "loss": 4.2038, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3008560217982226e-05, |
| "loss": 4.2148, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.3000190649275435e-05, |
| "loss": 4.2027, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2991804701764915e-05, |
| "loss": 4.208, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2983418754254395e-05, |
| "loss": 4.2004, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2975032806743874e-05, |
| "loss": 4.2022, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2966646859233354e-05, |
| "loss": 4.2172, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2958260911722834e-05, |
| "loss": 4.216, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2949874964212314e-05, |
| "loss": 4.2195, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2941489016701794e-05, |
| "loss": 4.1925, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2933103069191274e-05, |
| "loss": 4.206, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2924717121680754e-05, |
| "loss": 4.202, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291633117417024e-05, |
| "loss": 4.211, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.290796160546345e-05, |
| "loss": 4.2022, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289957565795293e-05, |
| "loss": 4.2016, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289118971044241e-05, |
| "loss": 4.1955, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.288282014173562e-05, |
| "loss": 4.2121, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.28744341942251e-05, |
| "loss": 4.1874, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.286604824671458e-05, |
| "loss": 4.2033, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.285766229920406e-05, |
| "loss": 4.1884, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284927635169354e-05, |
| "loss": 4.2066, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284089040418301e-05, |
| "loss": 4.1986, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.283250445667249e-05, |
| "loss": 4.2023, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.282411850916198e-05, |
| "loss": 4.1989, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.281573256165146e-05, |
| "loss": 4.1989, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.280734661414094e-05, |
| "loss": 4.203, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279896066663042e-05, |
| "loss": 4.1965, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.27905747191199e-05, |
| "loss": 4.203, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.278218877160938e-05, |
| "loss": 4.2171, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.277381920290259e-05, |
| "loss": 4.1994, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.276543325539207e-05, |
| "loss": 4.194, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.275704730788155e-05, |
| "loss": 4.1987, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274866136037103e-05, |
| "loss": 4.2008, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274027541286051e-05, |
| "loss": 4.1915, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.273188946534999e-05, |
| "loss": 4.2013, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.272350351783947e-05, |
| "loss": 4.188, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2715133949132676e-05, |
| "loss": 4.2069, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.270674800162216e-05, |
| "loss": 4.2038, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269836205411164e-05, |
| "loss": 4.1956, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.268997610660112e-05, |
| "loss": 4.188, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26815901590906e-05, |
| "loss": 4.1934, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.267322059038381e-05, |
| "loss": 4.1938, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.266483464287329e-05, |
| "loss": 4.2032, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.265644869536277e-05, |
| "loss": 4.1995, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264806274785225e-05, |
| "loss": 4.2015, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2639676800341725e-05, |
| "loss": 4.1777, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2631290852831205e-05, |
| "loss": 4.2008, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2622904905320685e-05, |
| "loss": 4.1952, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2614518957810165e-05, |
| "loss": 4.2064, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.260614938910338e-05, |
| "loss": 4.1867, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.259776344159286e-05, |
| "loss": 4.2056, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258937749408234e-05, |
| "loss": 4.1968, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258099154657182e-05, |
| "loss": 4.2028, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.257262197786503e-05, |
| "loss": 4.1883, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.256423603035451e-05, |
| "loss": 4.2042, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.255585008284399e-05, |
| "loss": 4.18, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.254746413533347e-05, |
| "loss": 4.1965, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253909456662668e-05, |
| "loss": 4.1954, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253070861911616e-05, |
| "loss": 4.1869, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252232267160564e-05, |
| "loss": 4.193, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251393672409512e-05, |
| "loss": 4.1888, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.250558353419207e-05, |
| "loss": 4.1964, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.18727445602417, |
| "eval_runtime": 300.1993, |
| "eval_samples_per_second": 1271.126, |
| "eval_steps_per_second": 39.724, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249719758668155e-05, |
| "loss": 4.1922, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2488811639171024e-05, |
| "loss": 4.1789, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2480425691660504e-05, |
| "loss": 4.1966, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2472039744149984e-05, |
| "loss": 4.1871, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2463653796639464e-05, |
| "loss": 4.1965, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2455267849128943e-05, |
| "loss": 4.1874, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2446881901618423e-05, |
| "loss": 4.1902, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24384959541079e-05, |
| "loss": 4.1919, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243011000659738e-05, |
| "loss": 4.1868, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242172405908686e-05, |
| "loss": 4.1895, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.241333811157634e-05, |
| "loss": 4.1947, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.240495216406582e-05, |
| "loss": 4.1938, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239658259535903e-05, |
| "loss": 4.1858, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238819664784852e-05, |
| "loss": 4.1829, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2379810700338e-05, |
| "loss": 4.1876, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237142475282748e-05, |
| "loss": 4.1692, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236303880531696e-05, |
| "loss": 4.1856, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235465285780644e-05, |
| "loss": 4.1851, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234626691029591e-05, |
| "loss": 4.1808, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233788096278539e-05, |
| "loss": 4.2039, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232949501527487e-05, |
| "loss": 4.1956, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232112544656809e-05, |
| "loss": 4.1971, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.231273949905756e-05, |
| "loss": 4.1835, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230435355154704e-05, |
| "loss": 4.1899, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.229596760403652e-05, |
| "loss": 4.1855, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2287581656526e-05, |
| "loss": 4.1858, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227919570901549e-05, |
| "loss": 4.1832, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.22708261403087e-05, |
| "loss": 4.185, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226244019279818e-05, |
| "loss": 4.1757, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2254054245287657e-05, |
| "loss": 4.1783, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2245668297777137e-05, |
| "loss": 4.1809, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2237282350266617e-05, |
| "loss": 4.1897, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2228896402756096e-05, |
| "loss": 4.1833, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2220510455245576e-05, |
| "loss": 4.1897, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2212124507735056e-05, |
| "loss": 4.173, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2203754939028265e-05, |
| "loss": 4.1779, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2195368991517745e-05, |
| "loss": 4.1812, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2186983044007225e-05, |
| "loss": 4.1701, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2178597096496705e-05, |
| "loss": 4.1782, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2170211148986185e-05, |
| "loss": 4.1709, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.216182520147567e-05, |
| "loss": 4.181, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215343925396515e-05, |
| "loss": 4.183, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2145053306454625e-05, |
| "loss": 4.1794, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2136667358944105e-05, |
| "loss": 4.1762, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2128281411433585e-05, |
| "loss": 4.186, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21199118427268e-05, |
| "loss": 4.1714, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2111525895216274e-05, |
| "loss": 4.1838, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2103139947705754e-05, |
| "loss": 4.1758, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2094754000195234e-05, |
| "loss": 4.1529, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2086368052684714e-05, |
| "loss": 4.1975, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207799848397792e-05, |
| "loss": 4.1676, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206961253646741e-05, |
| "loss": 4.1788, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206122658895689e-05, |
| "loss": 4.1606, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.205284064144637e-05, |
| "loss": 4.1689, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204447107273958e-05, |
| "loss": 4.1644, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2036101504032795e-05, |
| "loss": 4.1734, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2027715556522275e-05, |
| "loss": 4.1749, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201932960901175e-05, |
| "loss": 4.1757, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201094366150123e-05, |
| "loss": 4.1838, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200255771399071e-05, |
| "loss": 4.1696, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.199417176648019e-05, |
| "loss": 4.1563, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.198578581896967e-05, |
| "loss": 4.1826, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197739987145915e-05, |
| "loss": 4.1574, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1969030302752363e-05, |
| "loss": 4.1559, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1960644355241843e-05, |
| "loss": 4.1809, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1952258407731323e-05, |
| "loss": 4.1781, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19438724602208e-05, |
| "loss": 4.1603, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193550289151401e-05, |
| "loss": 4.1695, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.192711694400349e-05, |
| "loss": 4.1532, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191873099649297e-05, |
| "loss": 4.1656, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191034504898245e-05, |
| "loss": 4.174, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.190195910147193e-05, |
| "loss": 4.1719, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189358953276514e-05, |
| "loss": 4.1759, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188520358525462e-05, |
| "loss": 4.1733, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18768176377441e-05, |
| "loss": 4.1835, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186843169023358e-05, |
| "loss": 4.1579, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186004574272306e-05, |
| "loss": 4.174, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.185165979521254e-05, |
| "loss": 4.1647, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.184327384770203e-05, |
| "loss": 4.1549, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.183488790019151e-05, |
| "loss": 4.1692, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182651833148472e-05, |
| "loss": 4.1756, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.18181323839742e-05, |
| "loss": 4.1717, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180974643646368e-05, |
| "loss": 4.1685, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180136048895316e-05, |
| "loss": 4.1544, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1792990920246366e-05, |
| "loss": 4.156, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1784604972735846e-05, |
| "loss": 4.1689, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1776219025225326e-05, |
| "loss": 4.1715, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1767833077714806e-05, |
| "loss": 4.1603, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1759463509008015e-05, |
| "loss": 4.1724, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1751077561497495e-05, |
| "loss": 4.1518, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174269161398698e-05, |
| "loss": 4.168, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173430566647646e-05, |
| "loss": 4.1583, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1725919718965935e-05, |
| "loss": 4.1611, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1717533771455415e-05, |
| "loss": 4.1746, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1709147823944895e-05, |
| "loss": 4.174, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1700761876434375e-05, |
| "loss": 4.1764, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1692375928923855e-05, |
| "loss": 4.1498, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1684006360217064e-05, |
| "loss": 4.1644, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1675620412706544e-05, |
| "loss": 4.1563, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166725084399975e-05, |
| "loss": 4.1692, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165886489648923e-05, |
| "loss": 4.1596, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165049532778245e-05, |
| "loss": 4.1628, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1642109380271935e-05, |
| "loss": 4.1547, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163372343276141e-05, |
| "loss": 4.1666, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162533748525089e-05, |
| "loss": 4.1489, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.161695153774037e-05, |
| "loss": 4.1622, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160856559022985e-05, |
| "loss": 4.1458, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160017964271933e-05, |
| "loss": 4.1663, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.159179369520881e-05, |
| "loss": 4.1571, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158340774769829e-05, |
| "loss": 4.1593, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.157502180018777e-05, |
| "loss": 4.1567, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156663585267725e-05, |
| "loss": 4.1606, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155824990516673e-05, |
| "loss": 4.1596, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154988033645994e-05, |
| "loss": 4.1542, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154149438894942e-05, |
| "loss": 4.1652, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1533108441438904e-05, |
| "loss": 4.1715, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152475525153584e-05, |
| "loss": 4.1645, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151636930402532e-05, |
| "loss": 4.1545, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15079833565148e-05, |
| "loss": 4.1542, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149959740900428e-05, |
| "loss": 4.163, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149121146149376e-05, |
| "loss": 4.1501, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148284189278697e-05, |
| "loss": 4.1623, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147445594527645e-05, |
| "loss": 4.1466, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.146606999776593e-05, |
| "loss": 4.1667, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.145768405025541e-05, |
| "loss": 4.1607, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144929810274489e-05, |
| "loss": 4.1578, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144091215523437e-05, |
| "loss": 4.1494, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.143252620772386e-05, |
| "loss": 4.1535, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.142414026021334e-05, |
| "loss": 4.1529, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.141575431270282e-05, |
| "loss": 4.166, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14073683651923e-05, |
| "loss": 4.1588, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139898241768177e-05, |
| "loss": 4.1588, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139059647017125e-05, |
| "loss": 4.1411, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138221052266073e-05, |
| "loss": 4.1615, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1373840953953946e-05, |
| "loss": 4.1532, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136545500644342e-05, |
| "loss": 4.1671, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1357085437736635e-05, |
| "loss": 4.1477, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1348699490226115e-05, |
| "loss": 4.163, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1340313542715595e-05, |
| "loss": 4.1544, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1331927595205075e-05, |
| "loss": 4.1692, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1323541647694555e-05, |
| "loss": 4.1487, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.131517207898777e-05, |
| "loss": 4.1574, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1306786131477244e-05, |
| "loss": 4.1442, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1298400183966724e-05, |
| "loss": 4.1559, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290014236456204e-05, |
| "loss": 4.1559, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1281628288945684e-05, |
| "loss": 4.1467, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1273242341435164e-05, |
| "loss": 4.1566, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1264856393924644e-05, |
| "loss": 4.1444, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1256470446414124e-05, |
| "loss": 4.1603, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.156093120574951, |
| "eval_runtime": 305.6676, |
| "eval_samples_per_second": 1248.386, |
| "eval_steps_per_second": 39.013, |
| "step": 534240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.124810087770733e-05, |
| "loss": 4.1532, |
| "step": 534528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.123971493019681e-05, |
| "loss": 4.14, |
| "step": 535040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.123132898268629e-05, |
| "loss": 4.1582, |
| "step": 535552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.122294303517577e-05, |
| "loss": 4.1465, |
| "step": 536064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.121455708766526e-05, |
| "loss": 4.1594, |
| "step": 536576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.120618751895847e-05, |
| "loss": 4.1512, |
| "step": 537088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.119780157144795e-05, |
| "loss": 4.1527, |
| "step": 537600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118941562393743e-05, |
| "loss": 4.1543, |
| "step": 538112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118102967642691e-05, |
| "loss": 4.1455, |
| "step": 538624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.117266010772012e-05, |
| "loss": 4.1516, |
| "step": 539136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.11642741602096e-05, |
| "loss": 4.1553, |
| "step": 539648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.115588821269908e-05, |
| "loss": 4.1539, |
| "step": 540160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.114751864399229e-05, |
| "loss": 4.1524, |
| "step": 540672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.113913269648177e-05, |
| "loss": 4.1408, |
| "step": 541184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1130746748971247e-05, |
| "loss": 4.1446, |
| "step": 541696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1122360801460727e-05, |
| "loss": 4.1349, |
| "step": 542208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.111397485395021e-05, |
| "loss": 4.1453, |
| "step": 542720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.110558890643969e-05, |
| "loss": 4.1506, |
| "step": 543232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.109720295892917e-05, |
| "loss": 4.1403, |
| "step": 543744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108881701141865e-05, |
| "loss": 4.1629, |
| "step": 544256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108044744271186e-05, |
| "loss": 4.1583, |
| "step": 544768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.107206149520134e-05, |
| "loss": 4.159, |
| "step": 545280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.106369192649455e-05, |
| "loss": 4.1473, |
| "step": 545792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.105530597898403e-05, |
| "loss": 4.1468, |
| "step": 546304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.104692003147351e-05, |
| "loss": 4.1529, |
| "step": 546816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103853408396299e-05, |
| "loss": 4.1436, |
| "step": 547328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103014813645247e-05, |
| "loss": 4.1449, |
| "step": 547840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.102177856774568e-05, |
| "loss": 4.152, |
| "step": 548352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.101339262023517e-05, |
| "loss": 4.1338, |
| "step": 548864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.100500667272465e-05, |
| "loss": 4.1403, |
| "step": 549376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.099662072521413e-05, |
| "loss": 4.1437, |
| "step": 549888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.098823477770361e-05, |
| "loss": 4.148, |
| "step": 550400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.097984883019308e-05, |
| "loss": 4.1491, |
| "step": 550912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.097146288268256e-05, |
| "loss": 4.1527, |
| "step": 551424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.096307693517204e-05, |
| "loss": 4.1358, |
| "step": 551936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.095470736646525e-05, |
| "loss": 4.1381, |
| "step": 552448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.094632141895473e-05, |
| "loss": 4.147, |
| "step": 552960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.093793547144421e-05, |
| "loss": 4.1336, |
| "step": 553472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.092954952393369e-05, |
| "loss": 4.1413, |
| "step": 553984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.092116357642317e-05, |
| "loss": 4.1308, |
| "step": 554496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.091277762891265e-05, |
| "loss": 4.1464, |
| "step": 555008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0904391681402136e-05, |
| "loss": 4.1416, |
| "step": 555520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0896005733891615e-05, |
| "loss": 4.1444, |
| "step": 556032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0887636165184825e-05, |
| "loss": 4.143, |
| "step": 556544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0879250217674305e-05, |
| "loss": 4.1454, |
| "step": 557056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0870864270163784e-05, |
| "loss": 4.1369, |
| "step": 557568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0862478322653264e-05, |
| "loss": 4.1445, |
| "step": 558080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0854108753946474e-05, |
| "loss": 4.139, |
| "step": 558592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.084573918523968e-05, |
| "loss": 4.1187, |
| "step": 559104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.083735323772916e-05, |
| "loss": 4.1586, |
| "step": 559616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082896729021864e-05, |
| "loss": 4.1295, |
| "step": 560128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082058134270812e-05, |
| "loss": 4.1421, |
| "step": 560640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.08121953951976e-05, |
| "loss": 4.1292, |
| "step": 561152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.080380944768708e-05, |
| "loss": 4.1292, |
| "step": 561664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.079542350017657e-05, |
| "loss": 4.1254, |
| "step": 562176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.078703755266605e-05, |
| "loss": 4.1351, |
| "step": 562688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077865160515553e-05, |
| "loss": 4.1409, |
| "step": 563200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.077028203644874e-05, |
| "loss": 4.1344, |
| "step": 563712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.076189608893822e-05, |
| "loss": 4.1489, |
| "step": 564224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.07535101414277e-05, |
| "loss": 4.1401, |
| "step": 564736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.074512419391718e-05, |
| "loss": 4.1161, |
| "step": 565248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.073673824640666e-05, |
| "loss": 4.1421, |
| "step": 565760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072836867769987e-05, |
| "loss": 4.1244, |
| "step": 566272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.071998273018935e-05, |
| "loss": 4.1175, |
| "step": 566784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.071159678267883e-05, |
| "loss": 4.1484, |
| "step": 567296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0703227213972036e-05, |
| "loss": 4.1394, |
| "step": 567808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.069484126646152e-05, |
| "loss": 4.1252, |
| "step": 568320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0686455318951e-05, |
| "loss": 4.1343, |
| "step": 568832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067806937144048e-05, |
| "loss": 4.1195, |
| "step": 569344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.066969980273369e-05, |
| "loss": 4.1298, |
| "step": 569856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.066131385522317e-05, |
| "loss": 4.1365, |
| "step": 570368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.065292790771265e-05, |
| "loss": 4.1376, |
| "step": 570880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.064454196020213e-05, |
| "loss": 4.1407, |
| "step": 571392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.063615601269161e-05, |
| "loss": 4.1345, |
| "step": 571904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0627770065181085e-05, |
| "loss": 4.1493, |
| "step": 572416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0619384117670565e-05, |
| "loss": 4.1182, |
| "step": 572928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0610998170160045e-05, |
| "loss": 4.1414, |
| "step": 573440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0602612222649525e-05, |
| "loss": 4.1265, |
| "step": 573952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0594226275139005e-05, |
| "loss": 4.1225, |
| "step": 574464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.058584032762849e-05, |
| "loss": 4.1349, |
| "step": 574976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.057745438011797e-05, |
| "loss": 4.1397, |
| "step": 575488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056908481141118e-05, |
| "loss": 4.1351, |
| "step": 576000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056069886390066e-05, |
| "loss": 4.1377, |
| "step": 576512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.055231291639014e-05, |
| "loss": 4.1151, |
| "step": 577024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.054392696887962e-05, |
| "loss": 4.1207, |
| "step": 577536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.053555740017283e-05, |
| "loss": 4.1319, |
| "step": 578048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.052718783146604e-05, |
| "loss": 4.1378, |
| "step": 578560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051880188395552e-05, |
| "loss": 4.1285, |
| "step": 579072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0510415936445e-05, |
| "loss": 4.1365, |
| "step": 579584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.050202998893448e-05, |
| "loss": 4.1183, |
| "step": 580096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.049364404142396e-05, |
| "loss": 4.134, |
| "step": 580608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0485258093913445e-05, |
| "loss": 4.124, |
| "step": 581120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0476872146402925e-05, |
| "loss": 4.1238, |
| "step": 581632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0468502577696134e-05, |
| "loss": 4.1379, |
| "step": 582144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0460116630185614e-05, |
| "loss": 4.1377, |
| "step": 582656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0451730682675094e-05, |
| "loss": 4.1418, |
| "step": 583168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0443344735164574e-05, |
| "loss": 4.1207, |
| "step": 583680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.043497516645778e-05, |
| "loss": 4.1265, |
| "step": 584192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.042658921894726e-05, |
| "loss": 4.1244, |
| "step": 584704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.041820327143674e-05, |
| "loss": 4.1316, |
| "step": 585216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.040981732392622e-05, |
| "loss": 4.1259, |
| "step": 585728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.040144775521943e-05, |
| "loss": 4.1303, |
| "step": 586240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.039306180770891e-05, |
| "loss": 4.1212, |
| "step": 586752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.03846758601984e-05, |
| "loss": 4.1272, |
| "step": 587264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.037628991268788e-05, |
| "loss": 4.1221, |
| "step": 587776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036790396517736e-05, |
| "loss": 4.1243, |
| "step": 588288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035951801766684e-05, |
| "loss": 4.1097, |
| "step": 588800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035113207015632e-05, |
| "loss": 4.1333, |
| "step": 589312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.03427461226458e-05, |
| "loss": 4.1247, |
| "step": 589824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.033437655393901e-05, |
| "loss": 4.1271, |
| "step": 590336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.032599060642849e-05, |
| "loss": 4.1215, |
| "step": 590848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.031760465891797e-05, |
| "loss": 4.1262, |
| "step": 591360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030921871140745e-05, |
| "loss": 4.1279, |
| "step": 591872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030083276389692e-05, |
| "loss": 4.1206, |
| "step": 592384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0292479573993866e-05, |
| "loss": 4.1312, |
| "step": 592896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.028409362648335e-05, |
| "loss": 4.1364, |
| "step": 593408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.027570767897283e-05, |
| "loss": 4.1253, |
| "step": 593920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.026732173146231e-05, |
| "loss": 4.1289, |
| "step": 594432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025893578395179e-05, |
| "loss": 4.118, |
| "step": 594944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025054983644127e-05, |
| "loss": 4.1242, |
| "step": 595456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0242163888930745e-05, |
| "loss": 4.1248, |
| "step": 595968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.023379432022396e-05, |
| "loss": 4.1276, |
| "step": 596480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.022540837271344e-05, |
| "loss": 4.1109, |
| "step": 596992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.021703880400665e-05, |
| "loss": 4.1311, |
| "step": 597504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.020865285649613e-05, |
| "loss": 4.1275, |
| "step": 598016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.020026690898561e-05, |
| "loss": 4.126, |
| "step": 598528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.019188096147509e-05, |
| "loss": 4.1159, |
| "step": 599040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.018349501396457e-05, |
| "loss": 4.1199, |
| "step": 599552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.017510906645405e-05, |
| "loss": 4.1187, |
| "step": 600064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.016672311894353e-05, |
| "loss": 4.1296, |
| "step": 600576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015833717143301e-05, |
| "loss": 4.1246, |
| "step": 601088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.014995122392249e-05, |
| "loss": 4.1276, |
| "step": 601600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01415816552157e-05, |
| "loss": 4.1063, |
| "step": 602112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.013319570770518e-05, |
| "loss": 4.1277, |
| "step": 602624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.012480976019466e-05, |
| "loss": 4.1224, |
| "step": 603136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.011642381268414e-05, |
| "loss": 4.133, |
| "step": 603648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010805424397735e-05, |
| "loss": 4.1171, |
| "step": 604160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009966829646683e-05, |
| "loss": 4.1258, |
| "step": 604672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009128234895631e-05, |
| "loss": 4.1207, |
| "step": 605184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.008289640144579e-05, |
| "loss": 4.1377, |
| "step": 605696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.007451045393527e-05, |
| "loss": 4.1167, |
| "step": 606208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0066124506424755e-05, |
| "loss": 4.1251, |
| "step": 606720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0057738558914235e-05, |
| "loss": 4.1177, |
| "step": 607232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0049352611403714e-05, |
| "loss": 4.1236, |
| "step": 607744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0040983042696924e-05, |
| "loss": 4.1187, |
| "step": 608256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0032597095186404e-05, |
| "loss": 4.1154, |
| "step": 608768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0024211147675883e-05, |
| "loss": 4.1235, |
| "step": 609280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0015825200165363e-05, |
| "loss": 4.1112, |
| "step": 609792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0007439252654843e-05, |
| "loss": 4.1312, |
| "step": 610304 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.131312370300293, |
| "eval_runtime": 307.6483, |
| "eval_samples_per_second": 1240.348, |
| "eval_steps_per_second": 38.762, |
| "step": 610560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999905330514432e-05, |
| "loss": 4.1158, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99906673576338e-05, |
| "loss": 4.1105, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998228141012328e-05, |
| "loss": 4.1227, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9973895462612756e-05, |
| "loss": 4.1174, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9965509515102236e-05, |
| "loss": 4.1285, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995712356759172e-05, |
| "loss": 4.1187, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99487376200812e-05, |
| "loss": 4.1211, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994035167257068e-05, |
| "loss": 4.123, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.993196572506016e-05, |
| "loss": 4.1125, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.992357977754964e-05, |
| "loss": 4.121, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.991519383003912e-05, |
| "loss": 4.1246, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99068078825286e-05, |
| "loss": 4.1228, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989843831382181e-05, |
| "loss": 4.1243, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989005236631129e-05, |
| "loss": 4.1065, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.988166641880077e-05, |
| "loss": 4.1165, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.987328047129025e-05, |
| "loss": 4.1022, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.986489452377973e-05, |
| "loss": 4.1173, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.985650857626921e-05, |
| "loss": 4.1173, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.984812262875869e-05, |
| "loss": 4.1097, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983973668124817e-05, |
| "loss": 4.1304, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983135073373765e-05, |
| "loss": 4.1268, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982296478622713e-05, |
| "loss": 4.1256, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.981457883871661e-05, |
| "loss": 4.119, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980619289120609e-05, |
| "loss": 4.1179, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979780694369557e-05, |
| "loss": 4.1238, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978943737498878e-05, |
| "loss": 4.1105, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978105142747826e-05, |
| "loss": 4.1123, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.977266547996774e-05, |
| "loss": 4.1198, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.976429591126095e-05, |
| "loss": 4.1054, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.975590996375043e-05, |
| "loss": 4.1076, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.974752401623991e-05, |
| "loss": 4.1151, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973913806872939e-05, |
| "loss": 4.1183, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973075212121887e-05, |
| "loss": 4.112, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9722366173708356e-05, |
| "loss": 4.1242, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9713980226197836e-05, |
| "loss": 4.1058, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9705610657491045e-05, |
| "loss": 4.1075, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9697224709980525e-05, |
| "loss": 4.1138, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9688838762470005e-05, |
| "loss": 4.098, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9680452814959485e-05, |
| "loss": 4.1128, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9672066867448965e-05, |
| "loss": 4.1032, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9663680919938445e-05, |
| "loss": 4.1112, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9655294972427925e-05, |
| "loss": 4.1124, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9646909024917405e-05, |
| "loss": 4.1128, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9638539456210614e-05, |
| "loss": 4.1146, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9630153508700094e-05, |
| "loss": 4.1137, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9621767561189574e-05, |
| "loss": 4.1083, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9613381613679054e-05, |
| "loss": 4.1121, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960499566616854e-05, |
| "loss": 4.111, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959662609746175e-05, |
| "loss": 4.0875, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958824014995123e-05, |
| "loss": 4.1226, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957985420244071e-05, |
| "loss": 4.0996, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957146825493018e-05, |
| "loss": 4.1142, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956308230741966e-05, |
| "loss": 4.0997, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955471273871288e-05, |
| "loss": 4.0982, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.954632679120236e-05, |
| "loss": 4.0997, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953794084369183e-05, |
| "loss": 4.0997, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952955489618131e-05, |
| "loss": 4.1147, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952116894867079e-05, |
| "loss": 4.0998, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.951279937996401e-05, |
| "loss": 4.1213, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.950441343245349e-05, |
| "loss": 4.1092, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.949602748494297e-05, |
| "loss": 4.087, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948765791623618e-05, |
| "loss": 4.1079, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9479271968725656e-05, |
| "loss": 4.1013, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9470886021215136e-05, |
| "loss": 4.0877, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9462500073704616e-05, |
| "loss": 4.1148, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9454114126194096e-05, |
| "loss": 4.1079, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9445728178683576e-05, |
| "loss": 4.096, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9437342231173056e-05, |
| "loss": 4.1058, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9428956283662536e-05, |
| "loss": 4.0862, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9420586714955745e-05, |
| "loss": 4.1014, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.941220076744523e-05, |
| "loss": 4.1056, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.940381481993471e-05, |
| "loss": 4.1042, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.939542887242419e-05, |
| "loss": 4.1143, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.938704292491367e-05, |
| "loss": 4.1006, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937867335620688e-05, |
| "loss": 4.1184, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937028740869636e-05, |
| "loss": 4.0967, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.936191783998957e-05, |
| "loss": 4.1091, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.935353189247905e-05, |
| "loss": 4.0965, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.934514594496853e-05, |
| "loss": 4.0945, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.933675999745801e-05, |
| "loss": 4.1048, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932839042875122e-05, |
| "loss": 4.1132, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.93200044812407e-05, |
| "loss": 4.1037, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9311618533730186e-05, |
| "loss": 4.1093, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9303232586219666e-05, |
| "loss": 4.087, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9294846638709146e-05, |
| "loss": 4.0908, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9286460691198626e-05, |
| "loss": 4.1062, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9278074743688105e-05, |
| "loss": 4.1047, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9269688796177585e-05, |
| "loss": 4.0974, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9261302848667065e-05, |
| "loss": 4.1069, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9252916901156545e-05, |
| "loss": 4.0919, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924453095364602e-05, |
| "loss": 4.1022, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9236161384939234e-05, |
| "loss": 4.0959, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9227775437428714e-05, |
| "loss": 4.0968, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9219389489918194e-05, |
| "loss": 4.1055, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.921100354240767e-05, |
| "loss": 4.1089, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.920261759489715e-05, |
| "loss": 4.1139, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.919424802619036e-05, |
| "loss": 4.092, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.918587845748358e-05, |
| "loss": 4.0997, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.917749250997306e-05, |
| "loss": 4.0955, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.916910656246254e-05, |
| "loss": 4.1017, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.916072061495202e-05, |
| "loss": 4.0991, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.915233466744149e-05, |
| "loss": 4.0998, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.914394871993097e-05, |
| "loss": 4.0968, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.913557915122419e-05, |
| "loss": 4.0956, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912719320371367e-05, |
| "loss": 4.0929, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911882363500688e-05, |
| "loss": 4.0957, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911043768749636e-05, |
| "loss": 4.0829, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910205173998584e-05, |
| "loss": 4.1021, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909366579247532e-05, |
| "loss": 4.0981, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90852798449648e-05, |
| "loss": 4.093, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907689389745428e-05, |
| "loss": 4.0972, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906850794994376e-05, |
| "loss": 4.0943, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906012200243324e-05, |
| "loss": 4.0987, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.905173605492272e-05, |
| "loss": 4.092, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90433501074122e-05, |
| "loss": 4.1061, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.903496415990168e-05, |
| "loss": 4.107, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9026578212391157e-05, |
| "loss": 4.0998, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9018208643684366e-05, |
| "loss": 4.0981, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9009822696173846e-05, |
| "loss": 4.0925, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9001436748663326e-05, |
| "loss": 4.0923, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.899306717995654e-05, |
| "loss": 4.0984, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.898468123244602e-05, |
| "loss": 4.0993, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.89762952849355e-05, |
| "loss": 4.0814, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896790933742498e-05, |
| "loss": 4.1063, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895952338991446e-05, |
| "loss": 4.0994, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895113744240394e-05, |
| "loss": 4.0989, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894275149489342e-05, |
| "loss": 4.0891, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.89343655473829e-05, |
| "loss": 4.0909, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.892597959987238e-05, |
| "loss": 4.0914, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8917593652361854e-05, |
| "loss": 4.1013, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8909207704851334e-05, |
| "loss": 4.0975, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8900821757340814e-05, |
| "loss": 4.1019, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.889245218863403e-05, |
| "loss": 4.0773, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.888406624112351e-05, |
| "loss": 4.101, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.887568029361299e-05, |
| "loss": 4.0958, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8867310724906206e-05, |
| "loss": 4.1087, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885892477739568e-05, |
| "loss": 4.0908, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885053882988516e-05, |
| "loss": 4.0943, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.884215288237464e-05, |
| "loss": 4.0977, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.883376693486412e-05, |
| "loss": 4.1058, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.88253809873536e-05, |
| "loss": 4.0886, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.881701141864681e-05, |
| "loss": 4.0988, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880862547113629e-05, |
| "loss": 4.0927, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880023952362577e-05, |
| "loss": 4.1011, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.879185357611525e-05, |
| "loss": 4.089, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.878346762860473e-05, |
| "loss": 4.086, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.877508168109421e-05, |
| "loss": 4.0944, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8766695733583694e-05, |
| "loss": 4.0869, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8758309786073174e-05, |
| "loss": 4.102, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.111427307128906, |
| "eval_runtime": 303.002, |
| "eval_samples_per_second": 1259.368, |
| "eval_steps_per_second": 39.356, |
| "step": 686880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8749940217366384e-05, |
| "loss": 4.0898, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8741554269855863e-05, |
| "loss": 4.0832, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.873318470114907e-05, |
| "loss": 4.0946, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.872479875363855e-05, |
| "loss": 4.0908, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.871641280612803e-05, |
| "loss": 4.1051, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870802685861751e-05, |
| "loss": 4.0923, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869964091110699e-05, |
| "loss": 4.0887, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86912713424002e-05, |
| "loss": 4.0997, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.868290177369342e-05, |
| "loss": 4.0866, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86745158261829e-05, |
| "loss": 4.0938, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.866612987867238e-05, |
| "loss": 4.0968, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865774393116186e-05, |
| "loss": 4.0957, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864935798365134e-05, |
| "loss": 4.0963, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864097203614082e-05, |
| "loss": 4.0812, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86325860886303e-05, |
| "loss": 4.0871, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.862420014111978e-05, |
| "loss": 4.0727, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8615830572412986e-05, |
| "loss": 4.0909, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8607444624902466e-05, |
| "loss": 4.094, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8599058677391946e-05, |
| "loss": 4.0802, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8590672729881426e-05, |
| "loss": 4.1019, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8582286782370906e-05, |
| "loss": 4.0998, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8573900834860386e-05, |
| "loss": 4.0964, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8565514887349866e-05, |
| "loss": 4.0946, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8557128939839346e-05, |
| "loss": 4.0923, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854875937113256e-05, |
| "loss": 4.0945, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854037342362204e-05, |
| "loss": 4.0902, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8531987476111515e-05, |
| "loss": 4.0782, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8523601528600995e-05, |
| "loss": 4.095, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.851523195989421e-05, |
| "loss": 4.0769, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.850684601238369e-05, |
| "loss": 4.0852, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8498460064873164e-05, |
| "loss": 4.0898, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8490074117362644e-05, |
| "loss": 4.0919, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8481688169852124e-05, |
| "loss": 4.0849, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8473302222341604e-05, |
| "loss": 4.097, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.846493265363482e-05, |
| "loss": 4.082, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84565467061243e-05, |
| "loss": 4.0787, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.844816075861378e-05, |
| "loss": 4.0899, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843977481110326e-05, |
| "loss": 4.0737, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843138886359274e-05, |
| "loss": 4.0842, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.842300291608222e-05, |
| "loss": 4.0844, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84146169685717e-05, |
| "loss": 4.0844, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.840623102106118e-05, |
| "loss": 4.0869, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839784507355066e-05, |
| "loss": 4.0877, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838947550484387e-05, |
| "loss": 4.0875, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838108955733335e-05, |
| "loss": 4.0872, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.837270360982283e-05, |
| "loss": 4.0808, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.836431766231231e-05, |
| "loss": 4.0854, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.835593171480179e-05, |
| "loss": 4.0872, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8347562146095004e-05, |
| "loss": 4.0646, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8339176198584484e-05, |
| "loss": 4.0954, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.833080662987769e-05, |
| "loss": 4.0772, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.832242068236717e-05, |
| "loss": 4.0858, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.831405111366038e-05, |
| "loss": 4.0737, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.830566516614986e-05, |
| "loss": 4.0711, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.829727921863934e-05, |
| "loss": 4.0742, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.828889327112882e-05, |
| "loss": 4.0752, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.82805073236183e-05, |
| "loss": 4.09, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.827212137610778e-05, |
| "loss": 4.075, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.826373542859726e-05, |
| "loss": 4.0967, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.825534948108674e-05, |
| "loss": 4.0866, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.824696353357622e-05, |
| "loss": 4.0623, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.82385775860657e-05, |
| "loss": 4.0822, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823019163855518e-05, |
| "loss": 4.071, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822180569104466e-05, |
| "loss": 4.0689, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.821343612233788e-05, |
| "loss": 4.0868, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8205066553631087e-05, |
| "loss": 4.0843, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8196680606120567e-05, |
| "loss": 4.0744, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8188294658610047e-05, |
| "loss": 4.0825, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.817990871109952e-05, |
| "loss": 4.0636, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8171522763589e-05, |
| "loss": 4.0723, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.816313681607848e-05, |
| "loss": 4.0821, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.815475086856796e-05, |
| "loss": 4.0819, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.814636492105744e-05, |
| "loss": 4.0891, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813797897354692e-05, |
| "loss": 4.0752, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8129609404840135e-05, |
| "loss": 4.0957, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8121223457329615e-05, |
| "loss": 4.0687, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8112837509819095e-05, |
| "loss": 4.0879, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8104451562308575e-05, |
| "loss": 4.0688, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8096081993601784e-05, |
| "loss": 4.0722, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8087712424894993e-05, |
| "loss": 4.0784, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.807932647738447e-05, |
| "loss": 4.0878, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.807095690867769e-05, |
| "loss": 4.0807, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.806257096116717e-05, |
| "loss": 4.0865, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.805418501365665e-05, |
| "loss": 4.063, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804579906614613e-05, |
| "loss": 4.0644, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.803741311863561e-05, |
| "loss": 4.0823, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802902717112509e-05, |
| "loss": 4.0794, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802064122361457e-05, |
| "loss": 4.0724, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.801225527610405e-05, |
| "loss": 4.0818, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800386932859353e-05, |
| "loss": 4.0741, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799548338108301e-05, |
| "loss": 4.0746, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.798709743357249e-05, |
| "loss": 4.0708, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.79787278648657e-05, |
| "loss": 4.0721, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797034191735518e-05, |
| "loss": 4.0841, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.796195596984466e-05, |
| "loss": 4.087, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.795357002233414e-05, |
| "loss": 4.0878, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.794520045362735e-05, |
| "loss": 4.0685, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.793681450611683e-05, |
| "loss": 4.0752, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7928428558606314e-05, |
| "loss": 4.0711, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7920042611095793e-05, |
| "loss": 4.0798, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7911656663585273e-05, |
| "loss": 4.0766, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7903270716074753e-05, |
| "loss": 4.071, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.789488476856423e-05, |
| "loss": 4.0748, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.788649882105371e-05, |
| "loss": 4.068, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.787812925234692e-05, |
| "loss": 4.0701, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.78697433048364e-05, |
| "loss": 4.0729, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.786137373612961e-05, |
| "loss": 4.0623, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.785298778861909e-05, |
| "loss": 4.0774, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.784460184110857e-05, |
| "loss": 4.0741, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.783621589359805e-05, |
| "loss": 4.0695, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782784632489127e-05, |
| "loss": 4.0706, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.781946037738075e-05, |
| "loss": 4.0714, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.781107442987023e-05, |
| "loss": 4.077, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.780268848235971e-05, |
| "loss": 4.0674, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.779430253484919e-05, |
| "loss": 4.0812, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.778591658733866e-05, |
| "loss": 4.0816, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777753063982814e-05, |
| "loss": 4.0778, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.776914469231762e-05, |
| "loss": 4.0744, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.77607587448071e-05, |
| "loss": 4.0709, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.775238917610031e-05, |
| "loss": 4.0666, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.774400322858979e-05, |
| "loss": 4.077, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.773561728107927e-05, |
| "loss": 4.0712, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.772723133356875e-05, |
| "loss": 4.0613, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7718861764861965e-05, |
| "loss": 4.0826, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771049219615518e-05, |
| "loss": 4.0762, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.770210624864466e-05, |
| "loss": 4.074, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.769373667993787e-05, |
| "loss": 4.0659, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.768535073242735e-05, |
| "loss": 4.066, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.767696478491683e-05, |
| "loss": 4.0674, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76685788374063e-05, |
| "loss": 4.0775, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766019288989578e-05, |
| "loss": 4.0744, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765180694238526e-05, |
| "loss": 4.0786, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.764342099487474e-05, |
| "loss": 4.0536, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.763505142616796e-05, |
| "loss": 4.0815, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.762666547865744e-05, |
| "loss": 4.0688, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.761827953114692e-05, |
| "loss": 4.0828, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76098935836364e-05, |
| "loss": 4.0695, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.760150763612588e-05, |
| "loss": 4.0669, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.759312168861536e-05, |
| "loss": 4.0739, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.758473574110484e-05, |
| "loss": 4.0871, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.757634979359432e-05, |
| "loss": 4.0665, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75679638460838e-05, |
| "loss": 4.0746, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755957789857328e-05, |
| "loss": 4.0671, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755119195106276e-05, |
| "loss": 4.0741, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.754280600355224e-05, |
| "loss": 4.0651, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.753443643484545e-05, |
| "loss": 4.065, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7526066866138656e-05, |
| "loss": 4.071, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751768091862814e-05, |
| "loss": 4.0638, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.750929497111762e-05, |
| "loss": 4.0817, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.095146656036377, |
| "eval_runtime": 301.8968, |
| "eval_samples_per_second": 1263.978, |
| "eval_steps_per_second": 39.5, |
| "step": 763200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.75009090236071e-05, |
| "loss": 4.0642, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.749252307609658e-05, |
| "loss": 4.0597, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.748413712858606e-05, |
| "loss": 4.0727, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.747575118107554e-05, |
| "loss": 4.0681, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7467365233565016e-05, |
| "loss": 4.0822, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7458979286054496e-05, |
| "loss": 4.0692, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7450593338543976e-05, |
| "loss": 4.0672, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7442207391033456e-05, |
| "loss": 4.0789, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7433821443522936e-05, |
| "loss": 4.0632, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7425435496012416e-05, |
| "loss": 4.0637, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7417049548501896e-05, |
| "loss": 4.0777, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7408663600991376e-05, |
| "loss": 4.073, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7400277653480856e-05, |
| "loss": 4.0751, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7391891705970336e-05, |
| "loss": 4.0583, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.738350575845982e-05, |
| "loss": 4.0658, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.73751198109493e-05, |
| "loss": 4.048, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.736673386343878e-05, |
| "loss": 4.0692, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735834791592826e-05, |
| "loss": 4.0739, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7349961968417735e-05, |
| "loss": 4.0539, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7341576020907215e-05, |
| "loss": 4.079, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7333190073396695e-05, |
| "loss": 4.0789, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7324820504689904e-05, |
| "loss": 4.0745, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7316434557179384e-05, |
| "loss": 4.0697, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7308048609668864e-05, |
| "loss": 4.0706, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7299662662158344e-05, |
| "loss": 4.0713, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.729129309345156e-05, |
| "loss": 4.0672, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.728290714594104e-05, |
| "loss": 4.0588, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.727452119843052e-05, |
| "loss": 4.0733, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.726613525092e-05, |
| "loss": 4.0573, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.725774930340948e-05, |
| "loss": 4.0574, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724936335589896e-05, |
| "loss": 4.0692, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724097740838844e-05, |
| "loss": 4.0648, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.723259146087792e-05, |
| "loss": 4.0665, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.72242055133674e-05, |
| "loss": 4.0716, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.721583594466061e-05, |
| "loss": 4.058, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720744999715009e-05, |
| "loss": 4.0599, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.71990804284433e-05, |
| "loss": 4.064, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.719069448093278e-05, |
| "loss": 4.0499, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.718230853342226e-05, |
| "loss": 4.0626, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7173922585911745e-05, |
| "loss": 4.0619, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7165536638401225e-05, |
| "loss": 4.0568, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7157167069694434e-05, |
| "loss": 4.0665, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7148781122183914e-05, |
| "loss": 4.0686, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7140395174673394e-05, |
| "loss": 4.0618, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7132009227162874e-05, |
| "loss": 4.0666, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7123623279652353e-05, |
| "loss": 4.0589, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.711525371094556e-05, |
| "loss": 4.0648, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.710686776343504e-05, |
| "loss": 4.0643, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709848181592452e-05, |
| "loss": 4.0457, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7090095868414e-05, |
| "loss": 4.0704, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.708170992090348e-05, |
| "loss": 4.0531, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.707332397339296e-05, |
| "loss": 4.0636, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.706493802588244e-05, |
| "loss": 4.0541, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.705655207837192e-05, |
| "loss": 4.0478, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.70481661308614e-05, |
| "loss": 4.0508, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.703979656215462e-05, |
| "loss": 4.0514, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.703141061464409e-05, |
| "loss": 4.0714, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.702302466713357e-05, |
| "loss": 4.0511, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.701465509842679e-05, |
| "loss": 4.0752, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.700626915091627e-05, |
| "loss": 4.0656, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699788320340574e-05, |
| "loss": 4.04, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.698949725589522e-05, |
| "loss": 4.0594, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69811113083847e-05, |
| "loss": 4.0509, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.697272536087418e-05, |
| "loss": 4.0449, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.696433941336366e-05, |
| "loss": 4.0613, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.695595346585315e-05, |
| "loss": 4.0622, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6947583897146356e-05, |
| "loss": 4.0506, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6939197949635836e-05, |
| "loss": 4.0626, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6930812002125316e-05, |
| "loss": 4.0424, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6922426054614796e-05, |
| "loss": 4.0527, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6914040107104276e-05, |
| "loss": 4.0578, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6905654159593756e-05, |
| "loss": 4.0636, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6897268212083236e-05, |
| "loss": 4.0607, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6888882264572716e-05, |
| "loss": 4.0567, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6880512695865925e-05, |
| "loss": 4.072, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6872126748355405e-05, |
| "loss": 4.0513, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6863740800844885e-05, |
| "loss": 4.0665, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.68553712321381e-05, |
| "loss": 4.0458, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.684698528462758e-05, |
| "loss": 4.0513, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683859933711706e-05, |
| "loss": 4.0558, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683021338960654e-05, |
| "loss": 4.0614, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.682184382089975e-05, |
| "loss": 4.0615, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.681345787338923e-05, |
| "loss": 4.0638, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.680507192587871e-05, |
| "loss": 4.0464, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.679668597836819e-05, |
| "loss": 4.0388, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678830003085767e-05, |
| "loss": 4.0613, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677991408334715e-05, |
| "loss": 4.0563, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677152813583663e-05, |
| "loss": 4.0533, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.67631421883261e-05, |
| "loss": 4.0613, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.675477261961932e-05, |
| "loss": 4.0552, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.67463866721088e-05, |
| "loss": 4.0508, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6738000724598285e-05, |
| "loss": 4.0522, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672961477708776e-05, |
| "loss": 4.0497, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6721245208380974e-05, |
| "loss": 4.0636, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6712859260870454e-05, |
| "loss": 4.0634, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.670447331335993e-05, |
| "loss": 4.0658, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.669608736584941e-05, |
| "loss": 4.0453, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.668771779714262e-05, |
| "loss": 4.0527, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.66793318496321e-05, |
| "loss": 4.0532, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6670945902121576e-05, |
| "loss": 4.0539, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6662559954611056e-05, |
| "loss": 4.0597, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.665419038590427e-05, |
| "loss": 4.0479, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.664580443839375e-05, |
| "loss": 4.057, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663741849088323e-05, |
| "loss": 4.0453, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.662903254337271e-05, |
| "loss": 4.0511, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.662064659586219e-05, |
| "loss": 4.0498, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.66122770271554e-05, |
| "loss": 4.0442, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.660389107964488e-05, |
| "loss": 4.0556, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.659550513213436e-05, |
| "loss": 4.0518, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.658711918462384e-05, |
| "loss": 4.0461, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657873323711332e-05, |
| "loss": 4.0539, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65703472896028e-05, |
| "loss": 4.0459, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.656196134209228e-05, |
| "loss": 4.0545, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655359177338549e-05, |
| "loss": 4.0508, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6545205825874976e-05, |
| "loss": 4.0549, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6536819878364456e-05, |
| "loss": 4.0645, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6528433930853936e-05, |
| "loss": 4.0551, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6520064362147145e-05, |
| "loss": 4.0527, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6511678414636625e-05, |
| "loss": 4.0522, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6503292467126105e-05, |
| "loss": 4.047, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6494906519615585e-05, |
| "loss": 4.0581, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6486536950908794e-05, |
| "loss": 4.0493, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6478151003398274e-05, |
| "loss": 4.0407, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6469765055887754e-05, |
| "loss": 4.0616, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6461379108377234e-05, |
| "loss": 4.0567, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6452993160866714e-05, |
| "loss": 4.0536, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6444607213356194e-05, |
| "loss": 4.0442, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6436221265845674e-05, |
| "loss": 4.0472, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.642785169713889e-05, |
| "loss": 4.0446, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.641946574962837e-05, |
| "loss": 4.06, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.641107980211785e-05, |
| "loss": 4.0516, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.640269385460733e-05, |
| "loss": 4.0577, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.639430790709681e-05, |
| "loss": 4.0292, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.638592195958629e-05, |
| "loss": 4.0648, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637753601207576e-05, |
| "loss": 4.0463, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636915006456524e-05, |
| "loss": 4.0666, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636078049585846e-05, |
| "loss": 4.0464, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.635241092715167e-05, |
| "loss": 4.0469, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.634402497964115e-05, |
| "loss": 4.0527, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.633563903213063e-05, |
| "loss": 4.069, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632725308462011e-05, |
| "loss": 4.0439, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631886713710959e-05, |
| "loss": 4.0593, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631048118959907e-05, |
| "loss": 4.0438, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.630209524208855e-05, |
| "loss": 4.0564, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.629370929457803e-05, |
| "loss": 4.0437, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.628532334706751e-05, |
| "loss": 4.0489, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.627693739955699e-05, |
| "loss": 4.0499, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6268567830850197e-05, |
| "loss": 4.0451, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6260181883339676e-05, |
| "loss": 4.0587, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.081997394561768, |
| "eval_runtime": 303.0095, |
| "eval_samples_per_second": 1259.337, |
| "eval_steps_per_second": 39.355, |
| "step": 839520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6251795935829156e-05, |
| "loss": 4.047, |
| "step": 839680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6243409988318636e-05, |
| "loss": 4.0386, |
| "step": 840192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6235040419611845e-05, |
| "loss": 4.0518, |
| "step": 840704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.622665447210133e-05, |
| "loss": 4.0462, |
| "step": 841216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.621826852459081e-05, |
| "loss": 4.0631, |
| "step": 841728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.620988257708029e-05, |
| "loss": 4.0486, |
| "step": 842240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.620152938717724e-05, |
| "loss": 4.0483, |
| "step": 842752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.619314343966671e-05, |
| "loss": 4.056, |
| "step": 843264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.618475749215619e-05, |
| "loss": 4.0452, |
| "step": 843776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.617637154464567e-05, |
| "loss": 4.0414, |
| "step": 844288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616798559713515e-05, |
| "loss": 4.0594, |
| "step": 844800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615959964962463e-05, |
| "loss": 4.0499, |
| "step": 845312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615123008091784e-05, |
| "loss": 4.0574, |
| "step": 845824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.614284413340732e-05, |
| "loss": 4.0373, |
| "step": 846336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.61344581858968e-05, |
| "loss": 4.0394, |
| "step": 846848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6126072238386286e-05, |
| "loss": 4.0346, |
| "step": 847360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6117702669679495e-05, |
| "loss": 4.0504, |
| "step": 847872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6109316722168975e-05, |
| "loss": 4.0484, |
| "step": 848384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6100930774658455e-05, |
| "loss": 4.0398, |
| "step": 848896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6092544827147935e-05, |
| "loss": 4.0548, |
| "step": 849408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6084158879637415e-05, |
| "loss": 4.0637, |
| "step": 849920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6075789310930624e-05, |
| "loss": 4.0515, |
| "step": 850432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6067403363420104e-05, |
| "loss": 4.0521, |
| "step": 850944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6059017415909584e-05, |
| "loss": 4.0496, |
| "step": 851456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6050631468399064e-05, |
| "loss": 4.0524, |
| "step": 851968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6042245520888544e-05, |
| "loss": 4.0471, |
| "step": 852480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6033859573378024e-05, |
| "loss": 4.0393, |
| "step": 852992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6025473625867504e-05, |
| "loss": 4.0513, |
| "step": 853504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6017087678356984e-05, |
| "loss": 4.0393, |
| "step": 854016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.60087181096502e-05, |
| "loss": 4.0373, |
| "step": 854528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.600033216213968e-05, |
| "loss": 4.0488, |
| "step": 855040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.599194621462916e-05, |
| "loss": 4.0445, |
| "step": 855552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.598356026711864e-05, |
| "loss": 4.0494, |
| "step": 856064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.597519069841185e-05, |
| "loss": 4.0509, |
| "step": 856576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.596680475090133e-05, |
| "loss": 4.0399, |
| "step": 857088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595843518219454e-05, |
| "loss": 4.0426, |
| "step": 857600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595004923468402e-05, |
| "loss": 4.044, |
| "step": 858112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59416632871735e-05, |
| "loss": 4.03, |
| "step": 858624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.593327733966298e-05, |
| "loss": 4.0452, |
| "step": 859136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.592489139215246e-05, |
| "loss": 4.0363, |
| "step": 859648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.591650544464194e-05, |
| "loss": 4.0405, |
| "step": 860160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.590811949713142e-05, |
| "loss": 4.0453, |
| "step": 860672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.589974992842463e-05, |
| "loss": 4.0521, |
| "step": 861184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.589136398091411e-05, |
| "loss": 4.0372, |
| "step": 861696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.588297803340359e-05, |
| "loss": 4.0496, |
| "step": 862208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.587459208589307e-05, |
| "loss": 4.0432, |
| "step": 862720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5866206138382546e-05, |
| "loss": 4.0395, |
| "step": 863232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5857820190872026e-05, |
| "loss": 4.0468, |
| "step": 863744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5849434243361506e-05, |
| "loss": 4.0286, |
| "step": 864256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5841064674654715e-05, |
| "loss": 4.0485, |
| "step": 864768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5832678727144195e-05, |
| "loss": 4.0377, |
| "step": 865280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5824292779633675e-05, |
| "loss": 4.0473, |
| "step": 865792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5815906832123155e-05, |
| "loss": 4.0342, |
| "step": 866304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.580752088461264e-05, |
| "loss": 4.0273, |
| "step": 866816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579913493710212e-05, |
| "loss": 4.0311, |
| "step": 867328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579076536839533e-05, |
| "loss": 4.0284, |
| "step": 867840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.578237942088481e-05, |
| "loss": 4.0545, |
| "step": 868352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.577399347337429e-05, |
| "loss": 4.0335, |
| "step": 868864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.576560752586377e-05, |
| "loss": 4.0564, |
| "step": 869376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.575722157835325e-05, |
| "loss": 4.0442, |
| "step": 869888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574883563084273e-05, |
| "loss": 4.0256, |
| "step": 870400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574044968333221e-05, |
| "loss": 4.0394, |
| "step": 870912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.573206373582169e-05, |
| "loss": 4.0322, |
| "step": 871424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57236941671149e-05, |
| "loss": 4.0275, |
| "step": 871936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.571530821960438e-05, |
| "loss": 4.0423, |
| "step": 872448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.570692227209386e-05, |
| "loss": 4.0461, |
| "step": 872960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569853632458334e-05, |
| "loss": 4.0313, |
| "step": 873472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5690166755876555e-05, |
| "loss": 4.0436, |
| "step": 873984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5681780808366035e-05, |
| "loss": 4.019, |
| "step": 874496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5673394860855515e-05, |
| "loss": 4.0319, |
| "step": 875008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5665008913344995e-05, |
| "loss": 4.0389, |
| "step": 875520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5656639344638204e-05, |
| "loss": 4.0467, |
| "step": 876032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5648253397127684e-05, |
| "loss": 4.0433, |
| "step": 876544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5639867449617164e-05, |
| "loss": 4.0355, |
| "step": 877056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5631481502106644e-05, |
| "loss": 4.052, |
| "step": 877568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5623095554596124e-05, |
| "loss": 4.0351, |
| "step": 878080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5614709607085604e-05, |
| "loss": 4.046, |
| "step": 878592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.560632365957508e-05, |
| "loss": 4.0299, |
| "step": 879104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5597937712064564e-05, |
| "loss": 4.0337, |
| "step": 879616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558956814335778e-05, |
| "loss": 4.0351, |
| "step": 880128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558119857465099e-05, |
| "loss": 4.042, |
| "step": 880640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.557281262714047e-05, |
| "loss": 4.0403, |
| "step": 881152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.556442667962995e-05, |
| "loss": 4.0472, |
| "step": 881664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.555604073211943e-05, |
| "loss": 4.0297, |
| "step": 882176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554765478460891e-05, |
| "loss": 4.0191, |
| "step": 882688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553926883709838e-05, |
| "loss": 4.0379, |
| "step": 883200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553088288958786e-05, |
| "loss": 4.0433, |
| "step": 883712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.552249694207734e-05, |
| "loss": 4.0337, |
| "step": 884224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.551412737337055e-05, |
| "loss": 4.0399, |
| "step": 884736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.550574142586003e-05, |
| "loss": 4.0335, |
| "step": 885248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.549735547834952e-05, |
| "loss": 4.0325, |
| "step": 885760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5488969530839e-05, |
| "loss": 4.0352, |
| "step": 886272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.548058358332848e-05, |
| "loss": 4.0322, |
| "step": 886784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.547219763581796e-05, |
| "loss": 4.0422, |
| "step": 887296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5463828067111167e-05, |
| "loss": 4.0476, |
| "step": 887808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5455442119600647e-05, |
| "loss": 4.0465, |
| "step": 888320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5447056172090126e-05, |
| "loss": 4.0275, |
| "step": 888832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5438670224579606e-05, |
| "loss": 4.0373, |
| "step": 889344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5430284277069086e-05, |
| "loss": 4.0302, |
| "step": 889856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5421914708362296e-05, |
| "loss": 4.037, |
| "step": 890368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5413528760851775e-05, |
| "loss": 4.0415, |
| "step": 890880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5405142813341255e-05, |
| "loss": 4.0315, |
| "step": 891392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.539677324463447e-05, |
| "loss": 4.0356, |
| "step": 891904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538838729712395e-05, |
| "loss": 4.0286, |
| "step": 892416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538000134961343e-05, |
| "loss": 4.0311, |
| "step": 892928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.537161540210291e-05, |
| "loss": 4.0326, |
| "step": 893440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.536322945459239e-05, |
| "loss": 4.0268, |
| "step": 893952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.535484350708187e-05, |
| "loss": 4.0361, |
| "step": 894464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.534645755957135e-05, |
| "loss": 4.035, |
| "step": 894976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533807161206083e-05, |
| "loss": 4.0277, |
| "step": 895488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.532970204335404e-05, |
| "loss": 4.0325, |
| "step": 896000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.532131609584352e-05, |
| "loss": 4.0317, |
| "step": 896512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5312930148333e-05, |
| "loss": 4.0359, |
| "step": 897024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.530454420082248e-05, |
| "loss": 4.0333, |
| "step": 897536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.529617463211569e-05, |
| "loss": 4.0308, |
| "step": 898048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528778868460517e-05, |
| "loss": 4.0446, |
| "step": 898560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527940273709465e-05, |
| "loss": 4.0429, |
| "step": 899072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5271033168387865e-05, |
| "loss": 4.0322, |
| "step": 899584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5262647220877345e-05, |
| "loss": 4.0352, |
| "step": 900096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5254261273366825e-05, |
| "loss": 4.0262, |
| "step": 900608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5245875325856305e-05, |
| "loss": 4.0415, |
| "step": 901120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5237489378345785e-05, |
| "loss": 4.0345, |
| "step": 901632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5229103430835265e-05, |
| "loss": 4.0244, |
| "step": 902144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.522071748332474e-05, |
| "loss": 4.0417, |
| "step": 902656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5212347914617954e-05, |
| "loss": 4.0387, |
| "step": 903168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5203961967107434e-05, |
| "loss": 4.0349, |
| "step": 903680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5195576019596914e-05, |
| "loss": 4.0296, |
| "step": 904192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.518719007208639e-05, |
| "loss": 4.0264, |
| "step": 904704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5178804124575873e-05, |
| "loss": 4.0254, |
| "step": 905216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5170418177065353e-05, |
| "loss": 4.042, |
| "step": 905728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.516203222955483e-05, |
| "loss": 4.0352, |
| "step": 906240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.515364628204431e-05, |
| "loss": 4.0402, |
| "step": 906752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.514526033453379e-05, |
| "loss": 4.0172, |
| "step": 907264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5136890765827e-05, |
| "loss": 4.0375, |
| "step": 907776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512850481831648e-05, |
| "loss": 4.0337, |
| "step": 908288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512013524960969e-05, |
| "loss": 4.0423, |
| "step": 908800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.511174930209917e-05, |
| "loss": 4.0328, |
| "step": 909312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.510336335458865e-05, |
| "loss": 4.0296, |
| "step": 909824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.509497740707813e-05, |
| "loss": 4.0362, |
| "step": 910336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.508660783837134e-05, |
| "loss": 4.0484, |
| "step": 910848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.507822189086083e-05, |
| "loss": 4.0286, |
| "step": 911360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.506983594335031e-05, |
| "loss": 4.0391, |
| "step": 911872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.506144999583979e-05, |
| "loss": 4.0289, |
| "step": 912384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.505306404832927e-05, |
| "loss": 4.0354, |
| "step": 912896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.504467810081875e-05, |
| "loss": 4.0292, |
| "step": 913408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.503629215330823e-05, |
| "loss": 4.0287, |
| "step": 913920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.502790620579771e-05, |
| "loss": 4.0309, |
| "step": 914432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.501952025828719e-05, |
| "loss": 4.0318, |
| "step": 914944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5011150689580396e-05, |
| "loss": 4.0367, |
| "step": 915456 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.070945739746094, |
| "eval_runtime": 307.1239, |
| "eval_samples_per_second": 1242.466, |
| "eval_steps_per_second": 38.828, |
| "step": 915840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.5002764742069876e-05, |
| "loss": 4.0456, |
| "step": 915968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4994378794559356e-05, |
| "loss": 4.0171, |
| "step": 916480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4985992847048836e-05, |
| "loss": 4.0327, |
| "step": 916992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4977606899538316e-05, |
| "loss": 4.0343, |
| "step": 917504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4969220952027796e-05, |
| "loss": 4.0404, |
| "step": 918016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4960835004517276e-05, |
| "loss": 4.038, |
| "step": 918528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4952449057006756e-05, |
| "loss": 4.0284, |
| "step": 919040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4944063109496236e-05, |
| "loss": 4.0392, |
| "step": 919552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4935677161985716e-05, |
| "loss": 4.0285, |
| "step": 920064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4927291214475195e-05, |
| "loss": 4.0301, |
| "step": 920576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4918905266964675e-05, |
| "loss": 4.0376, |
| "step": 921088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4910519319454155e-05, |
| "loss": 4.0329, |
| "step": 921600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4902133371943635e-05, |
| "loss": 4.0398, |
| "step": 922112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4893747424433115e-05, |
| "loss": 4.0219, |
| "step": 922624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4885361476922595e-05, |
| "loss": 4.0207, |
| "step": 923136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4876975529412075e-05, |
| "loss": 4.0174, |
| "step": 923648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4868589581901555e-05, |
| "loss": 4.0353, |
| "step": 924160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4860203634391035e-05, |
| "loss": 4.0289, |
| "step": 924672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4851817686880515e-05, |
| "loss": 4.026, |
| "step": 925184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.484343173936999e-05, |
| "loss": 4.0337, |
| "step": 925696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4835045791859475e-05, |
| "loss": 4.0484, |
| "step": 926208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.482667622315269e-05, |
| "loss": 4.0354, |
| "step": 926720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.48183066544459e-05, |
| "loss": 4.0334, |
| "step": 927232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.480992070693538e-05, |
| "loss": 4.0348, |
| "step": 927744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.480153475942486e-05, |
| "loss": 4.0389, |
| "step": 928256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.479314881191434e-05, |
| "loss": 4.0278, |
| "step": 928768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.478477924320755e-05, |
| "loss": 4.0221, |
| "step": 929280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.477639329569703e-05, |
| "loss": 4.0351, |
| "step": 929792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.476800734818651e-05, |
| "loss": 4.0164, |
| "step": 930304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.475962140067599e-05, |
| "loss": 4.0241, |
| "step": 930816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.475123545316546e-05, |
| "loss": 4.0308, |
| "step": 931328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.474284950565494e-05, |
| "loss": 4.0268, |
| "step": 931840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.473446355814443e-05, |
| "loss": 4.0345, |
| "step": 932352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.472607761063391e-05, |
| "loss": 4.0304, |
| "step": 932864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.471769166312339e-05, |
| "loss": 4.0272, |
| "step": 933376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4709338473220334e-05, |
| "loss": 4.0262, |
| "step": 933888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4700952525709814e-05, |
| "loss": 4.0286, |
| "step": 934400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.469256657819929e-05, |
| "loss": 4.0122, |
| "step": 934912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.468418063068877e-05, |
| "loss": 4.0311, |
| "step": 935424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.467579468317825e-05, |
| "loss": 4.0195, |
| "step": 935936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4667408735667727e-05, |
| "loss": 4.0247, |
| "step": 936448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4659022788157207e-05, |
| "loss": 4.0298, |
| "step": 936960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4650636840646686e-05, |
| "loss": 4.0368, |
| "step": 937472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4642250893136166e-05, |
| "loss": 4.0175, |
| "step": 937984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.463388132442938e-05, |
| "loss": 4.031, |
| "step": 938496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.462549537691886e-05, |
| "loss": 4.0282, |
| "step": 939008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.461710942940834e-05, |
| "loss": 4.0211, |
| "step": 939520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.460872348189782e-05, |
| "loss": 4.0313, |
| "step": 940032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.46003375343873e-05, |
| "loss": 4.013, |
| "step": 940544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.459196796568051e-05, |
| "loss": 4.025, |
| "step": 941056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.458358201816999e-05, |
| "loss": 4.0252, |
| "step": 941568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.457519607065947e-05, |
| "loss": 4.0281, |
| "step": 942080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.456681012314895e-05, |
| "loss": 4.0172, |
| "step": 942592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.455844055444216e-05, |
| "loss": 4.013, |
| "step": 943104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.455005460693164e-05, |
| "loss": 4.0121, |
| "step": 943616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.454168503822485e-05, |
| "loss": 4.0103, |
| "step": 944128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4533299090714336e-05, |
| "loss": 4.0413, |
| "step": 944640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4524913143203816e-05, |
| "loss": 4.0131, |
| "step": 945152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4516527195693296e-05, |
| "loss": 4.0384, |
| "step": 945664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4508141248182776e-05, |
| "loss": 4.0278, |
| "step": 946176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4499755300672256e-05, |
| "loss": 4.01, |
| "step": 946688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4491369353161736e-05, |
| "loss": 4.0208, |
| "step": 947200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4482983405651216e-05, |
| "loss": 4.0163, |
| "step": 947712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4474597458140696e-05, |
| "loss": 4.012, |
| "step": 948224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4466227889433905e-05, |
| "loss": 4.022, |
| "step": 948736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4457841941923385e-05, |
| "loss": 4.0267, |
| "step": 949248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4449472373216594e-05, |
| "loss": 4.0187, |
| "step": 949760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4441086425706074e-05, |
| "loss": 4.0249, |
| "step": 950272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4432700478195554e-05, |
| "loss": 4.0036, |
| "step": 950784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4424314530685034e-05, |
| "loss": 4.0162, |
| "step": 951296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.441592858317452e-05, |
| "loss": 4.0225, |
| "step": 951808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4407542635664e-05, |
| "loss": 4.0309, |
| "step": 952320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.439915668815348e-05, |
| "loss": 4.0273, |
| "step": 952832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4390770740642954e-05, |
| "loss": 4.0219, |
| "step": 953344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4382384793132433e-05, |
| "loss": 4.0315, |
| "step": 953856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.437401522442565e-05, |
| "loss": 4.0185, |
| "step": 954368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.436562927691512e-05, |
| "loss": 4.0289, |
| "step": 954880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.43572433294046e-05, |
| "loss": 4.0153, |
| "step": 955392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.434887376069782e-05, |
| "loss": 4.0204, |
| "step": 955904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.43404878131873e-05, |
| "loss": 4.0157, |
| "step": 956416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.433210186567677e-05, |
| "loss": 4.0236, |
| "step": 956928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.432371591816626e-05, |
| "loss": 4.0294, |
| "step": 957440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.431532997065574e-05, |
| "loss": 4.0304, |
| "step": 957952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.430694402314522e-05, |
| "loss": 4.0164, |
| "step": 958464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.42985580756347e-05, |
| "loss": 4.0007, |
| "step": 958976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.429017212812418e-05, |
| "loss": 4.0222, |
| "step": 959488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.428180255941739e-05, |
| "loss": 4.0251, |
| "step": 960000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.427341661190687e-05, |
| "loss": 4.0186, |
| "step": 960512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.426503066439635e-05, |
| "loss": 4.024, |
| "step": 961024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.425664471688583e-05, |
| "loss": 4.0206, |
| "step": 961536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.424825876937531e-05, |
| "loss": 4.014, |
| "step": 962048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4239889200668516e-05, |
| "loss": 4.0199, |
| "step": 962560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4231503253157996e-05, |
| "loss": 4.0172, |
| "step": 963072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.422315006325494e-05, |
| "loss": 4.023, |
| "step": 963584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.421476411574442e-05, |
| "loss": 4.029, |
| "step": 964096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.42063781682339e-05, |
| "loss": 4.0338, |
| "step": 964608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.419799222072338e-05, |
| "loss": 4.0103, |
| "step": 965120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.418960627321286e-05, |
| "loss": 4.0214, |
| "step": 965632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.418122032570234e-05, |
| "loss": 4.0186, |
| "step": 966144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.417283437819182e-05, |
| "loss": 4.0212, |
| "step": 966656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.41644484306813e-05, |
| "loss": 4.0259, |
| "step": 967168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.415606248317078e-05, |
| "loss": 4.0149, |
| "step": 967680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.414767653566026e-05, |
| "loss": 4.0188, |
| "step": 968192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.413929058814974e-05, |
| "loss": 4.0115, |
| "step": 968704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.413090464063922e-05, |
| "loss": 4.0202, |
| "step": 969216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.412253507193243e-05, |
| "loss": 4.0156, |
| "step": 969728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.411414912442191e-05, |
| "loss": 4.0115, |
| "step": 970240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.410576317691139e-05, |
| "loss": 4.0208, |
| "step": 970752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4097377229400876e-05, |
| "loss": 4.0178, |
| "step": 971264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4089007660694085e-05, |
| "loss": 4.0127, |
| "step": 971776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4080621713183565e-05, |
| "loss": 4.0184, |
| "step": 972288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4072235765673045e-05, |
| "loss": 4.0123, |
| "step": 972800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4063866196966254e-05, |
| "loss": 4.0208, |
| "step": 973312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4055480249455734e-05, |
| "loss": 4.0176, |
| "step": 973824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4047094301945214e-05, |
| "loss": 4.0189, |
| "step": 974336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4038708354434694e-05, |
| "loss": 4.0238, |
| "step": 974848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4030322406924174e-05, |
| "loss": 4.0308, |
| "step": 975360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.402195283821738e-05, |
| "loss": 4.0169, |
| "step": 975872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.401356689070686e-05, |
| "loss": 4.0193, |
| "step": 976384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.400518094319634e-05, |
| "loss": 4.0123, |
| "step": 976896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.399679499568583e-05, |
| "loss": 4.0243, |
| "step": 977408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.398842542697904e-05, |
| "loss": 4.0181, |
| "step": 977920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.398003947946852e-05, |
| "loss": 4.0066, |
| "step": 978432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3971653531958e-05, |
| "loss": 4.0216, |
| "step": 978944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.396326758444748e-05, |
| "loss": 4.0255, |
| "step": 979456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.395488163693696e-05, |
| "loss": 4.0189, |
| "step": 979968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.394649568942643e-05, |
| "loss": 4.014, |
| "step": 980480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.393812612071965e-05, |
| "loss": 4.0072, |
| "step": 980992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.392974017320913e-05, |
| "loss": 4.012, |
| "step": 981504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.392135422569861e-05, |
| "loss": 4.0286, |
| "step": 982016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.391296827818808e-05, |
| "loss": 4.0184, |
| "step": 982528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.390458233067757e-05, |
| "loss": 4.0254, |
| "step": 983040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.389619638316705e-05, |
| "loss": 3.9999, |
| "step": 983552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.388781043565653e-05, |
| "loss": 4.0235, |
| "step": 984064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.387942448814601e-05, |
| "loss": 4.0208, |
| "step": 984576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.387103854063549e-05, |
| "loss": 4.0251, |
| "step": 985088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.38626689719287e-05, |
| "loss": 4.0162, |
| "step": 985600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.385428302441818e-05, |
| "loss": 4.0144, |
| "step": 986112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3845897076907657e-05, |
| "loss": 4.0188, |
| "step": 986624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3837511129397137e-05, |
| "loss": 4.0348, |
| "step": 987136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3829125181886616e-05, |
| "loss": 4.0127, |
| "step": 987648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3820755613179826e-05, |
| "loss": 4.0204, |
| "step": 988160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3812386044473035e-05, |
| "loss": 4.0159, |
| "step": 988672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.380400009696252e-05, |
| "loss": 4.0213, |
| "step": 989184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3795614149452e-05, |
| "loss": 4.011, |
| "step": 989696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.378722820194148e-05, |
| "loss": 4.0196, |
| "step": 990208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.377884225443096e-05, |
| "loss": 4.0119, |
| "step": 990720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.377045630692044e-05, |
| "loss": 4.0152, |
| "step": 991232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.376208673821365e-05, |
| "loss": 4.0242, |
| "step": 991744 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.062049388885498, |
| "eval_runtime": 302.459, |
| "eval_samples_per_second": 1261.629, |
| "eval_steps_per_second": 39.427, |
| "step": 992160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.375370079070313e-05, |
| "loss": 4.0191, |
| "step": 992256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.374531484319261e-05, |
| "loss": 4.0038, |
| "step": 992768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.373692889568209e-05, |
| "loss": 4.0134, |
| "step": 993280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372854294817157e-05, |
| "loss": 4.0247, |
| "step": 993792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372015700066105e-05, |
| "loss": 4.0203, |
| "step": 994304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.371177105315053e-05, |
| "loss": 4.024, |
| "step": 994816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.370338510564001e-05, |
| "loss": 4.0116, |
| "step": 995328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.369501553693322e-05, |
| "loss": 4.0254, |
| "step": 995840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.36866295894227e-05, |
| "loss": 4.012, |
| "step": 996352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3678243641912186e-05, |
| "loss": 4.0147, |
| "step": 996864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3669857694401666e-05, |
| "loss": 4.0204, |
| "step": 997376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3661471746891146e-05, |
| "loss": 4.0208, |
| "step": 997888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3653102178184355e-05, |
| "loss": 4.0262, |
| "step": 998400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3644716230673835e-05, |
| "loss": 4.004, |
| "step": 998912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3636330283163315e-05, |
| "loss": 4.0049, |
| "step": 999424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3627944335652795e-05, |
| "loss": 4.0051, |
| "step": 999936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.361955838814227e-05, |
| "loss": 4.0176, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.361117244063175e-05, |
| "loss": 4.0121, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.360278649312123e-05, |
| "loss": 4.0096, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.359440054561071e-05, |
| "loss": 4.0212, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.358601459810019e-05, |
| "loss": 4.0277, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.357762865058967e-05, |
| "loss": 4.0243, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3569259081882884e-05, |
| "loss": 4.017, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3560873134372363e-05, |
| "loss": 4.0198, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3552487186861843e-05, |
| "loss": 4.0215, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.354410123935132e-05, |
| "loss": 4.0148, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.353573167064453e-05, |
| "loss": 4.0042, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.352734572313401e-05, |
| "loss": 4.0244, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351895977562349e-05, |
| "loss": 3.9996, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351057382811297e-05, |
| "loss": 4.009, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.350218788060245e-05, |
| "loss": 4.0114, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.349381831189566e-05, |
| "loss": 4.019, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.348543236438514e-05, |
| "loss": 4.0186, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.347704641687462e-05, |
| "loss": 4.0151, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.346867684816784e-05, |
| "loss": 4.015, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.346029090065732e-05, |
| "loss": 4.0099, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.34519049531468e-05, |
| "loss": 4.0081, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.344351900563628e-05, |
| "loss": 3.9978, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.343513305812576e-05, |
| "loss": 4.0174, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.342674711061524e-05, |
| "loss": 4.005, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.341836116310472e-05, |
| "loss": 4.0109, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.34099752155942e-05, |
| "loss": 4.0112, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3401605646887406e-05, |
| "loss": 4.0204, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3393219699376886e-05, |
| "loss": 4.0089, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3384833751866366e-05, |
| "loss": 4.0159, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3376447804355846e-05, |
| "loss": 4.0134, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3368061856845326e-05, |
| "loss": 4.0075, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335969228813854e-05, |
| "loss": 4.0123, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335130634062802e-05, |
| "loss": 3.9994, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.334293677192123e-05, |
| "loss": 4.0093, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.333455082441071e-05, |
| "loss": 4.0151, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.332616487690019e-05, |
| "loss": 4.0104, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.331777892938967e-05, |
| "loss": 4.0017, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330939298187915e-05, |
| "loss": 3.996, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330100703436863e-05, |
| "loss": 4.0007, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3292621086858104e-05, |
| "loss": 3.9946, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3284235139347584e-05, |
| "loss": 4.0296, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.32758655706408e-05, |
| "loss": 3.9974, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.326747962313028e-05, |
| "loss": 4.0274, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325909367561976e-05, |
| "loss": 4.0101, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325070772810924e-05, |
| "loss": 3.9984, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3242338159402455e-05, |
| "loss": 4.004, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.323395221189193e-05, |
| "loss": 4.0012, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.322556626438141e-05, |
| "loss": 4.0015, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321718031687089e-05, |
| "loss": 4.0021, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.320879436936037e-05, |
| "loss": 4.0187, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.320044117945731e-05, |
| "loss": 4.0056, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.319205523194679e-05, |
| "loss": 4.0059, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.318366928443627e-05, |
| "loss": 3.9925, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.317528333692575e-05, |
| "loss": 4.0025, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.316689738941523e-05, |
| "loss": 4.0047, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.315851144190471e-05, |
| "loss": 4.0191, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.315012549439419e-05, |
| "loss": 4.0123, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.314173954688367e-05, |
| "loss": 4.0073, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.313335359937315e-05, |
| "loss": 4.0145, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.312498403066636e-05, |
| "loss": 4.0067, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.311659808315584e-05, |
| "loss": 4.01, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.310821213564532e-05, |
| "loss": 4.0002, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.30998261881348e-05, |
| "loss": 4.0085, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.309147299823175e-05, |
| "loss": 4.0029, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.308308705072122e-05, |
| "loss": 4.0095, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.307470110321071e-05, |
| "loss": 4.0162, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.306631515570019e-05, |
| "loss": 4.0147, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.305792920818967e-05, |
| "loss": 4.0035, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3049559639482876e-05, |
| "loss": 3.9821, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3041173691972356e-05, |
| "loss": 4.0103, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3032787744461836e-05, |
| "loss": 4.0099, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3024401796951316e-05, |
| "loss": 4.0056, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3016015849440796e-05, |
| "loss": 4.0046, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3007646280734005e-05, |
| "loss": 4.0108, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2999260333223485e-05, |
| "loss": 3.9991, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2990874385712965e-05, |
| "loss": 4.0051, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2982488438202445e-05, |
| "loss": 4.0001, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2974102490691925e-05, |
| "loss": 4.0092, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2965716543181405e-05, |
| "loss": 4.0153, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2957330595670884e-05, |
| "loss": 4.0164, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.294894464816037e-05, |
| "loss": 3.9993, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.294057507945358e-05, |
| "loss": 4.0039, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.293218913194306e-05, |
| "loss": 4.0049, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.292380318443254e-05, |
| "loss": 4.0061, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.291541723692202e-05, |
| "loss": 4.0071, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.29070312894115e-05, |
| "loss": 4.0024, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.289864534190098e-05, |
| "loss": 4.0043, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.289025939439046e-05, |
| "loss": 3.9957, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.288188982568367e-05, |
| "loss": 4.0085, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.287350387817315e-05, |
| "loss": 3.9963, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.286511793066263e-05, |
| "loss": 3.9997, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.285673198315211e-05, |
| "loss": 4.0057, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.284834603564159e-05, |
| "loss": 4.0011, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2839976466934805e-05, |
| "loss": 4.0008, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2831590519424285e-05, |
| "loss": 4.0097, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2823204571913765e-05, |
| "loss": 3.9955, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.281481862440324e-05, |
| "loss": 4.007, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2806449055696454e-05, |
| "loss": 4.0033, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2798063108185934e-05, |
| "loss": 4.0019, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.278969353947914e-05, |
| "loss": 4.0106, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.278130759196862e-05, |
| "loss": 4.0158, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.27729216444581e-05, |
| "loss": 4.0051, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.276453569694758e-05, |
| "loss": 4.0049, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.275614974943706e-05, |
| "loss": 3.9983, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.274778018073028e-05, |
| "loss": 4.0084, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.273939423321976e-05, |
| "loss": 4.0024, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.273102466451297e-05, |
| "loss": 3.9969, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.272263871700245e-05, |
| "loss": 4.0043, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.271425276949193e-05, |
| "loss": 4.0128, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.270586682198141e-05, |
| "loss": 4.0046, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.269748087447088e-05, |
| "loss": 4.0018, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.268909492696036e-05, |
| "loss": 3.9939, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.268070897944984e-05, |
| "loss": 3.9981, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.267232303193932e-05, |
| "loss": 4.011, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.26639370844288e-05, |
| "loss": 4.0052, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.265555113691828e-05, |
| "loss": 4.0127, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.264716518940776e-05, |
| "loss": 3.9903, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.263877924189725e-05, |
| "loss": 4.0092, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2630409673190456e-05, |
| "loss": 4.0072, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2622023725679936e-05, |
| "loss": 4.0083, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2613654156973145e-05, |
| "loss": 3.9996, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2605268209462625e-05, |
| "loss": 3.9992, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2596882261952105e-05, |
| "loss": 4.0103, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2588496314441585e-05, |
| "loss": 4.0181, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2580110366931065e-05, |
| "loss": 4.0006, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2571724419420545e-05, |
| "loss": 4.0036, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2563338471910025e-05, |
| "loss": 4.0014, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2554952524399505e-05, |
| "loss": 4.0062, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2546582955692714e-05, |
| "loss": 4.0001, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.25381970081822e-05, |
| "loss": 4.0053, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252981106067168e-05, |
| "loss": 3.9972, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252142511316116e-05, |
| "loss": 3.9996, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.251305554445437e-05, |
| "loss": 4.008, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.053737640380859, |
| "eval_runtime": 295.7518, |
| "eval_samples_per_second": 1290.241, |
| "eval_steps_per_second": 40.321, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.250466959694385e-05, |
| "loss": 3.9938, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.249628364943333e-05, |
| "loss": 3.9974, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.248789770192281e-05, |
| "loss": 3.9972, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247951175441229e-05, |
| "loss": 4.0098, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247112580690177e-05, |
| "loss": 4.006, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.246273985939124e-05, |
| "loss": 4.0114, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.245435391188072e-05, |
| "loss": 3.9958, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.24459679643702e-05, |
| "loss": 4.0084, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.243758201685968e-05, |
| "loss": 3.9996, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.242919606934916e-05, |
| "loss": 4.0027, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.242081012183865e-05, |
| "loss": 4.0033, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.241242417432813e-05, |
| "loss": 4.008, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.240403822681761e-05, |
| "loss": 4.0134, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.239565227930709e-05, |
| "loss": 3.9957, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.238726633179657e-05, |
| "loss": 3.9854, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237888038428605e-05, |
| "loss": 3.9939, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237049443677553e-05, |
| "loss": 4.0008, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.236210848926501e-05, |
| "loss": 4.0022, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.235372254175448e-05, |
| "loss": 3.9964, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.234533659424396e-05, |
| "loss": 4.0055, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.233696702553718e-05, |
| "loss": 4.0171, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.232859745683039e-05, |
| "loss": 4.005, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.232021150931987e-05, |
| "loss": 4.0076, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.231182556180935e-05, |
| "loss": 4.0068, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2303439614298834e-05, |
| "loss": 4.0107, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2295053666788314e-05, |
| "loss": 3.9983, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.228666771927779e-05, |
| "loss": 3.9966, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.227828177176727e-05, |
| "loss": 4.005, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.226989582425675e-05, |
| "loss": 3.9889, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2261526255549956e-05, |
| "loss": 3.9944, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2253140308039436e-05, |
| "loss": 3.9982, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.224477073933265e-05, |
| "loss": 4.0028, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.223638479182213e-05, |
| "loss": 4.0065, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2227998844311605e-05, |
| "loss": 3.9934, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2219612896801085e-05, |
| "loss": 4.0078, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.221122694929057e-05, |
| "loss": 3.9949, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.220285738058379e-05, |
| "loss": 3.9931, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.219447143307326e-05, |
| "loss": 3.9862, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.218608548556274e-05, |
| "loss": 4.0022, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.217769953805222e-05, |
| "loss": 3.9904, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.21693135905417e-05, |
| "loss": 3.9961, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.216092764303118e-05, |
| "loss": 4.0012, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.215254169552066e-05, |
| "loss": 4.0075, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.214417212681387e-05, |
| "loss": 3.9916, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.213578617930335e-05, |
| "loss": 4.0025, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.212740023179283e-05, |
| "loss": 3.9993, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.211901428428231e-05, |
| "loss": 3.9898, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2110644715575525e-05, |
| "loss": 4.0003, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2102258768065005e-05, |
| "loss": 3.9878, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2093889199358214e-05, |
| "loss": 3.9929, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2085503251847694e-05, |
| "loss": 4.0056, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2077117304337174e-05, |
| "loss": 3.9931, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2068731356826654e-05, |
| "loss": 3.9888, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2060345409316134e-05, |
| "loss": 3.9786, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2051959461805614e-05, |
| "loss": 3.9926, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2043573514295094e-05, |
| "loss": 3.9766, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2035187566784574e-05, |
| "loss": 4.0169, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2026801619274054e-05, |
| "loss": 3.9868, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2018415671763534e-05, |
| "loss": 4.011, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.201004610305674e-05, |
| "loss": 3.9977, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.200166015554622e-05, |
| "loss": 3.9803, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.19932742080357e-05, |
| "loss": 3.9896, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.198488826052519e-05, |
| "loss": 3.9945, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.197650231301467e-05, |
| "loss": 3.9845, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.196811636550415e-05, |
| "loss": 3.9798, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.195973041799362e-05, |
| "loss": 4.0085, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.19513444704831e-05, |
| "loss": 3.998, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.194295852297258e-05, |
| "loss": 3.987, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.193457257546206e-05, |
| "loss": 3.9805, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.192618662795154e-05, |
| "loss": 3.9876, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.191783343804849e-05, |
| "loss": 3.9922, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.190944749053797e-05, |
| "loss": 4.0045, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.190106154302744e-05, |
| "loss": 3.9986, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.189267559551693e-05, |
| "loss": 3.9959, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.188428964800641e-05, |
| "loss": 4.0007, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.187590370049589e-05, |
| "loss": 3.9941, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.186751775298537e-05, |
| "loss": 3.9979, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1859148184278576e-05, |
| "loss": 3.9875, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1850762236768056e-05, |
| "loss": 3.9969, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1842376289257536e-05, |
| "loss": 3.9897, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1833990341747016e-05, |
| "loss": 3.9926, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1825604394236496e-05, |
| "loss": 4.0013, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1817218446725976e-05, |
| "loss": 4.0031, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1808848878019185e-05, |
| "loss": 3.9889, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1800479309312394e-05, |
| "loss": 3.9715, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.179209336180188e-05, |
| "loss": 3.9997, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.178370741429136e-05, |
| "loss": 3.9911, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.177532146678084e-05, |
| "loss": 3.997, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.176693551927032e-05, |
| "loss": 3.9889, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.175856595056353e-05, |
| "loss": 3.996, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.175018000305301e-05, |
| "loss": 3.9893, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.174179405554249e-05, |
| "loss": 3.988, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.173340810803197e-05, |
| "loss": 3.9876, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.172502216052145e-05, |
| "loss": 3.9942, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.171663621301093e-05, |
| "loss": 4.0018, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.170825026550041e-05, |
| "loss": 4.0042, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.169986431798989e-05, |
| "loss": 3.9886, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.16914947492831e-05, |
| "loss": 3.9896, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.168310880177258e-05, |
| "loss": 3.9927, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1674722854262065e-05, |
| "loss": 3.9968, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1666336906751545e-05, |
| "loss": 3.9911, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1657950959241025e-05, |
| "loss": 3.9901, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1649565011730505e-05, |
| "loss": 3.9913, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.164117906421998e-05, |
| "loss": 3.9796, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1632809495513194e-05, |
| "loss": 3.9995, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1624423548002674e-05, |
| "loss": 3.9787, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1616037600492154e-05, |
| "loss": 3.9846, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.160765165298163e-05, |
| "loss": 3.9976, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.159926570547111e-05, |
| "loss": 3.9877, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.159087975796059e-05, |
| "loss": 3.9852, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.158249381045007e-05, |
| "loss": 3.9984, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.157410786293955e-05, |
| "loss": 3.9807, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.156573829423276e-05, |
| "loss": 3.9944, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.155735234672224e-05, |
| "loss": 3.991, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.154898277801545e-05, |
| "loss": 3.989, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.154059683050493e-05, |
| "loss": 3.9972, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.153221088299441e-05, |
| "loss": 4.0003, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.152382493548389e-05, |
| "loss": 3.9951, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.151543898797337e-05, |
| "loss": 3.9866, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.150705304046285e-05, |
| "loss": 3.985, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149866709295233e-05, |
| "loss": 3.9972, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149028114544181e-05, |
| "loss": 3.9898, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.148189519793129e-05, |
| "loss": 3.9864, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.14735256292245e-05, |
| "loss": 3.9911, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.146513968171398e-05, |
| "loss": 3.9987, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.145675373420347e-05, |
| "loss": 3.9907, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.144838416549668e-05, |
| "loss": 3.9906, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.143999821798616e-05, |
| "loss": 3.9826, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.143161227047564e-05, |
| "loss": 3.9878, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.142322632296512e-05, |
| "loss": 3.99, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1414840375454597e-05, |
| "loss": 3.9964, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1406454427944077e-05, |
| "loss": 3.9951, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1398068480433556e-05, |
| "loss": 3.9782, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1389682532923036e-05, |
| "loss": 3.9967, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1381312964216246e-05, |
| "loss": 3.9922, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1372927016705726e-05, |
| "loss": 3.9947, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1364541069195205e-05, |
| "loss": 3.9851, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1356155121684685e-05, |
| "loss": 3.9906, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1347769174174165e-05, |
| "loss": 3.9949, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1339383226663645e-05, |
| "loss": 4.0045, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.133101365795686e-05, |
| "loss": 3.9897, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.132262771044634e-05, |
| "loss": 3.9886, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.131425814173955e-05, |
| "loss": 3.9924, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.130587219422903e-05, |
| "loss": 3.9891, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.129748624671851e-05, |
| "loss": 3.9869, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128910029920799e-05, |
| "loss": 3.9977, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128071435169746e-05, |
| "loss": 3.9801, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.127232840418694e-05, |
| "loss": 3.9953, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.126394245667642e-05, |
| "loss": 3.9878, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.0457987785339355, |
| "eval_runtime": 312.8596, |
| "eval_samples_per_second": 1219.688, |
| "eval_steps_per_second": 38.116, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.12555565091659e-05, |
| "loss": 3.9887, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.124718694045912e-05, |
| "loss": 3.9835, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.12388009929486e-05, |
| "loss": 3.9823, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.123041504543808e-05, |
| "loss": 3.9955, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.122202909792756e-05, |
| "loss": 3.9945, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.121364315041704e-05, |
| "loss": 3.9976, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.120525720290652e-05, |
| "loss": 3.9837, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1196871255396e-05, |
| "loss": 3.9963, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.118848530788548e-05, |
| "loss": 3.9838, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.118011573917869e-05, |
| "loss": 3.9899, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.117172979166817e-05, |
| "loss": 3.9878, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.116334384415765e-05, |
| "loss": 3.9959, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.115495789664713e-05, |
| "loss": 3.9952, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1146588327940344e-05, |
| "loss": 3.9884, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1138202380429824e-05, |
| "loss": 3.9761, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1129816432919303e-05, |
| "loss": 3.9798, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.112144686421251e-05, |
| "loss": 3.9805, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.111306091670199e-05, |
| "loss": 3.9878, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.110467496919147e-05, |
| "loss": 3.9856, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.109628902168095e-05, |
| "loss": 3.9882, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.108790307417043e-05, |
| "loss": 4.0062, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.107951712665991e-05, |
| "loss": 3.9924, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.107113117914939e-05, |
| "loss": 3.995, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.106274523163887e-05, |
| "loss": 3.9935, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.105435928412835e-05, |
| "loss": 3.9979, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.1045973336617825e-05, |
| "loss": 3.982, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.103758738910731e-05, |
| "loss": 3.9862, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.102920144159679e-05, |
| "loss": 3.9901, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.102083187289e-05, |
| "loss": 3.9811, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.101244592537948e-05, |
| "loss": 3.975, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.100405997786896e-05, |
| "loss": 3.9833, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.099567403035844e-05, |
| "loss": 3.9924, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.098728808284792e-05, |
| "loss": 3.9918, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.09789021353374e-05, |
| "loss": 3.9842, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.097053256663061e-05, |
| "loss": 3.9923, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0962162997923826e-05, |
| "loss": 3.9814, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.09537770504133e-05, |
| "loss": 3.9788, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.094539110290278e-05, |
| "loss": 3.9723, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0937005155392266e-05, |
| "loss": 3.9911, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0928619207881746e-05, |
| "loss": 3.9766, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0920233260371226e-05, |
| "loss": 3.985, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0911847312860706e-05, |
| "loss": 3.9823, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0903461365350186e-05, |
| "loss": 3.9968, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0895075417839666e-05, |
| "loss": 3.9766, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0886705849132875e-05, |
| "loss": 3.9885, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0878319901622355e-05, |
| "loss": 3.9894, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0869933954111835e-05, |
| "loss": 3.9776, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.086158076420877e-05, |
| "loss": 3.9902, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.085319481669825e-05, |
| "loss": 3.9744, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.084480886918773e-05, |
| "loss": 3.9807, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.083642292167721e-05, |
| "loss": 3.9901, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.08280369741667e-05, |
| "loss": 3.9805, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.081965102665618e-05, |
| "loss": 3.9774, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.081126507914566e-05, |
| "loss": 3.9644, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.080287913163514e-05, |
| "loss": 3.9803, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.079449318412462e-05, |
| "loss": 3.9639, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.07861072366141e-05, |
| "loss": 3.9992, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.077772128910358e-05, |
| "loss": 3.9767, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.076933534159306e-05, |
| "loss": 3.9929, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.076096577288627e-05, |
| "loss": 3.9902, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.075257982537575e-05, |
| "loss": 3.9717, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.074419387786523e-05, |
| "loss": 3.9743, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.073580793035471e-05, |
| "loss": 3.9803, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.072742198284419e-05, |
| "loss": 3.9715, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.07190524141374e-05, |
| "loss": 3.967, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0710666466626884e-05, |
| "loss": 3.9956, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0702280519116364e-05, |
| "loss": 3.9892, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.069389457160584e-05, |
| "loss": 3.9754, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.068550862409532e-05, |
| "loss": 3.9628, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.067713905538853e-05, |
| "loss": 3.9756, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.066875310787801e-05, |
| "loss": 3.9802, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0660367160367486e-05, |
| "loss": 3.9948, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0651981212856966e-05, |
| "loss": 3.9832, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0643595265346446e-05, |
| "loss": 3.9845, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.063522569663966e-05, |
| "loss": 3.9841, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0626839749129135e-05, |
| "loss": 3.986, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.061845380161862e-05, |
| "loss": 3.9812, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.06100678541081e-05, |
| "loss": 3.9782, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.060168190659758e-05, |
| "loss": 3.9864, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.059331233789079e-05, |
| "loss": 3.975, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.058492639038027e-05, |
| "loss": 3.9808, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.057654044286975e-05, |
| "loss": 3.9893, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.056815449535923e-05, |
| "loss": 3.9874, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.055978492665244e-05, |
| "loss": 3.9782, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.055139897914192e-05, |
| "loss": 3.9594, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.05430130316314e-05, |
| "loss": 3.9849, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.053462708412088e-05, |
| "loss": 3.983, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.052625751541409e-05, |
| "loss": 3.9812, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.0517871567903575e-05, |
| "loss": 3.981, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.050950199919678e-05, |
| "loss": 3.9844, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.050111605168626e-05, |
| "loss": 3.9773, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0492730104175744e-05, |
| "loss": 3.9713, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0484344156665224e-05, |
| "loss": 3.9763, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0475958209154704e-05, |
| "loss": 3.9848, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0467572261644184e-05, |
| "loss": 3.9887, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0459186314133664e-05, |
| "loss": 3.9881, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0450800366623144e-05, |
| "loss": 3.9838, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0442414419112624e-05, |
| "loss": 3.9724, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0434044850405837e-05, |
| "loss": 3.9816, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0425658902895316e-05, |
| "loss": 3.9843, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0417272955384796e-05, |
| "loss": 3.9808, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0408919765481735e-05, |
| "loss": 3.9781, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0400533817971215e-05, |
| "loss": 3.978, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0392147870460698e-05, |
| "loss": 3.9673, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0383761922950178e-05, |
| "loss": 3.9898, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0375375975439658e-05, |
| "loss": 3.9633, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0366990027929138e-05, |
| "loss": 3.9745, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0358604080418618e-05, |
| "loss": 3.9852, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0350218132908098e-05, |
| "loss": 3.9791, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0341832185397578e-05, |
| "loss": 3.9735, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0333446237887058e-05, |
| "loss": 3.9814, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0325060290376538e-05, |
| "loss": 3.9748, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.031667434286602e-05, |
| "loss": 3.9801, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.030830477415923e-05, |
| "loss": 3.9779, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.029993520545244e-05, |
| "loss": 3.9771, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.029154925794192e-05, |
| "loss": 3.984, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.02831633104314e-05, |
| "loss": 3.9903, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0274777362920882e-05, |
| "loss": 3.9829, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0266391415410362e-05, |
| "loss": 3.9745, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0258005467899842e-05, |
| "loss": 3.9787, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0249619520389322e-05, |
| "loss": 3.983, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0241233572878795e-05, |
| "loss": 3.9736, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.023286400417201e-05, |
| "loss": 3.9751, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.022447805666149e-05, |
| "loss": 3.9787, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0216092109150968e-05, |
| "loss": 3.9886, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0207722540444184e-05, |
| "loss": 3.9768, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0199336592933664e-05, |
| "loss": 3.9812, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0190950645423144e-05, |
| "loss": 3.9713, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.018256469791262e-05, |
| "loss": 3.9784, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.01741787504021e-05, |
| "loss": 3.9759, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0165809181695316e-05, |
| "loss": 3.9889, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0157423234184796e-05, |
| "loss": 3.9824, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.014903728667427e-05, |
| "loss": 3.9658, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.014065133916375e-05, |
| "loss": 3.9804, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0132281770456965e-05, |
| "loss": 3.9839, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.012389582294644e-05, |
| "loss": 3.9837, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.011550987543592e-05, |
| "loss": 3.9775, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.01071239279254e-05, |
| "loss": 3.9752, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.009873798041488e-05, |
| "loss": 3.985, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.009035203290436e-05, |
| "loss": 3.9928, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.008196608539384e-05, |
| "loss": 3.9783, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.007358013788332e-05, |
| "loss": 3.9794, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.00651941903728e-05, |
| "loss": 3.9799, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0056808242862285e-05, |
| "loss": 3.9762, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0048422295351765e-05, |
| "loss": 3.9744, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0040036347841245e-05, |
| "loss": 3.9873, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0031650400330724e-05, |
| "loss": 3.9708, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0023280831623934e-05, |
| "loss": 3.9814, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.0014911262917146e-05, |
| "loss": 3.9782, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 3.0006525315406626e-05, |
| "loss": 3.9807, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.0401225090026855, |
| "eval_runtime": 321.1043, |
| "eval_samples_per_second": 1188.371, |
| "eval_steps_per_second": 37.137, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9998139367896106e-05, |
| "loss": 3.9703, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9989753420385586e-05, |
| "loss": 3.9705, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9981367472875066e-05, |
| "loss": 3.9885, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9972981525364546e-05, |
| "loss": 3.9812, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9964595577854026e-05, |
| "loss": 3.9909, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9956209630343506e-05, |
| "loss": 3.9703, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9947823682832982e-05, |
| "loss": 3.9846, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9939437735322462e-05, |
| "loss": 3.9755, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9931051787811942e-05, |
| "loss": 3.9799, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9922665840301422e-05, |
| "loss": 3.9771, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9914279892790902e-05, |
| "loss": 3.9866, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9905893945280382e-05, |
| "loss": 3.9847, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9897507997769862e-05, |
| "loss": 3.9751, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9889122050259345e-05, |
| "loss": 3.9691, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9880736102748825e-05, |
| "loss": 3.9724, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9872350155238305e-05, |
| "loss": 3.9668, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9863964207727785e-05, |
| "loss": 3.9768, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9855578260217265e-05, |
| "loss": 3.9749, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9847192312706745e-05, |
| "loss": 3.9752, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9838806365196222e-05, |
| "loss": 3.9973, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9830453175293167e-05, |
| "loss": 3.9826, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9822067227782647e-05, |
| "loss": 3.9847, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9813681280272127e-05, |
| "loss": 3.9799, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9805295332761607e-05, |
| "loss": 3.9855, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9796909385251087e-05, |
| "loss": 3.9754, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9788523437740567e-05, |
| "loss": 3.9715, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9780137490230043e-05, |
| "loss": 3.9797, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.977176792152326e-05, |
| "loss": 3.9719, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.976338197401274e-05, |
| "loss": 3.9593, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.975499602650222e-05, |
| "loss": 3.9745, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9746610078991692e-05, |
| "loss": 3.9795, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9738224131481175e-05, |
| "loss": 3.981, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9729838183970655e-05, |
| "loss": 3.976, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9721452236460135e-05, |
| "loss": 3.9855, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.971309904655708e-05, |
| "loss": 3.9673, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.970471309904656e-05, |
| "loss": 3.9726, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.969632715153604e-05, |
| "loss": 3.9589, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9687941204025517e-05, |
| "loss": 3.9794, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9679555256514997e-05, |
| "loss": 3.9707, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9671169309004477e-05, |
| "loss": 3.9761, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9662783361493957e-05, |
| "loss": 3.9758, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9654397413983437e-05, |
| "loss": 3.9799, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9646011466472917e-05, |
| "loss": 3.9743, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.963764189776613e-05, |
| "loss": 3.9764, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.962925595025561e-05, |
| "loss": 3.9807, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.962087000274509e-05, |
| "loss": 3.9668, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.961248405523457e-05, |
| "loss": 3.9808, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.960409810772405e-05, |
| "loss": 3.9624, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9595728539017258e-05, |
| "loss": 3.9668, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9587342591506738e-05, |
| "loss": 3.9826, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9578956643996218e-05, |
| "loss": 3.968, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.95705706964857e-05, |
| "loss": 3.9676, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.956218474897518e-05, |
| "loss": 3.9587, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.955379880146466e-05, |
| "loss": 3.9682, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.954541285395414e-05, |
| "loss": 3.9513, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.953702690644362e-05, |
| "loss": 3.9917, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.952865733773683e-05, |
| "loss": 3.9627, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.952027139022631e-05, |
| "loss": 3.9845, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9511885442715793e-05, |
| "loss": 3.9798, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9503499495205273e-05, |
| "loss": 3.9621, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9495129926498483e-05, |
| "loss": 3.9611, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9486743978987962e-05, |
| "loss": 3.9711, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9478358031477442e-05, |
| "loss": 3.9613, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9469972083966922e-05, |
| "loss": 3.9586, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9461602515260135e-05, |
| "loss": 3.9847, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9453216567749615e-05, |
| "loss": 3.9793, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9444830620239095e-05, |
| "loss": 3.9655, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9436444672728575e-05, |
| "loss": 3.9544, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9428058725218055e-05, |
| "loss": 3.9627, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9419705535314996e-05, |
| "loss": 3.9734, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9411319587804476e-05, |
| "loss": 3.9791, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9402933640293956e-05, |
| "loss": 3.9753, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9394547692783436e-05, |
| "loss": 3.9717, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9386161745272916e-05, |
| "loss": 3.9755, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9377775797762396e-05, |
| "loss": 3.9759, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.936940622905561e-05, |
| "loss": 3.9684, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.936102028154509e-05, |
| "loss": 3.9693, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.935263433403457e-05, |
| "loss": 3.9727, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.934424838652405e-05, |
| "loss": 3.9668, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.933586243901353e-05, |
| "loss": 3.9688, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9327492870306737e-05, |
| "loss": 3.976, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9319106922796217e-05, |
| "loss": 3.9809, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.93107209752857e-05, |
| "loss": 3.9652, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9302335027775174e-05, |
| "loss": 3.9526, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9293949080264654e-05, |
| "loss": 3.972, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.928557951155787e-05, |
| "loss": 3.9724, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.927719356404735e-05, |
| "loss": 3.973, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9268807616536826e-05, |
| "loss": 3.97, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9260438047830042e-05, |
| "loss": 3.9729, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9252052100319522e-05, |
| "loss": 3.9668, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9243666152809002e-05, |
| "loss": 3.9605, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9235280205298475e-05, |
| "loss": 3.9692, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9226894257787955e-05, |
| "loss": 3.9692, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.921850831027744e-05, |
| "loss": 3.9785, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.921012236276692e-05, |
| "loss": 3.9809, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.92017364152564e-05, |
| "loss": 3.9774, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.919335046774588e-05, |
| "loss": 3.9594, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.918496452023536e-05, |
| "loss": 3.971, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.917657857272484e-05, |
| "loss": 3.9736, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.916819262521432e-05, |
| "loss": 3.9738, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.915982305650753e-05, |
| "loss": 3.9667, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.915143710899701e-05, |
| "loss": 3.9664, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.914305116148649e-05, |
| "loss": 3.9608, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.913466521397597e-05, |
| "loss": 3.9797, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.912629564526918e-05, |
| "loss": 3.9517, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.911790969775866e-05, |
| "loss": 3.9658, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.910952375024814e-05, |
| "loss": 3.976, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9101154181541352e-05, |
| "loss": 3.9659, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9092768234030832e-05, |
| "loss": 3.9616, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9084382286520312e-05, |
| "loss": 3.973, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9075996339009792e-05, |
| "loss": 3.9628, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9067610391499272e-05, |
| "loss": 3.9753, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9059240822792484e-05, |
| "loss": 3.9705, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9050854875281964e-05, |
| "loss": 3.9659, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9042468927771444e-05, |
| "loss": 3.9717, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9034082980260924e-05, |
| "loss": 3.9819, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9025713411554133e-05, |
| "loss": 3.9715, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9017327464043613e-05, |
| "loss": 3.9649, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9008941516533093e-05, |
| "loss": 3.9685, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9000555569022573e-05, |
| "loss": 3.9724, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8992169621512057e-05, |
| "loss": 3.9683, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8983783674001537e-05, |
| "loss": 3.9635, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.897539772649101e-05, |
| "loss": 3.97, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.896701177898049e-05, |
| "loss": 3.9774, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8958642210273706e-05, |
| "loss": 3.9695, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8950256262763186e-05, |
| "loss": 3.9696, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8941886694056398e-05, |
| "loss": 3.9614, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8933500746545878e-05, |
| "loss": 3.9647, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8925114799035358e-05, |
| "loss": 3.9641, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8916728851524838e-05, |
| "loss": 3.9814, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.890834290401431e-05, |
| "loss": 3.9753, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8899973335307527e-05, |
| "loss": 3.9597, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.889158738779701e-05, |
| "loss": 3.9693, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8883201440286483e-05, |
| "loss": 3.971, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8874815492775963e-05, |
| "loss": 3.9686, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8866429545265443e-05, |
| "loss": 3.9749, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8858043597754923e-05, |
| "loss": 3.9629, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8849674029048136e-05, |
| "loss": 3.9782, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8841288081537616e-05, |
| "loss": 3.981, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8832902134027096e-05, |
| "loss": 3.9698, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8824516186516576e-05, |
| "loss": 3.9696, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8816130239006056e-05, |
| "loss": 3.9707, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8807744291495536e-05, |
| "loss": 3.9644, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8799358343985016e-05, |
| "loss": 3.9653, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8790972396474496e-05, |
| "loss": 3.9797, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8782619206571437e-05, |
| "loss": 3.9601, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8774233259060917e-05, |
| "loss": 3.9684, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8765847311550397e-05, |
| "loss": 3.9728, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8757461364039877e-05, |
| "loss": 3.9693, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.035074234008789, |
| "eval_runtime": 294.9313, |
| "eval_samples_per_second": 1293.83, |
| "eval_steps_per_second": 40.433, |
| "step": 1297440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8749075416529357e-05, |
| "loss": 3.9608, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.874068946901884e-05, |
| "loss": 3.9632, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.873230352150832e-05, |
| "loss": 3.9796, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.872393395280153e-05, |
| "loss": 3.9686, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.871554800529101e-05, |
| "loss": 3.982, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.870716205778049e-05, |
| "loss": 3.9584, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.869877611026997e-05, |
| "loss": 3.9765, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.869039016275945e-05, |
| "loss": 3.9623, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8682004215248933e-05, |
| "loss": 3.9697, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.867363464654214e-05, |
| "loss": 3.9713, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.866524869903162e-05, |
| "loss": 3.9719, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.86568627515211e-05, |
| "loss": 3.9752, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.864847680401058e-05, |
| "loss": 3.9621, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.864009085650006e-05, |
| "loss": 3.9632, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.863170490898954e-05, |
| "loss": 3.9619, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8623318961479025e-05, |
| "loss": 3.962, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8614933013968498e-05, |
| "loss": 3.961, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8606563445261714e-05, |
| "loss": 3.9657, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8598177497751194e-05, |
| "loss": 3.9649, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.858979155024067e-05, |
| "loss": 3.9922, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.858140560273015e-05, |
| "loss": 3.9696, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8573036034023366e-05, |
| "loss": 3.9758, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8564666465316575e-05, |
| "loss": 3.9708, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8556280517806055e-05, |
| "loss": 3.9734, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8547894570295535e-05, |
| "loss": 3.9662, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8539508622785015e-05, |
| "loss": 3.9675, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8531122675274495e-05, |
| "loss": 3.9665, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.852273672776397e-05, |
| "loss": 3.9627, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.851435078025345e-05, |
| "loss": 3.9488, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.850596483274293e-05, |
| "loss": 3.9672, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.849759526403614e-05, |
| "loss": 3.9677, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8489209316525624e-05, |
| "loss": 3.971, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8480823369015104e-05, |
| "loss": 3.9664, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8472437421504584e-05, |
| "loss": 3.9711, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8464067852797793e-05, |
| "loss": 3.9617, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8455681905287273e-05, |
| "loss": 3.9632, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.844731233658049e-05, |
| "loss": 3.9529, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.843892638906997e-05, |
| "loss": 3.9664, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8430540441559445e-05, |
| "loss": 3.9622, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8422154494048925e-05, |
| "loss": 3.962, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8413768546538405e-05, |
| "loss": 3.9664, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8405398977831614e-05, |
| "loss": 3.9725, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8397013030321094e-05, |
| "loss": 3.965, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8388627082810578e-05, |
| "loss": 3.9677, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8380241135300058e-05, |
| "loss": 3.9722, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8371855187789538e-05, |
| "loss": 3.9582, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8363469240279018e-05, |
| "loss": 3.967, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8355083292768497e-05, |
| "loss": 3.9576, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8346697345257977e-05, |
| "loss": 3.9538, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8338327776551187e-05, |
| "loss": 3.974, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.83299582078444e-05, |
| "loss": 3.9582, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.832157226033388e-05, |
| "loss": 3.957, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.831318631282336e-05, |
| "loss": 3.9513, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.830480036531284e-05, |
| "loss": 3.9577, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.829641441780232e-05, |
| "loss": 3.9469, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.82880284702918e-05, |
| "loss": 3.9766, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.827964252278128e-05, |
| "loss": 3.9558, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.827125657527076e-05, |
| "loss": 3.9702, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.826288700656397e-05, |
| "loss": 3.9695, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.825450105905345e-05, |
| "loss": 3.9548, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.824611511154293e-05, |
| "loss": 3.9495, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.823772916403241e-05, |
| "loss": 3.9652, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.822934321652189e-05, |
| "loss": 3.9499, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8220973647815104e-05, |
| "loss": 3.9495, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8212587700304583e-05, |
| "loss": 3.9698, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8204201752794063e-05, |
| "loss": 3.9704, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8195815805283543e-05, |
| "loss": 3.9544, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8187446236576752e-05, |
| "loss": 3.9476, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8179060289066232e-05, |
| "loss": 3.9528, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8170674341555712e-05, |
| "loss": 3.9633, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8162321151652654e-05, |
| "loss": 3.9649, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8153935204142134e-05, |
| "loss": 3.9654, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8145549256631614e-05, |
| "loss": 3.9625, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8137163309121094e-05, |
| "loss": 3.9677, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8128777361610574e-05, |
| "loss": 3.9708, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8120391414100057e-05, |
| "loss": 3.9538, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8112005466589537e-05, |
| "loss": 3.9634, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8103619519079017e-05, |
| "loss": 3.9636, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8095233571568497e-05, |
| "loss": 3.9512, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8086864002861706e-05, |
| "loss": 3.9642, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8078478055351186e-05, |
| "loss": 3.9671, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8070092107840666e-05, |
| "loss": 3.9725, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.806170616033015e-05, |
| "loss": 3.9503, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.805332021281963e-05, |
| "loss": 3.9459, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8044934265309103e-05, |
| "loss": 3.9612, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8036548317798582e-05, |
| "loss": 3.9617, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8028162370288062e-05, |
| "loss": 3.9668, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8019792801581275e-05, |
| "loss": 3.9604, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.801143961167822e-05, |
| "loss": 3.9628, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.80030536641677e-05, |
| "loss": 3.9592, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.799466771665718e-05, |
| "loss": 3.9532, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.798628176914666e-05, |
| "loss": 3.9589, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.797789582163614e-05, |
| "loss": 3.9596, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.796950987412562e-05, |
| "loss": 3.9687, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7961123926615103e-05, |
| "loss": 3.9714, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7952737979104576e-05, |
| "loss": 3.9685, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7944352031594056e-05, |
| "loss": 3.9538, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7935966084083536e-05, |
| "loss": 3.9608, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7927580136573016e-05, |
| "loss": 3.9589, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7919194189062496e-05, |
| "loss": 3.9701, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.791082462035571e-05, |
| "loss": 3.9565, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.790243867284519e-05, |
| "loss": 3.9564, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.789405272533467e-05, |
| "loss": 3.9511, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7885683156627878e-05, |
| "loss": 3.9706, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7877297209117358e-05, |
| "loss": 3.9428, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.786891126160684e-05, |
| "loss": 3.9574, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.786052531409632e-05, |
| "loss": 3.9656, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.78521393665858e-05, |
| "loss": 3.9539, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.784376979787901e-05, |
| "loss": 3.9547, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.783540022917222e-05, |
| "loss": 3.9654, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7827014281661702e-05, |
| "loss": 3.9531, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7818628334151182e-05, |
| "loss": 3.9595, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7810242386640662e-05, |
| "loss": 3.9641, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.780187281793387e-05, |
| "loss": 3.9557, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.779348687042335e-05, |
| "loss": 3.9633, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.778510092291283e-05, |
| "loss": 3.9725, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.777671497540231e-05, |
| "loss": 3.9622, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7768329027891795e-05, |
| "loss": 3.9564, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7759943080381274e-05, |
| "loss": 3.961, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7751557132870754e-05, |
| "loss": 3.9621, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7743171185360234e-05, |
| "loss": 3.9568, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7734785237849714e-05, |
| "loss": 3.9556, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7726399290339194e-05, |
| "loss": 3.9564, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7718013342828674e-05, |
| "loss": 3.9717, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7709627395318154e-05, |
| "loss": 3.9595, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7701257826611367e-05, |
| "loss": 3.9613, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7692871879100847e-05, |
| "loss": 3.9514, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7684485931590327e-05, |
| "loss": 3.955, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7676099984079807e-05, |
| "loss": 3.9545, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7667714036569287e-05, |
| "loss": 3.9731, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7659344467862496e-05, |
| "loss": 3.9672, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7650974899155708e-05, |
| "loss": 3.9498, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7642588951645188e-05, |
| "loss": 3.9569, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7634219382938397e-05, |
| "loss": 3.9636, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7625833435427877e-05, |
| "loss": 3.9625, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7617447487917357e-05, |
| "loss": 3.9669, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7609061540406837e-05, |
| "loss": 3.9548, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.760067559289632e-05, |
| "loss": 3.9631, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.75922896453858e-05, |
| "loss": 3.969, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.758390369787528e-05, |
| "loss": 3.9596, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.757551775036476e-05, |
| "loss": 3.9615, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7567131802854233e-05, |
| "loss": 3.9657, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.755876223414745e-05, |
| "loss": 3.9565, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.755037628663693e-05, |
| "loss": 3.9575, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7541990339126406e-05, |
| "loss": 3.9675, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7533604391615886e-05, |
| "loss": 3.9525, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7525218444105366e-05, |
| "loss": 3.9598, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7516832496594846e-05, |
| "loss": 3.9624, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7508446549084326e-05, |
| "loss": 3.9571, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.030737400054932, |
| "eval_runtime": 293.6612, |
| "eval_samples_per_second": 1299.426, |
| "eval_steps_per_second": 40.608, |
| "step": 1373760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7500060601573806e-05, |
| "loss": 3.9508, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.749167465406329e-05, |
| "loss": 3.955, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.748328870655277e-05, |
| "loss": 3.9699, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.747490275904225e-05, |
| "loss": 3.9551, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.746651681153173e-05, |
| "loss": 3.972, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.745813086402121e-05, |
| "loss": 3.9513, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.744974491651069e-05, |
| "loss": 3.9676, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.744135896900017e-05, |
| "loss": 3.9509, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.743297302148965e-05, |
| "loss": 3.9655, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7424587073979125e-05, |
| "loss": 3.9594, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7416201126468605e-05, |
| "loss": 3.9608, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7407815178958085e-05, |
| "loss": 3.9685, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7399429231447565e-05, |
| "loss": 3.951, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7391043283937045e-05, |
| "loss": 3.9541, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7382657336426525e-05, |
| "loss": 3.9522, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7374271388916005e-05, |
| "loss": 3.9498, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7365885441405488e-05, |
| "loss": 3.9529, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7357515872698697e-05, |
| "loss": 3.9582, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7349129925188177e-05, |
| "loss": 3.9564, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7340743977677657e-05, |
| "loss": 3.9803, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7332358030167137e-05, |
| "loss": 3.962, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7323972082656617e-05, |
| "loss": 3.9645, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7315586135146097e-05, |
| "loss": 3.9626, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7307200187635577e-05, |
| "loss": 3.9643, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.729881424012506e-05, |
| "loss": 3.9591, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.729044467141827e-05, |
| "loss": 3.9554, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.728205872390775e-05, |
| "loss": 3.9577, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.727368915520096e-05, |
| "loss": 3.9535, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.726530320769044e-05, |
| "loss": 3.941, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7256917260179922e-05, |
| "loss": 3.9567, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7248531312669402e-05, |
| "loss": 3.9623, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7240145365158882e-05, |
| "loss": 3.9584, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.723177579645209e-05, |
| "loss": 3.9604, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.722338984894157e-05, |
| "loss": 3.9637, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.721500390143105e-05, |
| "loss": 3.9503, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.720661795392053e-05, |
| "loss": 3.952, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7198248385213743e-05, |
| "loss": 3.9486, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7189862437703223e-05, |
| "loss": 3.9527, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7181476490192703e-05, |
| "loss": 3.9509, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7173090542682183e-05, |
| "loss": 3.9533, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.716470459517166e-05, |
| "loss": 3.96, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.715631864766114e-05, |
| "loss": 3.9631, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.714793270015062e-05, |
| "loss": 3.9526, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.71395467526401e-05, |
| "loss": 3.9599, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.713116080512958e-05, |
| "loss": 3.9633, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.712277485761906e-05, |
| "loss": 3.9466, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.711438891010854e-05, |
| "loss": 3.9584, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.710600296259802e-05, |
| "loss": 3.9527, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.70976170150875e-05, |
| "loss": 3.9413, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7089247446380712e-05, |
| "loss": 3.9629, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7080861498870192e-05, |
| "loss": 3.9517, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7072475551359672e-05, |
| "loss": 3.9474, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.706408960384915e-05, |
| "loss": 3.9464, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.705572003514236e-05, |
| "loss": 3.9492, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7047334087631844e-05, |
| "loss": 3.9354, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7038948140121324e-05, |
| "loss": 3.9623, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7030562192610804e-05, |
| "loss": 3.9501, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7022192623904013e-05, |
| "loss": 3.9606, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7013806676393493e-05, |
| "loss": 3.9623, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7005420728882973e-05, |
| "loss": 3.9477, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6997034781372453e-05, |
| "loss": 3.9396, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6988648833861936e-05, |
| "loss": 3.9579, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6980279265155145e-05, |
| "loss": 3.9414, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6971893317644625e-05, |
| "loss": 3.9399, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6963507370134105e-05, |
| "loss": 3.9622, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6955121422623585e-05, |
| "loss": 3.9624, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6946751853916798e-05, |
| "loss": 3.9458, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6938365906406278e-05, |
| "loss": 3.9387, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6929979958895758e-05, |
| "loss": 3.943, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6921594011385238e-05, |
| "loss": 3.9507, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6913240821482176e-05, |
| "loss": 3.9571, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.690485487397166e-05, |
| "loss": 3.9546, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.689646892646114e-05, |
| "loss": 3.9579, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.688808297895062e-05, |
| "loss": 3.9558, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.68796970314401e-05, |
| "loss": 3.9633, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6871327462733308e-05, |
| "loss": 3.9463, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6862941515222788e-05, |
| "loss": 3.9529, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6854555567712268e-05, |
| "loss": 3.9574, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.684616962020175e-05, |
| "loss": 3.9415, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.683780005149496e-05, |
| "loss": 3.9537, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.682941410398444e-05, |
| "loss": 3.9628, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.682102815647392e-05, |
| "loss": 3.9611, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.68126422089634e-05, |
| "loss": 3.9457, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.680425626145288e-05, |
| "loss": 3.9359, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.679587031394236e-05, |
| "loss": 3.9537, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6787500745235573e-05, |
| "loss": 3.9457, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6779114797725053e-05, |
| "loss": 3.9591, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6770728850214533e-05, |
| "loss": 3.9516, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6762359281507742e-05, |
| "loss": 3.9548, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6753973333997222e-05, |
| "loss": 3.9501, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6745587386486705e-05, |
| "loss": 3.9418, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6737201438976185e-05, |
| "loss": 3.9516, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6728815491465665e-05, |
| "loss": 3.95, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6720429543955145e-05, |
| "loss": 3.9626, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6712043596444618e-05, |
| "loss": 3.9656, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6703657648934098e-05, |
| "loss": 3.9569, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6695288080227314e-05, |
| "loss": 3.9414, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6686918511520526e-05, |
| "loss": 3.9549, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6678532564010006e-05, |
| "loss": 3.9499, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6670146616499486e-05, |
| "loss": 3.9611, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6661760668988966e-05, |
| "loss": 3.9453, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6653374721478443e-05, |
| "loss": 3.9499, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6644988773967923e-05, |
| "loss": 3.9401, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6636602826457403e-05, |
| "loss": 3.9611, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6628216878946883e-05, |
| "loss": 3.9341, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6619830931436363e-05, |
| "loss": 3.9505, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6611444983925843e-05, |
| "loss": 3.9488, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6603075415219052e-05, |
| "loss": 3.9501, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6594689467708532e-05, |
| "loss": 3.9442, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6586303520198015e-05, |
| "loss": 3.9554, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6577917572687495e-05, |
| "loss": 3.9515, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6569531625176975e-05, |
| "loss": 3.9468, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6561162056470184e-05, |
| "loss": 3.9538, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6552776108959664e-05, |
| "loss": 3.9431, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6544390161449144e-05, |
| "loss": 3.9619, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6536004213938624e-05, |
| "loss": 3.9672, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6527634645231836e-05, |
| "loss": 3.9493, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6519248697721316e-05, |
| "loss": 3.9473, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6510862750210796e-05, |
| "loss": 3.9507, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6502476802700276e-05, |
| "loss": 3.9517, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6494090855189756e-05, |
| "loss": 3.9474, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6485704907679236e-05, |
| "loss": 3.9486, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6477318960168716e-05, |
| "loss": 3.9456, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.64689330126582e-05, |
| "loss": 3.9646, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.646056344395141e-05, |
| "loss": 3.9527, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.645217749644089e-05, |
| "loss": 3.9486, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.644379154893037e-05, |
| "loss": 3.9426, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.643540560141985e-05, |
| "loss": 3.9494, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.642701965390933e-05, |
| "loss": 3.9429, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.641865008520254e-05, |
| "loss": 3.9649, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.641026413769202e-05, |
| "loss": 3.9585, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.64018781901815e-05, |
| "loss": 3.9471, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6393492242670974e-05, |
| "loss": 3.9431, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.638512267396419e-05, |
| "loss": 3.9531, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6376753105257402e-05, |
| "loss": 3.9498, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6368367157746882e-05, |
| "loss": 3.96, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6359981210236362e-05, |
| "loss": 3.9434, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6351595262725842e-05, |
| "loss": 3.9605, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6343209315215322e-05, |
| "loss": 3.955, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6334823367704802e-05, |
| "loss": 3.9521, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.632643742019428e-05, |
| "loss": 3.9536, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.631805147268376e-05, |
| "loss": 3.9549, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6309681903976975e-05, |
| "loss": 3.9465, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6301295956466448e-05, |
| "loss": 3.9499, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6292910008955928e-05, |
| "loss": 3.9558, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6284524061445408e-05, |
| "loss": 3.9479, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6276154492738624e-05, |
| "loss": 3.9471, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.62677685452281e-05, |
| "loss": 3.9531, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.625938259771758e-05, |
| "loss": 3.9487, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.027097702026367, |
| "eval_runtime": 297.0227, |
| "eval_samples_per_second": 1284.72, |
| "eval_steps_per_second": 40.148, |
| "step": 1450080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.625099665020706e-05, |
| "loss": 3.9461, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.624261070269654e-05, |
| "loss": 3.9444, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6234241133989752e-05, |
| "loss": 3.963, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6225855186479232e-05, |
| "loss": 3.9466, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6217469238968712e-05, |
| "loss": 3.9662, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6209083291458192e-05, |
| "loss": 3.9396, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6200730101555137e-05, |
| "loss": 3.959, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6192344154044617e-05, |
| "loss": 3.9454, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6183958206534097e-05, |
| "loss": 3.9509, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.617558863782731e-05, |
| "loss": 3.9522, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.616720269031679e-05, |
| "loss": 3.9511, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.615881674280627e-05, |
| "loss": 3.96, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.615043079529575e-05, |
| "loss": 3.944, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6142044847785223e-05, |
| "loss": 3.9444, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6133658900274706e-05, |
| "loss": 3.9474, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6125272952764186e-05, |
| "loss": 3.938, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6116887005253666e-05, |
| "loss": 3.9466, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6108517436546875e-05, |
| "loss": 3.9492, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6100131489036355e-05, |
| "loss": 3.9464, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6091745541525835e-05, |
| "loss": 3.9692, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6083359594015315e-05, |
| "loss": 3.9577, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6074973646504798e-05, |
| "loss": 3.9596, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6066604077798007e-05, |
| "loss": 3.9531, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6058218130287487e-05, |
| "loss": 3.9533, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6049848561580696e-05, |
| "loss": 3.9486, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6041462614070176e-05, |
| "loss": 3.9486, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.603307666655966e-05, |
| "loss": 3.9512, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.602469071904914e-05, |
| "loss": 3.9428, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.601630477153862e-05, |
| "loss": 3.9382, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.60079188240281e-05, |
| "loss": 3.9443, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.599953287651758e-05, |
| "loss": 3.9538, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.599114692900706e-05, |
| "loss": 3.9527, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.598277736030027e-05, |
| "loss": 3.9536, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5974391412789752e-05, |
| "loss": 3.9574, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5966005465279232e-05, |
| "loss": 3.9373, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5957619517768712e-05, |
| "loss": 3.9439, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.594924994906192e-05, |
| "loss": 3.9444, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.59408640015514e-05, |
| "loss": 3.9398, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.593247805404088e-05, |
| "loss": 3.9434, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.592409210653036e-05, |
| "loss": 3.9416, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.591570615901984e-05, |
| "loss": 3.9516, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5907320211509324e-05, |
| "loss": 3.9546, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5898934263998804e-05, |
| "loss": 3.9475, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5890548316488284e-05, |
| "loss": 3.945, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5882178747781493e-05, |
| "loss": 3.9583, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5873792800270973e-05, |
| "loss": 3.9391, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5865406852760453e-05, |
| "loss": 3.9499, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5857020905249933e-05, |
| "loss": 3.9435, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5848651336543146e-05, |
| "loss": 3.9347, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5840265389032625e-05, |
| "loss": 3.9554, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5831879441522105e-05, |
| "loss": 3.9436, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.582349349401158e-05, |
| "loss": 3.9427, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5815107546501062e-05, |
| "loss": 3.9347, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5806737977794278e-05, |
| "loss": 3.9406, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5798352030283758e-05, |
| "loss": 3.9274, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.578996608277323e-05, |
| "loss": 3.9526, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5781596514066447e-05, |
| "loss": 3.9437, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5773210566555927e-05, |
| "loss": 3.9474, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5764824619045407e-05, |
| "loss": 3.9542, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5756438671534883e-05, |
| "loss": 3.941, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5748052724024363e-05, |
| "loss": 3.93, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5739666776513843e-05, |
| "loss": 3.9516, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5731280829003323e-05, |
| "loss": 3.9333, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5722894881492803e-05, |
| "loss": 3.9323, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5714525312786016e-05, |
| "loss": 3.9528, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5706139365275496e-05, |
| "loss": 3.9501, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5697769796568705e-05, |
| "loss": 3.9374, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5689383849058185e-05, |
| "loss": 3.936, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5680997901547665e-05, |
| "loss": 3.9301, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5672611954037145e-05, |
| "loss": 3.9384, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5664242385330357e-05, |
| "loss": 3.95, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5655856437819837e-05, |
| "loss": 3.95, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5647470490309317e-05, |
| "loss": 3.9493, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5639084542798797e-05, |
| "loss": 3.9433, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5630698595288277e-05, |
| "loss": 3.9608, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5622312647777757e-05, |
| "loss": 3.9349, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5613926700267237e-05, |
| "loss": 3.9502, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5605540752756717e-05, |
| "loss": 3.9454, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.559717118404993e-05, |
| "loss": 3.9303, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.558878523653941e-05, |
| "loss": 3.9508, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.558039928902889e-05, |
| "loss": 3.9532, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.557201334151837e-05, |
| "loss": 3.9467, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.556362739400785e-05, |
| "loss": 3.9383, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.555524144649733e-05, |
| "loss": 3.929, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.554685549898681e-05, |
| "loss": 3.944, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5538469551476292e-05, |
| "loss": 3.9438, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.55300999827695e-05, |
| "loss": 3.948, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.552171403525898e-05, |
| "loss": 3.939, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5513360845355923e-05, |
| "loss": 3.9489, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5504974897845403e-05, |
| "loss": 3.9386, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5496588950334883e-05, |
| "loss": 3.9387, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5488203002824363e-05, |
| "loss": 3.9381, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5479817055313843e-05, |
| "loss": 3.9363, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5471431107803323e-05, |
| "loss": 3.9575, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5463045160292803e-05, |
| "loss": 3.9525, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5454659212782283e-05, |
| "loss": 3.9536, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5446273265271763e-05, |
| "loss": 3.9316, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5437887317761243e-05, |
| "loss": 3.9478, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.542950137025072e-05, |
| "loss": 3.9421, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.54211154227402e-05, |
| "loss": 3.9488, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5412745854033415e-05, |
| "loss": 3.94, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.540435990652289e-05, |
| "loss": 3.9419, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.539597395901237e-05, |
| "loss": 3.933, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.538758801150185e-05, |
| "loss": 3.9542, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.537920206399133e-05, |
| "loss": 3.9252, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.537083249528454e-05, |
| "loss": 3.9424, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.536244654777402e-05, |
| "loss": 3.9414, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.53540606002635e-05, |
| "loss": 3.9407, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5345674652752984e-05, |
| "loss": 3.9359, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5337288705242464e-05, |
| "loss": 3.951, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5328902757731944e-05, |
| "loss": 3.9393, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5320516810221424e-05, |
| "loss": 3.9375, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5312130862710904e-05, |
| "loss": 3.9486, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5303744915200383e-05, |
| "loss": 3.9363, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5295375346493593e-05, |
| "loss": 3.9493, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5287005777786805e-05, |
| "loss": 3.9574, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5278619830276285e-05, |
| "loss": 3.9448, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5270233882765765e-05, |
| "loss": 3.9417, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5261847935255245e-05, |
| "loss": 3.9383, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5253461987744725e-05, |
| "loss": 3.9423, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5245076040234205e-05, |
| "loss": 3.94, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5236690092723685e-05, |
| "loss": 3.9459, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5228304145213165e-05, |
| "loss": 3.9317, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5219934576506377e-05, |
| "loss": 3.9542, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5211548628995857e-05, |
| "loss": 3.946, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5203162681485337e-05, |
| "loss": 3.9435, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5194776733974817e-05, |
| "loss": 3.9304, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5186407165268026e-05, |
| "loss": 3.9421, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.517802121775751e-05, |
| "loss": 3.9371, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.516963527024699e-05, |
| "loss": 3.9504, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.516124932273647e-05, |
| "loss": 3.9541, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.515286337522595e-05, |
| "loss": 3.944, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.514449380651916e-05, |
| "loss": 3.929, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.513610785900864e-05, |
| "loss": 3.945, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.512772191149812e-05, |
| "loss": 3.9404, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5119335963987602e-05, |
| "loss": 3.9532, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5110950016477075e-05, |
| "loss": 3.9343, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5102564068966555e-05, |
| "loss": 3.9483, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.509419450025977e-05, |
| "loss": 3.9488, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.508580855274925e-05, |
| "loss": 3.9491, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5077422605238727e-05, |
| "loss": 3.9463, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5069036657728207e-05, |
| "loss": 3.9456, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5060650710217687e-05, |
| "loss": 3.9365, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5052264762707167e-05, |
| "loss": 3.9429, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5043878815196647e-05, |
| "loss": 3.9463, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5035492867686127e-05, |
| "loss": 3.943, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5027106920175607e-05, |
| "loss": 3.9376, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.501873735146882e-05, |
| "loss": 3.9471, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.50103514039583e-05, |
| "loss": 3.9399, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.023096561431885, |
| "eval_runtime": 294.3715, |
| "eval_samples_per_second": 1296.291, |
| "eval_steps_per_second": 40.51, |
| "step": 1526400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.500196545644778e-05, |
| "loss": 3.9333, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.499357950893726e-05, |
| "loss": 3.9375, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.498519356142674e-05, |
| "loss": 3.9523, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.497680761391622e-05, |
| "loss": 3.9395, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.49684216664057e-05, |
| "loss": 3.9549, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.496003571889518e-05, |
| "loss": 3.9377, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.495164977138466e-05, |
| "loss": 3.9462, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.494326382387414e-05, |
| "loss": 3.9399, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.493487787636362e-05, |
| "loss": 3.9416, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.49264919288531e-05, |
| "loss": 3.9436, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.491810598134258e-05, |
| "loss": 3.9446, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4909720033832056e-05, |
| "loss": 3.9509, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4901334086321536e-05, |
| "loss": 3.9371, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.489294813881102e-05, |
| "loss": 3.938, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.48845621913005e-05, |
| "loss": 3.939, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.487617624378998e-05, |
| "loss": 3.9284, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.486779029627946e-05, |
| "loss": 3.9388, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.485940434876894e-05, |
| "loss": 3.9411, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4851018401258415e-05, |
| "loss": 3.9362, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4842632453747895e-05, |
| "loss": 3.9611, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.483426288504111e-05, |
| "loss": 3.9464, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.482587693753059e-05, |
| "loss": 3.9511, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4817490990020068e-05, |
| "loss": 3.9452, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4809105042509548e-05, |
| "loss": 3.9457, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.480073547380276e-05, |
| "loss": 3.9387, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.479234952629224e-05, |
| "loss": 3.9439, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.478396357878172e-05, |
| "loss": 3.9411, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.47755776312712e-05, |
| "loss": 3.9354, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.476719168376068e-05, |
| "loss": 3.9336, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.475882211505389e-05, |
| "loss": 3.9332, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.475043616754337e-05, |
| "loss": 3.942, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.474205022003285e-05, |
| "loss": 3.946, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4733664272522332e-05, |
| "loss": 3.9432, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.472529470381554e-05, |
| "loss": 3.9473, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4716925135108754e-05, |
| "loss": 3.9299, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4708539187598234e-05, |
| "loss": 3.9376, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4700169618891443e-05, |
| "loss": 3.9365, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4691783671380926e-05, |
| "loss": 3.929, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4683397723870406e-05, |
| "loss": 3.9389, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4675011776359886e-05, |
| "loss": 3.9306, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4666625828849363e-05, |
| "loss": 3.9452, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4658239881338843e-05, |
| "loss": 3.9445, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4649853933828323e-05, |
| "loss": 3.9399, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4641467986317803e-05, |
| "loss": 3.9338, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4633082038807282e-05, |
| "loss": 3.9515, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4624696091296766e-05, |
| "loss": 3.9301, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4616310143786242e-05, |
| "loss": 3.9439, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4607924196275722e-05, |
| "loss": 3.9346, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4599554627568935e-05, |
| "loss": 3.9216, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4591168680058415e-05, |
| "loss": 3.9478, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4582799111351627e-05, |
| "loss": 3.9339, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4574413163841107e-05, |
| "loss": 3.9337, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4566027216330587e-05, |
| "loss": 3.9291, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4557641268820067e-05, |
| "loss": 3.928, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4549255321309544e-05, |
| "loss": 3.9254, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4540869373799024e-05, |
| "loss": 3.9348, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4532483426288504e-05, |
| "loss": 3.9381, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4524097478777987e-05, |
| "loss": 3.9396, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4515711531267467e-05, |
| "loss": 3.9449, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4507325583756947e-05, |
| "loss": 3.9333, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4498939636246423e-05, |
| "loss": 3.9216, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4490570067539636e-05, |
| "loss": 3.9447, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4482184120029116e-05, |
| "loss": 3.9271, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4473798172518596e-05, |
| "loss": 3.9209, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4465412225008076e-05, |
| "loss": 3.9486, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4457042656301288e-05, |
| "loss": 3.9423, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4448656708790768e-05, |
| "loss": 3.9326, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4440270761280248e-05, |
| "loss": 3.9288, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4431884813769725e-05, |
| "loss": 3.9187, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4423498866259205e-05, |
| "loss": 3.9315, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4415112918748688e-05, |
| "loss": 3.9453, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4406726971238168e-05, |
| "loss": 3.9383, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4398341023727648e-05, |
| "loss": 3.9396, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4389971455020857e-05, |
| "loss": 3.934, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4381585507510337e-05, |
| "loss": 3.9527, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4373199559999817e-05, |
| "loss": 3.9271, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4364813612489297e-05, |
| "loss": 3.9413, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.435644404378251e-05, |
| "loss": 3.9393, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4348074475075722e-05, |
| "loss": 3.9175, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.43396885275652e-05, |
| "loss": 3.9457, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.433130258005468e-05, |
| "loss": 3.9426, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.432291663254416e-05, |
| "loss": 3.9397, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4314530685033642e-05, |
| "loss": 3.9327, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.430614473752312e-05, |
| "loss": 3.9197, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.42977587900126e-05, |
| "loss": 3.9372, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4289372842502078e-05, |
| "loss": 3.9321, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.428100327379529e-05, |
| "loss": 3.9413, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.427261732628477e-05, |
| "loss": 3.9331, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.426423137877425e-05, |
| "loss": 3.939, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4255861810067463e-05, |
| "loss": 3.9313, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4247475862556943e-05, |
| "loss": 3.9339, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4239089915046423e-05, |
| "loss": 3.9306, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4230703967535903e-05, |
| "loss": 3.9291, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.422231802002538e-05, |
| "loss": 3.9479, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.421393207251486e-05, |
| "loss": 3.9433, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4205546125004343e-05, |
| "loss": 3.9497, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4197160177493823e-05, |
| "loss": 3.9221, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4188790608787032e-05, |
| "loss": 3.9371, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4180404661276512e-05, |
| "loss": 3.9354, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4172018713765992e-05, |
| "loss": 3.9388, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4163632766255472e-05, |
| "loss": 3.9352, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4155263197548684e-05, |
| "loss": 3.9295, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4146877250038164e-05, |
| "loss": 3.9295, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4138491302527644e-05, |
| "loss": 3.9451, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4130121733820853e-05, |
| "loss": 3.9172, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4121735786310333e-05, |
| "loss": 3.9325, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4113349838799813e-05, |
| "loss": 3.929, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4104963891289296e-05, |
| "loss": 3.9375, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4096577943778776e-05, |
| "loss": 3.9288, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4088208375071986e-05, |
| "loss": 3.9409, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4079822427561466e-05, |
| "loss": 3.9342, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4071436480050945e-05, |
| "loss": 3.9278, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4063050532540425e-05, |
| "loss": 3.9392, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4054664585029905e-05, |
| "loss": 3.9292, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4046278637519385e-05, |
| "loss": 3.9422, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4037892690008865e-05, |
| "loss": 3.947, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4029523121302078e-05, |
| "loss": 3.9353, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4021137173791554e-05, |
| "loss": 3.9306, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4012751226281034e-05, |
| "loss": 3.9333, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4004365278770514e-05, |
| "loss": 3.9328, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3995979331259998e-05, |
| "loss": 3.9346, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3987609762553207e-05, |
| "loss": 3.9361, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3979223815042687e-05, |
| "loss": 3.9252, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3970837867532167e-05, |
| "loss": 3.9469, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3962451920021647e-05, |
| "loss": 3.9371, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3954065972511127e-05, |
| "loss": 3.9392, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3945680025000606e-05, |
| "loss": 3.923, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.393731045629382e-05, |
| "loss": 3.9329, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.39289245087833e-05, |
| "loss": 3.9297, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.392053856127278e-05, |
| "loss": 3.9407, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.391215261376226e-05, |
| "loss": 3.9432, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3903766666251735e-05, |
| "loss": 3.9393, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.389538071874122e-05, |
| "loss": 3.9204, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.388701115003443e-05, |
| "loss": 3.9404, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.387862520252391e-05, |
| "loss": 3.9316, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3870239255013388e-05, |
| "loss": 3.9452, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3861853307502868e-05, |
| "loss": 3.9274, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3853467359992348e-05, |
| "loss": 3.9437, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3845081412481828e-05, |
| "loss": 3.9389, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.383671184377504e-05, |
| "loss": 3.9435, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.382832589626452e-05, |
| "loss": 3.9341, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3819939948754e-05, |
| "loss": 3.9362, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.381155400124348e-05, |
| "loss": 3.9282, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.380316805373296e-05, |
| "loss": 3.9376, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.379478210622244e-05, |
| "loss": 3.9382, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.378639615871192e-05, |
| "loss": 3.9362, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3778026590005132e-05, |
| "loss": 3.9302, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3769640642494612e-05, |
| "loss": 3.9355, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3761254694984092e-05, |
| "loss": 3.937, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.019817352294922, |
| "eval_runtime": 287.439, |
| "eval_samples_per_second": 1327.555, |
| "eval_steps_per_second": 41.487, |
| "step": 1602720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.375286874747357e-05, |
| "loss": 3.9329, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.374448279996305e-05, |
| "loss": 3.9275, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.373611323125626e-05, |
| "loss": 3.944, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3727727283745745e-05, |
| "loss": 3.9319, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.371934133623522e-05, |
| "loss": 3.9425, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.37109553887247e-05, |
| "loss": 3.9326, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.370256944121418e-05, |
| "loss": 3.9363, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.369418349370366e-05, |
| "loss": 3.9365, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.368579754619314e-05, |
| "loss": 3.9324, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.367741159868262e-05, |
| "loss": 3.9348, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3669042029975833e-05, |
| "loss": 3.9391, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3660656082465313e-05, |
| "loss": 3.9406, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3652286513758522e-05, |
| "loss": 3.9301, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3643900566248002e-05, |
| "loss": 3.9281, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3635514618737482e-05, |
| "loss": 3.9331, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3627128671226966e-05, |
| "loss": 3.9188, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3618742723716446e-05, |
| "loss": 3.931, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3610356776205926e-05, |
| "loss": 3.9328, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3601970828695402e-05, |
| "loss": 3.9315, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3593584881184882e-05, |
| "loss": 3.9521, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3585198933674362e-05, |
| "loss": 3.9409, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3576829364967575e-05, |
| "loss": 3.9437, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3568443417457055e-05, |
| "loss": 3.9355, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3560057469946534e-05, |
| "loss": 3.9352, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3551671522436014e-05, |
| "loss": 3.9328, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3543301953729224e-05, |
| "loss": 3.9377, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3534916006218703e-05, |
| "loss": 3.9298, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3526530058708183e-05, |
| "loss": 3.9303, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3518144111197667e-05, |
| "loss": 3.9271, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3509774542490876e-05, |
| "loss": 3.9272, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.350140497378409e-05, |
| "loss": 3.9326, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.349301902627357e-05, |
| "loss": 3.9388, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3484633078763045e-05, |
| "loss": 3.9358, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3476247131252528e-05, |
| "loss": 3.9434, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3467861183742008e-05, |
| "loss": 3.9231, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3459475236231488e-05, |
| "loss": 3.9326, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3451089288720968e-05, |
| "loss": 3.928, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3442703341210448e-05, |
| "loss": 3.9232, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3434317393699928e-05, |
| "loss": 3.9344, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3425947824993137e-05, |
| "loss": 3.9173, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.341756187748262e-05, |
| "loss": 3.9352, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.34091759299721e-05, |
| "loss": 3.9363, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3400789982461577e-05, |
| "loss": 3.9345, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3392404034951057e-05, |
| "loss": 3.9289, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3384018087440537e-05, |
| "loss": 3.944, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3375632139930017e-05, |
| "loss": 3.9186, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3367246192419497e-05, |
| "loss": 3.9403, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.335887662371271e-05, |
| "loss": 3.9282, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.335049067620219e-05, |
| "loss": 3.9107, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.334210472869167e-05, |
| "loss": 3.9455, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.333371878118115e-05, |
| "loss": 3.9254, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3325349212474358e-05, |
| "loss": 3.9311, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3316963264963838e-05, |
| "loss": 3.9191, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.330857731745332e-05, |
| "loss": 3.9199, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.330020774874653e-05, |
| "loss": 3.9236, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.329182180123601e-05, |
| "loss": 3.924, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.328343585372549e-05, |
| "loss": 3.9342, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.327504990621497e-05, |
| "loss": 3.9292, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.326666395870445e-05, |
| "loss": 3.9439, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.325827801119393e-05, |
| "loss": 3.9252, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.324989206368341e-05, |
| "loss": 3.9156, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.324150611617289e-05, |
| "loss": 3.9347, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.323312016866237e-05, |
| "loss": 3.9146, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3224750599955583e-05, |
| "loss": 3.9148, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.321636465244506e-05, |
| "loss": 3.9393, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.320797870493454e-05, |
| "loss": 3.9346, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3199609136227755e-05, |
| "loss": 3.9227, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3191223188717232e-05, |
| "loss": 3.9234, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3182837241206712e-05, |
| "loss": 3.9146, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.317445129369619e-05, |
| "loss": 3.9214, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.316606534618567e-05, |
| "loss": 3.9376, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3157695777478884e-05, |
| "loss": 3.932, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3149309829968364e-05, |
| "loss": 3.931, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3140923882457844e-05, |
| "loss": 3.9278, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3132537934947324e-05, |
| "loss": 3.9459, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3124151987436804e-05, |
| "loss": 3.9215, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3115766039926284e-05, |
| "loss": 3.9345, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3107396471219493e-05, |
| "loss": 3.9295, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3099010523708976e-05, |
| "loss": 3.9117, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3090624576198456e-05, |
| "loss": 3.9374, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3082238628687936e-05, |
| "loss": 3.937, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3073852681177413e-05, |
| "loss": 3.9342, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3065483112470625e-05, |
| "loss": 3.9294, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3057097164960105e-05, |
| "loss": 3.9177, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3048711217449585e-05, |
| "loss": 3.924, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3040325269939065e-05, |
| "loss": 3.9274, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3031939322428545e-05, |
| "loss": 3.9351, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3023553374918025e-05, |
| "loss": 3.9225, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3015183806211238e-05, |
| "loss": 3.9326, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3006797858700714e-05, |
| "loss": 3.9217, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2998411911190197e-05, |
| "loss": 3.9284, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2990025963679677e-05, |
| "loss": 3.9212, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2981640016169157e-05, |
| "loss": 3.9237, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2973254068658637e-05, |
| "loss": 3.9398, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2964868121148117e-05, |
| "loss": 3.9393, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2956482173637594e-05, |
| "loss": 3.9434, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2948112604930806e-05, |
| "loss": 3.9134, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2939726657420286e-05, |
| "loss": 3.9324, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.293134070990977e-05, |
| "loss": 3.924, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.292297114120298e-05, |
| "loss": 3.9361, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.291458519369246e-05, |
| "loss": 3.9264, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.290619924618194e-05, |
| "loss": 3.9269, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2897813298671415e-05, |
| "loss": 3.9188, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.28894273511609e-05, |
| "loss": 3.935, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.288104140365038e-05, |
| "loss": 3.9124, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.287267183494359e-05, |
| "loss": 3.9291, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2864285887433068e-05, |
| "loss": 3.9168, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2855899939922548e-05, |
| "loss": 3.9343, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2847513992412027e-05, |
| "loss": 3.9266, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2839128044901507e-05, |
| "loss": 3.9286, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.283074209739099e-05, |
| "loss": 3.9257, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.282235614988047e-05, |
| "loss": 3.9303, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.281397020236995e-05, |
| "loss": 3.9275, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.280560063366316e-05, |
| "loss": 3.9241, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.279723106495637e-05, |
| "loss": 3.9288, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2788845117445852e-05, |
| "loss": 3.946, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2780475548739065e-05, |
| "loss": 3.9282, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.277208960122854e-05, |
| "loss": 3.9235, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.276370365371802e-05, |
| "loss": 3.9288, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.27553177062075e-05, |
| "loss": 3.9261, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.274693175869698e-05, |
| "loss": 3.9207, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.273854581118646e-05, |
| "loss": 3.9313, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2730159863675944e-05, |
| "loss": 3.9174, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2721773916165424e-05, |
| "loss": 3.9376, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2713404347458633e-05, |
| "loss": 3.9353, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2705018399948113e-05, |
| "loss": 3.9268, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2696632452437593e-05, |
| "loss": 3.9199, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.268824650492707e-05, |
| "loss": 3.9224, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2679876936220286e-05, |
| "loss": 3.9264, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2671490988709766e-05, |
| "loss": 3.935, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2663105041199246e-05, |
| "loss": 3.9324, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2654719093688722e-05, |
| "loss": 3.9346, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2646333146178202e-05, |
| "loss": 3.9117, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2637947198667682e-05, |
| "loss": 3.9353, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2629561251157162e-05, |
| "loss": 3.9265, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2621175303646646e-05, |
| "loss": 3.9356, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2612805734939855e-05, |
| "loss": 3.9184, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2604419787429335e-05, |
| "loss": 3.9393, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2596033839918815e-05, |
| "loss": 3.9321, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2587647892408294e-05, |
| "loss": 3.9386, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2579261944897774e-05, |
| "loss": 3.9233, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2570875997387254e-05, |
| "loss": 3.9378, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2562490049876734e-05, |
| "loss": 3.9159, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2554120481169947e-05, |
| "loss": 3.9318, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2545734533659427e-05, |
| "loss": 3.9332, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2537348586148903e-05, |
| "loss": 3.9245, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2528962638638383e-05, |
| "loss": 3.9288, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.25205930699316e-05, |
| "loss": 3.9238, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.251220712242108e-05, |
| "loss": 3.931, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.016632080078125, |
| "eval_runtime": 287.6617, |
| "eval_samples_per_second": 1326.527, |
| "eval_steps_per_second": 41.455, |
| "step": 1679040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2503821174910556e-05, |
| "loss": 3.9264, |
| "step": 1679360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2495435227400036e-05, |
| "loss": 3.9199, |
| "step": 1679872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2487049279889516e-05, |
| "loss": 3.9351, |
| "step": 1680384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2478663332378996e-05, |
| "loss": 3.925, |
| "step": 1680896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2470277384868476e-05, |
| "loss": 3.937, |
| "step": 1681408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2461891437357955e-05, |
| "loss": 3.9251, |
| "step": 1681920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2453505489847435e-05, |
| "loss": 3.9299, |
| "step": 1682432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2445119542336915e-05, |
| "loss": 3.9312, |
| "step": 1682944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2436733594826395e-05, |
| "loss": 3.9263, |
| "step": 1683456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2428347647315875e-05, |
| "loss": 3.9294, |
| "step": 1683968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2419961699805355e-05, |
| "loss": 3.9312, |
| "step": 1684480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2411575752294835e-05, |
| "loss": 3.9328, |
| "step": 1684992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2403206183588048e-05, |
| "loss": 3.9249, |
| "step": 1685504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.239483661488126e-05, |
| "loss": 3.9216, |
| "step": 1686016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2386450667370737e-05, |
| "loss": 3.9266, |
| "step": 1686528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2378064719860217e-05, |
| "loss": 3.9112, |
| "step": 1687040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2369678772349697e-05, |
| "loss": 3.9242, |
| "step": 1687552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2361292824839177e-05, |
| "loss": 3.9249, |
| "step": 1688064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2352906877328657e-05, |
| "loss": 3.9244, |
| "step": 1688576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.234452092981814e-05, |
| "loss": 3.9464, |
| "step": 1689088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2336134982307616e-05, |
| "loss": 3.9374, |
| "step": 1689600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2327749034797096e-05, |
| "loss": 3.939, |
| "step": 1690112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2319363087286576e-05, |
| "loss": 3.9262, |
| "step": 1690624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2310977139776056e-05, |
| "loss": 3.9279, |
| "step": 1691136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2302591192265536e-05, |
| "loss": 3.929, |
| "step": 1691648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2294205244755016e-05, |
| "loss": 3.93, |
| "step": 1692160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2285819297244496e-05, |
| "loss": 3.9244, |
| "step": 1692672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.227744972853771e-05, |
| "loss": 3.9267, |
| "step": 1693184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.226906378102719e-05, |
| "loss": 3.9171, |
| "step": 1693696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.226067783351667e-05, |
| "loss": 3.9222, |
| "step": 1694208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2252291886006145e-05, |
| "loss": 3.9234, |
| "step": 1694720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2243905938495625e-05, |
| "loss": 3.9317, |
| "step": 1695232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.223551999098511e-05, |
| "loss": 3.9304, |
| "step": 1695744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.222713404347459e-05, |
| "loss": 3.9364, |
| "step": 1696256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2218764474767798e-05, |
| "loss": 3.9168, |
| "step": 1696768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.221039490606101e-05, |
| "loss": 3.9215, |
| "step": 1697280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2202025337354223e-05, |
| "loss": 3.926, |
| "step": 1697792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2193639389843702e-05, |
| "loss": 3.9168, |
| "step": 1698304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2185253442333182e-05, |
| "loss": 3.9235, |
| "step": 1698816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2176867494822662e-05, |
| "loss": 3.9173, |
| "step": 1699328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2168481547312142e-05, |
| "loss": 3.9271, |
| "step": 1699840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.216009559980162e-05, |
| "loss": 3.93, |
| "step": 1700352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.21517096522911e-05, |
| "loss": 3.9281, |
| "step": 1700864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.214332370478058e-05, |
| "loss": 3.925, |
| "step": 1701376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2134937757270062e-05, |
| "loss": 3.9343, |
| "step": 1701888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2126551809759542e-05, |
| "loss": 3.9196, |
| "step": 1702400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2118165862249022e-05, |
| "loss": 3.9301, |
| "step": 1702912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2109779914738502e-05, |
| "loss": 3.9227, |
| "step": 1703424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.210139396722798e-05, |
| "loss": 3.9006, |
| "step": 1703936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.209300801971746e-05, |
| "loss": 3.9454, |
| "step": 1704448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.208462207220694e-05, |
| "loss": 3.9156, |
| "step": 1704960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.207623612469642e-05, |
| "loss": 3.9266, |
| "step": 1705472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.206786655598963e-05, |
| "loss": 3.9075, |
| "step": 1705984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.205948060847911e-05, |
| "loss": 3.9169, |
| "step": 1706496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.205109466096859e-05, |
| "loss": 3.9095, |
| "step": 1707008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.204270871345807e-05, |
| "loss": 3.9201, |
| "step": 1707520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.203433914475128e-05, |
| "loss": 3.9257, |
| "step": 1708032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2025953197240763e-05, |
| "loss": 3.9259, |
| "step": 1708544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2017567249730243e-05, |
| "loss": 3.9339, |
| "step": 1709056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2009181302219723e-05, |
| "loss": 3.9193, |
| "step": 1709568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2000795354709203e-05, |
| "loss": 3.9083, |
| "step": 1710080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1992425786002412e-05, |
| "loss": 3.9323, |
| "step": 1710592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1984039838491892e-05, |
| "loss": 3.9079, |
| "step": 1711104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1975670269785105e-05, |
| "loss": 3.9043, |
| "step": 1711616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1967284322274585e-05, |
| "loss": 3.9325, |
| "step": 1712128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1958898374764065e-05, |
| "loss": 3.93, |
| "step": 1712640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1950512427253545e-05, |
| "loss": 3.913, |
| "step": 1713152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1942126479743024e-05, |
| "loss": 3.9203, |
| "step": 1713664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1933740532232504e-05, |
| "loss": 3.9066, |
| "step": 1714176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.192535458472198e-05, |
| "loss": 3.9157, |
| "step": 1714688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1916968637211464e-05, |
| "loss": 3.9268, |
| "step": 1715200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1908599068504677e-05, |
| "loss": 3.925, |
| "step": 1715712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1900213120994157e-05, |
| "loss": 3.9279, |
| "step": 1716224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1891827173483633e-05, |
| "loss": 3.9253, |
| "step": 1716736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1883441225973113e-05, |
| "loss": 3.9368, |
| "step": 1717248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1875071657266326e-05, |
| "loss": 3.9112, |
| "step": 1717760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.186668570975581e-05, |
| "loss": 3.9307, |
| "step": 1718272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1858299762245286e-05, |
| "loss": 3.9188, |
| "step": 1718784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1849930193538498e-05, |
| "loss": 3.9118, |
| "step": 1719296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1841544246027978e-05, |
| "loss": 3.928, |
| "step": 1719808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1833174677321187e-05, |
| "loss": 3.9321, |
| "step": 1720320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.182478872981067e-05, |
| "loss": 3.9265, |
| "step": 1720832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.181640278230015e-05, |
| "loss": 3.9224, |
| "step": 1721344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.180801683478963e-05, |
| "loss": 3.9104, |
| "step": 1721856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1799630887279107e-05, |
| "loss": 3.9132, |
| "step": 1722368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1791244939768587e-05, |
| "loss": 3.925, |
| "step": 1722880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1782858992258067e-05, |
| "loss": 3.9292, |
| "step": 1723392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1774473044747547e-05, |
| "loss": 3.9178, |
| "step": 1723904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.176610347604076e-05, |
| "loss": 3.9279, |
| "step": 1724416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.175771752853024e-05, |
| "loss": 3.9091, |
| "step": 1724928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.174933158101972e-05, |
| "loss": 3.9231, |
| "step": 1725440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.17409456335092e-05, |
| "loss": 3.9158, |
| "step": 1725952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.173255968599868e-05, |
| "loss": 3.9193, |
| "step": 1726464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.172417373848816e-05, |
| "loss": 3.9322, |
| "step": 1726976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.171578779097764e-05, |
| "loss": 3.9336, |
| "step": 1727488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.170740184346712e-05, |
| "loss": 3.9345, |
| "step": 1728000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.169904865356406e-05, |
| "loss": 3.9073, |
| "step": 1728512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.169066270605354e-05, |
| "loss": 3.9259, |
| "step": 1729024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.168227675854302e-05, |
| "loss": 3.9155, |
| "step": 1729536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.16738908110325e-05, |
| "loss": 3.9285, |
| "step": 1730048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.166550486352198e-05, |
| "loss": 3.9191, |
| "step": 1730560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.165711891601146e-05, |
| "loss": 3.9226, |
| "step": 1731072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.164873296850094e-05, |
| "loss": 3.9142, |
| "step": 1731584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.164034702099042e-05, |
| "loss": 3.9264, |
| "step": 1732096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.16319610734799e-05, |
| "loss": 3.909, |
| "step": 1732608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1623607883576842e-05, |
| "loss": 3.9234, |
| "step": 1733120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1615221936066325e-05, |
| "loss": 3.9103, |
| "step": 1733632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1606835988555805e-05, |
| "loss": 3.9294, |
| "step": 1734144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1598450041045285e-05, |
| "loss": 3.9165, |
| "step": 1734656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1590064093534762e-05, |
| "loss": 3.9202, |
| "step": 1735168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1581694524827974e-05, |
| "loss": 3.9189, |
| "step": 1735680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1573308577317454e-05, |
| "loss": 3.925, |
| "step": 1736192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1564922629806934e-05, |
| "loss": 3.9204, |
| "step": 1736704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1556536682296414e-05, |
| "loss": 3.918, |
| "step": 1737216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1548150734785894e-05, |
| "loss": 3.9249, |
| "step": 1737728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1539764787275374e-05, |
| "loss": 3.9346, |
| "step": 1738240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1531395218568583e-05, |
| "loss": 3.926, |
| "step": 1738752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1523009271058063e-05, |
| "loss": 3.9188, |
| "step": 1739264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1514623323547543e-05, |
| "loss": 3.9166, |
| "step": 1739776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1506237376037026e-05, |
| "loss": 3.9243, |
| "step": 1740288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1497851428526506e-05, |
| "loss": 3.9153, |
| "step": 1740800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1489481859819715e-05, |
| "loss": 3.9272, |
| "step": 1741312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1481095912309195e-05, |
| "loss": 3.911, |
| "step": 1741824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1472709964798675e-05, |
| "loss": 3.9311, |
| "step": 1742336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1464324017288155e-05, |
| "loss": 3.9272, |
| "step": 1742848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1455938069777635e-05, |
| "loss": 3.9223, |
| "step": 1743360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1447552122267115e-05, |
| "loss": 3.9136, |
| "step": 1743872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1439166174756595e-05, |
| "loss": 3.9178, |
| "step": 1744384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1430780227246075e-05, |
| "loss": 3.9201, |
| "step": 1744896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1422410658539288e-05, |
| "loss": 3.9317, |
| "step": 1745408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1414024711028764e-05, |
| "loss": 3.9248, |
| "step": 1745920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.140565514232198e-05, |
| "loss": 3.9242, |
| "step": 1746432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.139726919481146e-05, |
| "loss": 3.9069, |
| "step": 1746944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.138888324730094e-05, |
| "loss": 3.9301, |
| "step": 1747456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1380497299790417e-05, |
| "loss": 3.9188, |
| "step": 1747968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1372111352279897e-05, |
| "loss": 3.9316, |
| "step": 1748480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1363725404769376e-05, |
| "loss": 3.9133, |
| "step": 1748992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1355339457258856e-05, |
| "loss": 3.9286, |
| "step": 1749504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.134696988855207e-05, |
| "loss": 3.9221, |
| "step": 1750016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.133858394104155e-05, |
| "loss": 3.9384, |
| "step": 1750528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.133019799353103e-05, |
| "loss": 3.916, |
| "step": 1751040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.132181204602051e-05, |
| "loss": 3.9278, |
| "step": 1751552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.131342609850999e-05, |
| "loss": 3.9132, |
| "step": 1752064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.130504015099947e-05, |
| "loss": 3.9262, |
| "step": 1752576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.129667058229268e-05, |
| "loss": 3.9235, |
| "step": 1753088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.128828463478216e-05, |
| "loss": 3.9167, |
| "step": 1753600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.127989868727164e-05, |
| "loss": 3.9269, |
| "step": 1754112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.127152911856485e-05, |
| "loss": 3.9123, |
| "step": 1754624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.126314317105433e-05, |
| "loss": 3.9284, |
| "step": 1755136 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.014286041259766, |
| "eval_runtime": 304.027, |
| "eval_samples_per_second": 1255.122, |
| "eval_steps_per_second": 39.223, |
| "step": 1755360 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 7.212728713647345e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |