| { |
| "best_metric": 4.006655216217041, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/lstm/0/checkpoints/checkpoint-1984320", |
| "epoch": 1.0250006060157382, |
| "eval_steps": 10, |
| "global_step": 1984320, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.8199, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 7.5523, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 7.0685, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 7.0067, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 6.957, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 6.9311, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 6.7662, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 6.6691, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 6.5638, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 6.4756, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 6.4027, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 6.333, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 6.259, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989098268236324e-05, |
| "loss": 6.1928, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988259673485272e-05, |
| "loss": 6.1457, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.98742107873422e-05, |
| "loss": 6.0947, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986582483983168e-05, |
| "loss": 6.0521, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985743889232116e-05, |
| "loss": 6.0143, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984905294481064e-05, |
| "loss": 5.9792, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.9405, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.9079, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.982391148108281e-05, |
| "loss": 5.8683, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.981552553357229e-05, |
| "loss": 5.8467, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 5.805, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 5.786, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790384069844466e-05, |
| "loss": 5.7539, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9781998122333946e-05, |
| "loss": 5.7311, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773612174823426e-05, |
| "loss": 5.7097, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.97652262273129e-05, |
| "loss": 5.6784, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 5.6532, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748470711095595e-05, |
| "loss": 5.6408, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.974008476358507e-05, |
| "loss": 5.6287, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.973169881607455e-05, |
| "loss": 5.607, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 5.5839, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 5.5777, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9706557352346724e-05, |
| "loss": 5.55, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9698171404836204e-05, |
| "loss": 5.5363, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968980183612942e-05, |
| "loss": 5.5033, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96814158886189e-05, |
| "loss": 5.5068, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967302994110837e-05, |
| "loss": 5.4845, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966464399359785e-05, |
| "loss": 5.4754, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965625804608733e-05, |
| "loss": 5.463, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964788847738054e-05, |
| "loss": 5.4566, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963950252987002e-05, |
| "loss": 5.4261, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96311165823595e-05, |
| "loss": 5.4201, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962273063484898e-05, |
| "loss": 5.4147, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.961434468733847e-05, |
| "loss": 5.3988, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960597511863168e-05, |
| "loss": 5.4009, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959758917112116e-05, |
| "loss": 5.3788, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958920322361064e-05, |
| "loss": 5.3582, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958081727610012e-05, |
| "loss": 5.3658, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957244770739333e-05, |
| "loss": 5.345, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956406175988281e-05, |
| "loss": 5.3389, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955567581237229e-05, |
| "loss": 5.3113, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954728986486177e-05, |
| "loss": 5.322, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9538920296154976e-05, |
| "loss": 5.2986, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530534348644456e-05, |
| "loss": 5.3266, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522148401133936e-05, |
| "loss": 5.2873, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951376245362342e-05, |
| "loss": 5.2876, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95053765061129e-05, |
| "loss": 5.2791, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949699055860238e-05, |
| "loss": 5.2604, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948860461109186e-05, |
| "loss": 5.2471, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948021866358134e-05, |
| "loss": 5.2476, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947183271607082e-05, |
| "loss": 5.23, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94634467685603e-05, |
| "loss": 5.2241, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945506082104978e-05, |
| "loss": 5.2412, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944670763114672e-05, |
| "loss": 5.2212, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94383216836362e-05, |
| "loss": 5.2078, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942993573612568e-05, |
| "loss": 5.1838, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942154978861516e-05, |
| "loss": 5.1809, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941316384110464e-05, |
| "loss": 5.1896, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404794272397856e-05, |
| "loss": 5.1862, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396408324887336e-05, |
| "loss": 5.1772, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388022377376816e-05, |
| "loss": 5.1737, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379636429866296e-05, |
| "loss": 5.1672, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371266861159505e-05, |
| "loss": 5.1589, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362880913648985e-05, |
| "loss": 5.1456, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354494966138465e-05, |
| "loss": 5.1354, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346109018627945e-05, |
| "loss": 5.1299, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337739449921154e-05, |
| "loss": 5.117, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329353502410634e-05, |
| "loss": 5.1248, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9320967554900114e-05, |
| "loss": 5.1149, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 5.1179, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9304195659879074e-05, |
| "loss": 5.0993, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9295809712368554e-05, |
| "loss": 5.0797, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9287423764858034e-05, |
| "loss": 5.0947, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9279037817347514e-05, |
| "loss": 5.0898, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 5.0872, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92622823011302e-05, |
| "loss": 5.072, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925389635361968e-05, |
| "loss": 5.0747, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924551040610916e-05, |
| "loss": 5.0602, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923712445859864e-05, |
| "loss": 5.0545, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922875488989185e-05, |
| "loss": 5.0548, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922036894238133e-05, |
| "loss": 5.0489, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921198299487081e-05, |
| "loss": 5.0504, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920359704736029e-05, |
| "loss": 5.0452, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919521109984978e-05, |
| "loss": 5.0417, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918684153114299e-05, |
| "loss": 5.0199, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917845558363247e-05, |
| "loss": 5.017, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917006963612195e-05, |
| "loss": 5.0148, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916168368861143e-05, |
| "loss": 5.0139, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9153314119904636e-05, |
| "loss": 5.007, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9144928172394116e-05, |
| "loss": 4.9996, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136558603687325e-05, |
| "loss": 4.9908, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128172656176805e-05, |
| "loss": 4.9981, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119786708666285e-05, |
| "loss": 4.9721, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111400761155765e-05, |
| "loss": 4.9789, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9103014813645245e-05, |
| "loss": 4.9768, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909462886613473e-05, |
| "loss": 4.9675, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908624291862421e-05, |
| "loss": 4.9646, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907785697111369e-05, |
| "loss": 4.9671, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90694874024069e-05, |
| "loss": 4.9546, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906110145489638e-05, |
| "loss": 4.9572, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905271550738586e-05, |
| "loss": 4.9493, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904432955987534e-05, |
| "loss": 4.9401, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903594361236482e-05, |
| "loss": 4.9448, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90275576648543e-05, |
| "loss": 4.9431, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901918809614751e-05, |
| "loss": 4.9365, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901080214863699e-05, |
| "loss": 4.9236, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.900241620112647e-05, |
| "loss": 4.9297, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899403025361595e-05, |
| "loss": 4.9301, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.898564430610543e-05, |
| "loss": 4.9193, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.897725835859491e-05, |
| "loss": 4.9062, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968872411084396e-05, |
| "loss": 4.9138, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960502842377605e-05, |
| "loss": 4.9149, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952133273670815e-05, |
| "loss": 4.9012, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943747326160294e-05, |
| "loss": 4.8942, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935361378649774e-05, |
| "loss": 4.8847, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8926975431139254e-05, |
| "loss": 4.8918, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918589483628734e-05, |
| "loss": 4.8822, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8910203536118214e-05, |
| "loss": 4.896, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890181758860769e-05, |
| "loss": 4.8859, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889343164109717e-05, |
| "loss": 4.8682, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888504569358665e-05, |
| "loss": 4.8752, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887667612487986e-05, |
| "loss": 4.8732, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886829017736934e-05, |
| "loss": 4.8685, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885990422985882e-05, |
| "loss": 4.8714, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88515182823483e-05, |
| "loss": 4.8578, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884314871364151e-05, |
| "loss": 4.8623, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883476276613099e-05, |
| "loss": 4.8669, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882639319742421e-05, |
| "loss": 4.851, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881800724991369e-05, |
| "loss": 4.8473, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880962130240316e-05, |
| "loss": 4.8484, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880123535489264e-05, |
| "loss": 4.8337, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879284940738212e-05, |
| "loss": 4.834, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87844634598716e-05, |
| "loss": 4.8386, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877607751236109e-05, |
| "loss": 4.8225, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876769156485057e-05, |
| "loss": 4.8276, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875930561734005e-05, |
| "loss": 4.8316, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875093604863326e-05, |
| "loss": 4.8273, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.783571720123291, |
| "eval_runtime": 294.9308, |
| "eval_samples_per_second": 1293.833, |
| "eval_steps_per_second": 40.433, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874255010112274e-05, |
| "loss": 4.8105, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873416415361222e-05, |
| "loss": 4.8092, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.87257782061017e-05, |
| "loss": 4.8259, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.871739225859118e-05, |
| "loss": 4.8087, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709006311080657e-05, |
| "loss": 4.821, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700620363570137e-05, |
| "loss": 4.7924, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692234416059616e-05, |
| "loss": 4.811, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683864847352826e-05, |
| "loss": 4.7928, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8675478899842306e-05, |
| "loss": 4.7893, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8667092952331785e-05, |
| "loss": 4.794, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8658723383625e-05, |
| "loss": 4.7974, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865033743611448e-05, |
| "loss": 4.7972, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864196786740769e-05, |
| "loss": 4.7793, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.863358191989717e-05, |
| "loss": 4.779, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862519597238665e-05, |
| "loss": 4.7706, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.861681002487613e-05, |
| "loss": 4.7726, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860842407736561e-05, |
| "loss": 4.7693, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860003812985509e-05, |
| "loss": 4.7697, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.859165218234457e-05, |
| "loss": 4.7662, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858326623483405e-05, |
| "loss": 4.786, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.857488028732352e-05, |
| "loss": 4.7612, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.856649433981301e-05, |
| "loss": 4.7702, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.855810839230249e-05, |
| "loss": 4.7566, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.85497388235957e-05, |
| "loss": 4.766, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541369254888915e-05, |
| "loss": 4.7548, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532983307378395e-05, |
| "loss": 4.7521, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524597359867875e-05, |
| "loss": 4.7483, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851621141235735e-05, |
| "loss": 4.7439, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.850782546484683e-05, |
| "loss": 4.7285, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849943951733631e-05, |
| "loss": 4.7434, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8491069948629524e-05, |
| "loss": 4.7404, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8482684001119e-05, |
| "loss": 4.7427, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847429805360848e-05, |
| "loss": 4.7337, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8465912106097964e-05, |
| "loss": 4.7371, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845754253739117e-05, |
| "loss": 4.7279, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844915658988065e-05, |
| "loss": 4.7245, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844077064237013e-05, |
| "loss": 4.7085, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843238469485961e-05, |
| "loss": 4.7155, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842399874734909e-05, |
| "loss": 4.7158, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.841561279983857e-05, |
| "loss": 4.7129, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840722685232805e-05, |
| "loss": 4.7169, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839884090481753e-05, |
| "loss": 4.7157, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839047133611074e-05, |
| "loss": 4.7038, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838208538860022e-05, |
| "loss": 4.7091, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.837371581989343e-05, |
| "loss": 4.7088, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836532987238292e-05, |
| "loss": 4.6963, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83569439248724e-05, |
| "loss": 4.7082, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834855797736188e-05, |
| "loss": 4.6943, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834017202985136e-05, |
| "loss": 4.6808, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8331802461144566e-05, |
| "loss": 4.7015, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8323416513634046e-05, |
| "loss": 4.6847, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8315030566123526e-05, |
| "loss": 4.6833, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8306644618613006e-05, |
| "loss": 4.6728, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298258671102486e-05, |
| "loss": 4.6824, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289872723591966e-05, |
| "loss": 4.6699, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281486776081446e-05, |
| "loss": 4.7019, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273117207374655e-05, |
| "loss": 4.6703, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.826474763866787e-05, |
| "loss": 4.6824, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.825636169115735e-05, |
| "loss": 4.68, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.824797574364683e-05, |
| "loss": 4.6659, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823958979613631e-05, |
| "loss": 4.6571, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823120384862579e-05, |
| "loss": 4.6682, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.822281790111527e-05, |
| "loss": 4.6531, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821444833240848e-05, |
| "loss": 4.6538, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820606238489796e-05, |
| "loss": 4.6699, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819767643738744e-05, |
| "loss": 4.6656, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818930686868065e-05, |
| "loss": 4.6496, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818092092117013e-05, |
| "loss": 4.6448, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817253497365961e-05, |
| "loss": 4.6437, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816414902614909e-05, |
| "loss": 4.6558, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.815576307863857e-05, |
| "loss": 4.6542, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.814737713112805e-05, |
| "loss": 4.6526, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8138991183617535e-05, |
| "loss": 4.6522, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.813060523610701e-05, |
| "loss": 4.653, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.812221928859649e-05, |
| "loss": 4.6528, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811383334108597e-05, |
| "loss": 4.6353, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810544739357545e-05, |
| "loss": 4.6398, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809706144606493e-05, |
| "loss": 4.6367, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8088708256161873e-05, |
| "loss": 4.6236, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808032230865135e-05, |
| "loss": 4.6387, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8071936361140827e-05, |
| "loss": 4.6355, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8063550413630307e-05, |
| "loss": 4.638, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8055164466119786e-05, |
| "loss": 4.6201, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804677851860927e-05, |
| "loss": 4.6107, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803840894990248e-05, |
| "loss": 4.6285, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803002300239196e-05, |
| "loss": 4.6237, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802163705488144e-05, |
| "loss": 4.6287, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801325110737092e-05, |
| "loss": 4.6203, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.800488153866413e-05, |
| "loss": 4.6229, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.799649559115361e-05, |
| "loss": 4.6174, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.798810964364309e-05, |
| "loss": 4.6088, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797972369613257e-05, |
| "loss": 4.6165, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797135412742578e-05, |
| "loss": 4.612, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.796296817991526e-05, |
| "loss": 4.6197, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795458223240474e-05, |
| "loss": 4.6211, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794619628489423e-05, |
| "loss": 4.6148, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.793781033738371e-05, |
| "loss": 4.6019, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7929440768676916e-05, |
| "loss": 4.6022, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7921054821166396e-05, |
| "loss": 4.601, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7912668873655876e-05, |
| "loss": 4.6093, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7904282926145356e-05, |
| "loss": 4.5961, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7895896978634836e-05, |
| "loss": 4.5965, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7887527409928045e-05, |
| "loss": 4.5899, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7879141462417525e-05, |
| "loss": 4.6032, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7870755514907005e-05, |
| "loss": 4.5757, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7862369567396485e-05, |
| "loss": 4.5887, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7853983619885965e-05, |
| "loss": 4.5922, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845597672375445e-05, |
| "loss": 4.5838, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837211724864925e-05, |
| "loss": 4.5821, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782884215615814e-05, |
| "loss": 4.5925, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782045620864762e-05, |
| "loss": 4.5793, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.78120702611371e-05, |
| "loss": 4.5808, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.780368431362658e-05, |
| "loss": 4.5851, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.779529836611606e-05, |
| "loss": 4.5729, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.778691241860554e-05, |
| "loss": 4.5832, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777852647109502e-05, |
| "loss": 4.5878, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777015690238823e-05, |
| "loss": 4.5787, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.776177095487771e-05, |
| "loss": 4.567, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.775338500736719e-05, |
| "loss": 4.5788, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.774499905985666e-05, |
| "loss": 4.5771, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773661311234615e-05, |
| "loss": 4.5696, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772822716483563e-05, |
| "loss": 4.5643, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771984121732511e-05, |
| "loss": 4.5659, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771147164861832e-05, |
| "loss": 4.5781, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.77030857011078e-05, |
| "loss": 4.5668, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769469975359728e-05, |
| "loss": 4.5633, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768631380608676e-05, |
| "loss": 4.5522, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767794423737997e-05, |
| "loss": 4.559, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766955828986945e-05, |
| "loss": 4.5534, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766117234235893e-05, |
| "loss": 4.5716, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765278639484841e-05, |
| "loss": 4.5661, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7644416826141616e-05, |
| "loss": 4.5461, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.763604725743483e-05, |
| "loss": 4.5557, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762766130992431e-05, |
| "loss": 4.5582, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761927536241379e-05, |
| "loss": 4.5571, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761088941490327e-05, |
| "loss": 4.5623, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.760250346739275e-05, |
| "loss": 4.5477, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.759411751988223e-05, |
| "loss": 4.553, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758573157237171e-05, |
| "loss": 4.5582, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757734562486119e-05, |
| "loss": 4.5464, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75689760561544e-05, |
| "loss": 4.5466, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756059010864388e-05, |
| "loss": 4.5526, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.755220416113336e-05, |
| "loss": 4.5358, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754383459242657e-05, |
| "loss": 4.541, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753544864491605e-05, |
| "loss": 4.5459, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7527062697405536e-05, |
| "loss": 4.5322, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7518676749895016e-05, |
| "loss": 4.5394, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7510290802384496e-05, |
| "loss": 4.5423, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7501921233677705e-05, |
| "loss": 4.5391, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.501157283782959, |
| "eval_runtime": 295.2349, |
| "eval_samples_per_second": 1292.5, |
| "eval_steps_per_second": 40.392, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493535286167185e-05, |
| "loss": 4.5248, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7485149338656665e-05, |
| "loss": 4.5264, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7476763391146145e-05, |
| "loss": 4.5456, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7468377443635625e-05, |
| "loss": 4.525, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7459991496125105e-05, |
| "loss": 4.5414, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451605548614585e-05, |
| "loss": 4.521, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443219601104065e-05, |
| "loss": 4.5366, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434833653593545e-05, |
| "loss": 4.5176, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426447706083025e-05, |
| "loss": 4.5283, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7418061758572505e-05, |
| "loss": 4.524, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7409675811061985e-05, |
| "loss": 4.5282, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401289863551465e-05, |
| "loss": 4.5336, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739292029484468e-05, |
| "loss": 4.5139, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384534347334154e-05, |
| "loss": 4.5165, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7376148399823634e-05, |
| "loss": 4.5107, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7367762452313114e-05, |
| "loss": 4.5095, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7359376504802594e-05, |
| "loss": 4.5125, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7350990557292074e-05, |
| "loss": 4.5164, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7342604609781554e-05, |
| "loss": 4.511, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7334218662271034e-05, |
| "loss": 4.5335, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7325832714760514e-05, |
| "loss": 4.5125, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7317446767249994e-05, |
| "loss": 4.5184, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7309060819739474e-05, |
| "loss": 4.5104, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7300674872228953e-05, |
| "loss": 4.5194, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729230530352217e-05, |
| "loss": 4.5091, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728391935601165e-05, |
| "loss": 4.5036, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727553340850113e-05, |
| "loss": 4.5065, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726716383979434e-05, |
| "loss": 4.5019, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725877789228382e-05, |
| "loss": 4.4872, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.72503919447733e-05, |
| "loss": 4.5022, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724200599726278e-05, |
| "loss": 4.5084, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723362004975226e-05, |
| "loss": 4.5033, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722523410224174e-05, |
| "loss": 4.501, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721684815473121e-05, |
| "loss": 4.5011, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720847858602443e-05, |
| "loss": 4.4949, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720009263851391e-05, |
| "loss": 4.4915, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719170669100339e-05, |
| "loss": 4.4853, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718332074349287e-05, |
| "loss": 4.4841, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717493479598235e-05, |
| "loss": 4.4867, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.716654884847183e-05, |
| "loss": 4.4856, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.715816290096131e-05, |
| "loss": 4.4935, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7149793332254516e-05, |
| "loss": 4.4937, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7141407384743996e-05, |
| "loss": 4.4824, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7133021437233476e-05, |
| "loss": 4.4875, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7124635489722956e-05, |
| "loss": 4.4918, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7116249542212436e-05, |
| "loss": 4.4752, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7107879973505645e-05, |
| "loss": 4.488, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7099494025995125e-05, |
| "loss": 4.482, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7091108078484605e-05, |
| "loss": 4.4618, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708272213097409e-05, |
| "loss": 4.484, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707433618346357e-05, |
| "loss": 4.476, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706595023595305e-05, |
| "loss": 4.4713, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705758066724626e-05, |
| "loss": 4.467, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704919471973574e-05, |
| "loss": 4.4705, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704080877222522e-05, |
| "loss": 4.4588, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70324228247147e-05, |
| "loss": 4.4879, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702403687720418e-05, |
| "loss": 4.4663, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701565092969366e-05, |
| "loss": 4.4757, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700726498218314e-05, |
| "loss": 4.4764, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699887903467262e-05, |
| "loss": 4.4603, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699050946596583e-05, |
| "loss": 4.4531, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698212351845531e-05, |
| "loss": 4.4687, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697373757094479e-05, |
| "loss": 4.4556, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6965351623434276e-05, |
| "loss": 4.4501, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6956965675923756e-05, |
| "loss": 4.4697, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6948596107216965e-05, |
| "loss": 4.4685, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6940210159706445e-05, |
| "loss": 4.453, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6931824212195925e-05, |
| "loss": 4.4466, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69234382646854e-05, |
| "loss": 4.4484, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6915068695978614e-05, |
| "loss": 4.4571, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6906682748468094e-05, |
| "loss": 4.4612, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68983131797613e-05, |
| "loss": 4.4581, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688992723225078e-05, |
| "loss": 4.4618, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688154128474026e-05, |
| "loss": 4.4599, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687315533722974e-05, |
| "loss": 4.4621, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686476938971923e-05, |
| "loss": 4.4447, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.68563834422087e-05, |
| "loss": 4.4505, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684799749469818e-05, |
| "loss": 4.4549, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683961154718766e-05, |
| "loss": 4.4347, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683124197848087e-05, |
| "loss": 4.4495, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682285603097035e-05, |
| "loss": 4.4557, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681448646226357e-05, |
| "loss": 4.454, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680610051475305e-05, |
| "loss": 4.4393, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679771456724252e-05, |
| "loss": 4.4268, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6789328619732e-05, |
| "loss": 4.4463, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678094267222148e-05, |
| "loss": 4.4372, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677255672471097e-05, |
| "loss": 4.4525, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676417077720045e-05, |
| "loss": 4.4424, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6755801208493657e-05, |
| "loss": 4.4447, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6747415260983136e-05, |
| "loss": 4.4396, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6739029313472616e-05, |
| "loss": 4.429, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6730643365962096e-05, |
| "loss": 4.4384, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722257418451576e-05, |
| "loss": 4.4359, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713887849744785e-05, |
| "loss": 4.4466, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705501902234265e-05, |
| "loss": 4.4483, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697115954723745e-05, |
| "loss": 4.4391, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688730007213225e-05, |
| "loss": 4.4268, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6680344059702705e-05, |
| "loss": 4.4306, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667197449099592e-05, |
| "loss": 4.4308, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66635885434854e-05, |
| "loss": 4.4381, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665520259597488e-05, |
| "loss": 4.4259, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664681664846436e-05, |
| "loss": 4.429, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663843070095384e-05, |
| "loss": 4.4182, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663004475344332e-05, |
| "loss": 4.4377, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66216588059328e-05, |
| "loss": 4.4097, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661327285842228e-05, |
| "loss": 4.4255, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660490328971549e-05, |
| "loss": 4.4168, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659651734220497e-05, |
| "loss": 4.4227, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658814777349818e-05, |
| "loss": 4.4189, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657976182598766e-05, |
| "loss": 4.4286, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657137587847714e-05, |
| "loss": 4.4218, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656298993096662e-05, |
| "loss": 4.4156, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65546039834561e-05, |
| "loss": 4.4232, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6546218035945586e-05, |
| "loss": 4.4093, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6537832088435065e-05, |
| "loss": 4.4272, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529462519728275e-05, |
| "loss": 4.4325, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6521076572217755e-05, |
| "loss": 4.4164, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6512690624707234e-05, |
| "loss": 4.4114, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650430467719671e-05, |
| "loss": 4.4163, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649591872968619e-05, |
| "loss": 4.4182, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648753278217567e-05, |
| "loss": 4.4116, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647914683466515e-05, |
| "loss": 4.409, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647076088715463e-05, |
| "loss": 4.4077, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462391318447837e-05, |
| "loss": 4.4228, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645400537093732e-05, |
| "loss": 4.4132, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64456194234268e-05, |
| "loss": 4.4082, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643723347591628e-05, |
| "loss": 4.3981, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642886390720949e-05, |
| "loss": 4.4092, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642049433850271e-05, |
| "loss": 4.3982, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641210839099218e-05, |
| "loss": 4.4197, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640372244348166e-05, |
| "loss": 4.4142, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639533649597114e-05, |
| "loss": 4.4004, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638695054846062e-05, |
| "loss": 4.3987, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.63785646009501e-05, |
| "loss": 4.4089, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637017865343958e-05, |
| "loss": 4.4054, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636180908473279e-05, |
| "loss": 4.416, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635342313722228e-05, |
| "loss": 4.3962, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634503718971176e-05, |
| "loss": 4.409, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633665124220124e-05, |
| "loss": 4.4051, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632826529469072e-05, |
| "loss": 4.4023, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6319895725983926e-05, |
| "loss": 4.4018, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6311509778473406e-05, |
| "loss": 4.4052, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6303123830962886e-05, |
| "loss": 4.3922, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6294737883452366e-05, |
| "loss": 4.3982, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6286351935941846e-05, |
| "loss": 4.3989, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6277965988431326e-05, |
| "loss": 4.3928, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6269580040920806e-05, |
| "loss": 4.392, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6261194093410286e-05, |
| "loss": 4.3987, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6252824524703495e-05, |
| "loss": 4.3969, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.366720676422119, |
| "eval_runtime": 289.5048, |
| "eval_samples_per_second": 1318.082, |
| "eval_steps_per_second": 41.191, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6244438577192975e-05, |
| "loss": 4.3889, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.623605262968246e-05, |
| "loss": 4.3823, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622766668217194e-05, |
| "loss": 4.4056, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621929711346515e-05, |
| "loss": 4.3848, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621091116595463e-05, |
| "loss": 4.4053, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620252521844411e-05, |
| "loss": 4.3792, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619413927093359e-05, |
| "loss": 4.3985, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61857697022268e-05, |
| "loss": 4.3824, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617738375471628e-05, |
| "loss": 4.3863, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616899780720576e-05, |
| "loss": 4.3893, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616061185969524e-05, |
| "loss": 4.3889, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615224229098845e-05, |
| "loss": 4.3956, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6143872722281664e-05, |
| "loss": 4.3801, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6135486774771144e-05, |
| "loss": 4.3781, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6127100827260624e-05, |
| "loss": 4.3828, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6118714879750104e-05, |
| "loss": 4.3699, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6110328932239584e-05, |
| "loss": 4.3798, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6101942984729064e-05, |
| "loss": 4.3834, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6093557037218544e-05, |
| "loss": 4.3773, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.608517108970802e-05, |
| "loss": 4.3973, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.607680152100123e-05, |
| "loss": 4.3847, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606841557349071e-05, |
| "loss": 4.3893, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606002962598019e-05, |
| "loss": 4.3779, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6051643678469666e-05, |
| "loss": 4.3846, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.604325773095915e-05, |
| "loss": 4.3797, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.603487178344863e-05, |
| "loss": 4.3739, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.602648583593811e-05, |
| "loss": 4.3773, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.601809988842759e-05, |
| "loss": 4.3702, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.600974669852454e-05, |
| "loss": 4.3643, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600136075101402e-05, |
| "loss": 4.3692, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.599297480350349e-05, |
| "loss": 4.3771, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598458885599297e-05, |
| "loss": 4.377, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597620290848245e-05, |
| "loss": 4.3738, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.596783333977567e-05, |
| "loss": 4.3764, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595944739226514e-05, |
| "loss": 4.3613, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.595106144475462e-05, |
| "loss": 4.3664, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.59426754972441e-05, |
| "loss": 4.3655, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5934289549733587e-05, |
| "loss": 4.3552, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5925919981026796e-05, |
| "loss": 4.3642, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5917534033516276e-05, |
| "loss": 4.3588, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5909148086005756e-05, |
| "loss": 4.3704, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5900762138495235e-05, |
| "loss": 4.3693, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5892376190984715e-05, |
| "loss": 4.3644, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5883990243474195e-05, |
| "loss": 4.361, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5875604295963675e-05, |
| "loss": 4.3729, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5867218348453155e-05, |
| "loss": 4.3544, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5858865158550094e-05, |
| "loss": 4.3659, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5850479211039573e-05, |
| "loss": 4.3599, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5842093263529053e-05, |
| "loss": 4.3423, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.583370731601854e-05, |
| "loss": 4.3669, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.582532136850802e-05, |
| "loss": 4.352, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.58169354209975e-05, |
| "loss": 4.3565, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580854947348698e-05, |
| "loss": 4.3435, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580016352597646e-05, |
| "loss": 4.3514, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.579179395726967e-05, |
| "loss": 4.3398, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.578340800975915e-05, |
| "loss": 4.3646, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577502206224863e-05, |
| "loss": 4.3506, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.576663611473811e-05, |
| "loss": 4.3556, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575826654603132e-05, |
| "loss": 4.3605, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.57498805985208e-05, |
| "loss": 4.3462, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574149465101028e-05, |
| "loss": 4.3347, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.573310870349976e-05, |
| "loss": 4.3557, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.572472275598924e-05, |
| "loss": 4.3381, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5716353187282454e-05, |
| "loss": 4.3373, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5707967239771934e-05, |
| "loss": 4.354, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5699581292261414e-05, |
| "loss": 4.3518, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5691195344750894e-05, |
| "loss": 4.3405, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5682809397240374e-05, |
| "loss": 4.3363, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5674423449729854e-05, |
| "loss": 4.331, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.566603750221933e-05, |
| "loss": 4.341, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.565765155470881e-05, |
| "loss": 4.349, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564928198600202e-05, |
| "loss": 4.3481, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5640896038491496e-05, |
| "loss": 4.348, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.563252646978471e-05, |
| "loss": 4.3436, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.562414052227419e-05, |
| "loss": 4.3567, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.561575457476368e-05, |
| "loss": 4.3319, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.560736862725315e-05, |
| "loss": 4.3445, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559898267974263e-05, |
| "loss": 4.3403, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559059673223211e-05, |
| "loss": 4.3236, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.558222716352533e-05, |
| "loss": 4.3454, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.55738412160148e-05, |
| "loss": 4.3447, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.556545526850428e-05, |
| "loss": 4.34, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.555706932099376e-05, |
| "loss": 4.3322, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554868337348324e-05, |
| "loss": 4.3233, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.554029742597272e-05, |
| "loss": 4.3375, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.553192785726593e-05, |
| "loss": 4.3349, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5523541909755416e-05, |
| "loss": 4.3414, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5515155962244896e-05, |
| "loss": 4.3309, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5506770014734376e-05, |
| "loss": 4.3407, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5498384067223856e-05, |
| "loss": 4.3276, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5490014498517065e-05, |
| "loss": 4.3284, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5481628551006545e-05, |
| "loss": 4.3281, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5473242603496025e-05, |
| "loss": 4.3271, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5464856655985505e-05, |
| "loss": 4.344, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5456470708474985e-05, |
| "loss": 4.3401, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5448084760964465e-05, |
| "loss": 4.3402, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5439698813453945e-05, |
| "loss": 4.3196, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5431329244747154e-05, |
| "loss": 4.3279, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5422943297236634e-05, |
| "loss": 4.3273, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5414557349726114e-05, |
| "loss": 4.3318, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5406171402215594e-05, |
| "loss": 4.325, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.539778545470508e-05, |
| "loss": 4.3228, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538941588599829e-05, |
| "loss": 4.3162, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.538102993848777e-05, |
| "loss": 4.3378, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.537264399097725e-05, |
| "loss": 4.3069, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.536425804346673e-05, |
| "loss": 4.3214, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535588847475994e-05, |
| "loss": 4.3184, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.534750252724942e-05, |
| "loss": 4.3202, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.53391165797389e-05, |
| "loss": 4.3169, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533073063222838e-05, |
| "loss": 4.3299, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.532234468471786e-05, |
| "loss": 4.3208, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.531395873720733e-05, |
| "loss": 4.3117, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.530557278969682e-05, |
| "loss": 4.3274, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.52971868421863e-05, |
| "loss": 4.3115, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5288817273479514e-05, |
| "loss": 4.3247, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528043132596899e-05, |
| "loss": 4.3332, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.527204537845847e-05, |
| "loss": 4.3209, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.526367580975168e-05, |
| "loss": 4.3148, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.525528986224116e-05, |
| "loss": 4.3148, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5246903914730636e-05, |
| "loss": 4.3177, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5238517967220116e-05, |
| "loss": 4.3148, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5230132019709596e-05, |
| "loss": 4.3174, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221746072199076e-05, |
| "loss": 4.3039, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5213360124688556e-05, |
| "loss": 4.3252, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5204974177178036e-05, |
| "loss": 4.3191, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.519660460847125e-05, |
| "loss": 4.314, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518821866096073e-05, |
| "loss": 4.2988, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517983271345021e-05, |
| "loss": 4.3145, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517144676593969e-05, |
| "loss": 4.3048, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.51630771972329e-05, |
| "loss": 4.3191, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515469124972238e-05, |
| "loss": 4.3221, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514630530221186e-05, |
| "loss": 4.3113, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513791935470134e-05, |
| "loss": 4.2971, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512954978599455e-05, |
| "loss": 4.3141, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512116383848403e-05, |
| "loss": 4.3074, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.511277789097351e-05, |
| "loss": 4.3235, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.510439194346299e-05, |
| "loss": 4.2996, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.509600599595247e-05, |
| "loss": 4.3125, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5087620048441956e-05, |
| "loss": 4.3132, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5079234100931436e-05, |
| "loss": 4.3147, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5070848153420916e-05, |
| "loss": 4.3085, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5062478584714125e-05, |
| "loss": 4.3104, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5054092637203605e-05, |
| "loss": 4.2986, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5045706689693085e-05, |
| "loss": 4.3088, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5037320742182565e-05, |
| "loss": 4.3063, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5028951173475774e-05, |
| "loss": 4.3043, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5020565225965254e-05, |
| "loss": 4.2991, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5012179278454734e-05, |
| "loss": 4.3092, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5003793330944214e-05, |
| "loss": 4.3045, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.283578872680664, |
| "eval_runtime": 289.1657, |
| "eval_samples_per_second": 1319.628, |
| "eval_steps_per_second": 41.239, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4995407383433694e-05, |
| "loss": 4.2952, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4987021435923174e-05, |
| "loss": 4.2927, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4978635488412654e-05, |
| "loss": 4.3131, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4970249540902134e-05, |
| "loss": 4.2974, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4961863593391614e-05, |
| "loss": 4.3094, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4953477645881094e-05, |
| "loss": 4.2973, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4945091698370574e-05, |
| "loss": 4.3028, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4936705750860054e-05, |
| "loss": 4.2954, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4928319803349534e-05, |
| "loss": 4.2965, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4919933855839014e-05, |
| "loss": 4.3007, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4911547908328494e-05, |
| "loss": 4.3013, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4903161960817974e-05, |
| "loss": 4.3081, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.489479239211118e-05, |
| "loss": 4.2918, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.488640644460066e-05, |
| "loss": 4.2911, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.487802049709014e-05, |
| "loss": 4.2929, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486963454957962e-05, |
| "loss": 4.2816, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.48612486020691e-05, |
| "loss": 4.2918, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.485286265455859e-05, |
| "loss": 4.2952, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.484447670704806e-05, |
| "loss": 4.29, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.483609075953754e-05, |
| "loss": 4.311, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.482770481202702e-05, |
| "loss": 4.2976, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.48193188645165e-05, |
| "loss": 4.3043, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481094929580971e-05, |
| "loss": 4.294, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480256334829919e-05, |
| "loss": 4.3001, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479417740078867e-05, |
| "loss": 4.29, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478579145327815e-05, |
| "loss": 4.2928, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477740550576763e-05, |
| "loss": 4.2898, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476901955825711e-05, |
| "loss": 4.2864, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476063361074659e-05, |
| "loss": 4.2821, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475224766323607e-05, |
| "loss": 4.2835, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474387809452929e-05, |
| "loss": 4.2897, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473549214701877e-05, |
| "loss": 4.2953, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472710619950825e-05, |
| "loss": 4.2896, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471872025199773e-05, |
| "loss": 4.2921, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4710350683290936e-05, |
| "loss": 4.2777, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4701964735780416e-05, |
| "loss": 4.2841, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4693578788269896e-05, |
| "loss": 4.2826, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4685192840759376e-05, |
| "loss": 4.2713, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4676806893248856e-05, |
| "loss": 4.2843, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4668453703345794e-05, |
| "loss": 4.274, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466006775583528e-05, |
| "loss": 4.2891, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465168180832476e-05, |
| "loss": 4.2866, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464329586081424e-05, |
| "loss": 4.2819, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463490991330372e-05, |
| "loss": 4.2746, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.46265239657932e-05, |
| "loss": 4.2928, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461813801828268e-05, |
| "loss": 4.272, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460975207077216e-05, |
| "loss": 4.2865, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460138250206537e-05, |
| "loss": 4.2761, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459299655455485e-05, |
| "loss": 4.2603, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.458461060704433e-05, |
| "loss": 4.2859, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457622465953381e-05, |
| "loss": 4.2719, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.456783871202329e-05, |
| "loss": 4.2742, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455945276451277e-05, |
| "loss": 4.2676, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455108319580598e-05, |
| "loss": 4.2663, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542697248295465e-05, |
| "loss": 4.265, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534327679588674e-05, |
| "loss": 4.2775, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4525941732078154e-05, |
| "loss": 4.2743, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.451757216337136e-05, |
| "loss": 4.2762, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450918621586084e-05, |
| "loss": 4.2811, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450080026835032e-05, |
| "loss": 4.2695, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44924143208398e-05, |
| "loss": 4.2562, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448402837332928e-05, |
| "loss": 4.2777, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.447564242581876e-05, |
| "loss": 4.2611, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.446725647830824e-05, |
| "loss": 4.2558, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4458870530797716e-05, |
| "loss": 4.2786, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44504845832872e-05, |
| "loss": 4.275, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444211501458041e-05, |
| "loss": 4.2636, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44337290670699e-05, |
| "loss": 4.2599, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442534311955937e-05, |
| "loss": 4.2511, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.441695717204885e-05, |
| "loss": 4.2637, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440857122453833e-05, |
| "loss": 4.2746, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440018527702781e-05, |
| "loss": 4.2705, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.439179932951729e-05, |
| "loss": 4.2683, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438341338200677e-05, |
| "loss": 4.2664, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437504381329998e-05, |
| "loss": 4.2836, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436665786578946e-05, |
| "loss": 4.2559, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435827191827894e-05, |
| "loss": 4.2696, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434988597076842e-05, |
| "loss": 4.2667, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434151640206164e-05, |
| "loss": 4.242, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4333130454551117e-05, |
| "loss": 4.273, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4324744507040597e-05, |
| "loss": 4.2668, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4316358559530076e-05, |
| "loss": 4.2655, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4307988990823286e-05, |
| "loss": 4.2595, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4299603043312766e-05, |
| "loss": 4.2449, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4291233474605975e-05, |
| "loss": 4.2619, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4282847527095455e-05, |
| "loss": 4.2599, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4274461579584935e-05, |
| "loss": 4.2684, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4266075632074415e-05, |
| "loss": 4.2563, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4257689684563894e-05, |
| "loss": 4.2646, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4249303737053374e-05, |
| "loss": 4.2539, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424093416834659e-05, |
| "loss": 4.2571, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.423254822083607e-05, |
| "loss": 4.2528, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.422416227332555e-05, |
| "loss": 4.2526, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.421577632581503e-05, |
| "loss": 4.2698, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.420739037830451e-05, |
| "loss": 4.2666, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419900443079399e-05, |
| "loss": 4.2709, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419061848328347e-05, |
| "loss": 4.2437, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.418223253577295e-05, |
| "loss": 4.2552, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.417384658826243e-05, |
| "loss": 4.2551, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416549339835937e-05, |
| "loss": 4.2588, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415710745084885e-05, |
| "loss": 4.2563, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414872150333833e-05, |
| "loss": 4.2482, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414033555582781e-05, |
| "loss": 4.2473, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.413194960831729e-05, |
| "loss": 4.2641, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4123563660806775e-05, |
| "loss": 4.2362, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4115177713296255e-05, |
| "loss": 4.2504, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4106791765785735e-05, |
| "loss": 4.2429, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4098422197078944e-05, |
| "loss": 4.2551, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409005262837215e-05, |
| "loss": 4.2465, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.408166668086163e-05, |
| "loss": 4.2559, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407328073335111e-05, |
| "loss": 4.2511, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.406489478584059e-05, |
| "loss": 4.2421, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.405650883833007e-05, |
| "loss": 4.2569, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404813926962328e-05, |
| "loss": 4.2432, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403975332211276e-05, |
| "loss": 4.2556, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403136737460224e-05, |
| "loss": 4.261, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402298142709173e-05, |
| "loss": 4.2496, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40145954795812e-05, |
| "loss": 4.2446, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400620953207068e-05, |
| "loss": 4.247, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39978399633639e-05, |
| "loss": 4.2464, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398945401585338e-05, |
| "loss": 4.2472, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398106806834285e-05, |
| "loss": 4.2479, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397268212083233e-05, |
| "loss": 4.2385, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396429617332181e-05, |
| "loss": 4.2559, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.395591022581129e-05, |
| "loss": 4.2493, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.39475406571045e-05, |
| "loss": 4.2494, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393915470959398e-05, |
| "loss": 4.2331, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3930768762083466e-05, |
| "loss": 4.244, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3922382814572946e-05, |
| "loss": 4.2363, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3913996867062426e-05, |
| "loss": 4.2493, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3905610919551906e-05, |
| "loss": 4.252, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3897224972041386e-05, |
| "loss": 4.2466, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3888839024530866e-05, |
| "loss": 4.2279, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3880469455824075e-05, |
| "loss": 4.249, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3872083508313555e-05, |
| "loss": 4.2417, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3863697560803035e-05, |
| "loss": 4.2538, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3855311613292515e-05, |
| "loss": 4.2329, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3846942044585724e-05, |
| "loss": 4.2476, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3838556097075204e-05, |
| "loss": 4.2448, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3830170149564684e-05, |
| "loss": 4.2477, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3821784202054164e-05, |
| "loss": 4.2408, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3813398254543644e-05, |
| "loss": 4.2429, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.380501230703313e-05, |
| "loss": 4.2313, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.379662635952261e-05, |
| "loss": 4.243, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378824041201209e-05, |
| "loss": 4.2408, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.37798708433053e-05, |
| "loss": 4.2386, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.377148489579478e-05, |
| "loss": 4.2339, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.376311532708799e-05, |
| "loss": 4.2408, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375472937957747e-05, |
| "loss": 4.2419, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.226221561431885, |
| "eval_runtime": 313.8499, |
| "eval_samples_per_second": 1215.839, |
| "eval_steps_per_second": 37.996, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.374634343206695e-05, |
| "loss": 4.2363, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.373795748455643e-05, |
| "loss": 4.2276, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372957153704591e-05, |
| "loss": 4.2467, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.372118558953539e-05, |
| "loss": 4.2317, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.371279964202487e-05, |
| "loss": 4.2425, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.370441369451435e-05, |
| "loss": 4.2353, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3696044125807564e-05, |
| "loss": 4.2354, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.368765817829704e-05, |
| "loss": 4.2354, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367928860959025e-05, |
| "loss": 4.2322, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.367091904088346e-05, |
| "loss": 4.2366, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.366253309337294e-05, |
| "loss": 4.2404, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.365414714586242e-05, |
| "loss": 4.2396, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36457611983519e-05, |
| "loss": 4.23, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.363737525084138e-05, |
| "loss": 4.2267, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362898930333086e-05, |
| "loss": 4.2304, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3620603355820335e-05, |
| "loss": 4.2144, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.361221740830982e-05, |
| "loss": 4.2298, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.360384783960304e-05, |
| "loss": 4.2297, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.359546189209251e-05, |
| "loss": 4.2289, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.358707594458199e-05, |
| "loss": 4.2471, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357868999707147e-05, |
| "loss": 4.2371, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357030404956095e-05, |
| "loss": 4.2418, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.356191810205043e-05, |
| "loss": 4.2327, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.355354853334364e-05, |
| "loss": 4.2333, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.354516258583312e-05, |
| "loss": 4.229, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.35367766383226e-05, |
| "loss": 4.2301, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352839069081208e-05, |
| "loss": 4.2259, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.352000474330156e-05, |
| "loss": 4.227, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.351161879579104e-05, |
| "loss": 4.2217, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.350323284828052e-05, |
| "loss": 4.2237, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3494846900770006e-05, |
| "loss": 4.2269, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3486477332063216e-05, |
| "loss": 4.2327, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3478091384552696e-05, |
| "loss": 4.2276, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3469705437042175e-05, |
| "loss": 4.2332, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3461319489531655e-05, |
| "loss": 4.2162, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3452949920824865e-05, |
| "loss": 4.2246, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3444563973314344e-05, |
| "loss": 4.2216, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3436178025803824e-05, |
| "loss": 4.2117, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3427792078293304e-05, |
| "loss": 4.2251, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3419422509586514e-05, |
| "loss": 4.2092, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3411036562075993e-05, |
| "loss": 4.2263, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3402650614565473e-05, |
| "loss": 4.2268, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.339429742466242e-05, |
| "loss": 4.226, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.33859114771519e-05, |
| "loss": 4.219, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.337752552964138e-05, |
| "loss": 4.2316, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.336913958213086e-05, |
| "loss": 4.2112, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.336077001342407e-05, |
| "loss": 4.2304, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.335238406591355e-05, |
| "loss": 4.2167, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.334399811840303e-05, |
| "loss": 4.1958, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.333561217089251e-05, |
| "loss": 4.2308, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.332722622338199e-05, |
| "loss": 4.2125, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331884027587147e-05, |
| "loss": 4.2197, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331045432836095e-05, |
| "loss": 4.2062, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.330206838085043e-05, |
| "loss": 4.2085, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3293682433339914e-05, |
| "loss": 4.2118, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3285296485829394e-05, |
| "loss": 4.2142, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3276910538318874e-05, |
| "loss": 4.2187, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326852459080835e-05, |
| "loss": 4.2153, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326013864329783e-05, |
| "loss": 4.2259, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.325175269578731e-05, |
| "loss": 4.2097, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.324336674827679e-05, |
| "loss": 4.1991, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3234997179569996e-05, |
| "loss": 4.2188, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.322662761086321e-05, |
| "loss": 4.1981, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.321824166335269e-05, |
| "loss": 4.2006, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32098720946459e-05, |
| "loss": 4.22, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.320150252593912e-05, |
| "loss": 4.2183, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.31931165784286e-05, |
| "loss": 4.204, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3184730630918077e-05, |
| "loss": 4.2064, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3176344683407557e-05, |
| "loss": 4.195, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3167958735897036e-05, |
| "loss": 4.2059, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3159572788386516e-05, |
| "loss": 4.2162, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3151186840875996e-05, |
| "loss": 4.2136, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.314280089336547e-05, |
| "loss": 4.213, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.313441494585495e-05, |
| "loss": 4.2097, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.312602899834443e-05, |
| "loss": 4.2257, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311764305083391e-05, |
| "loss": 4.1977, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310925710332339e-05, |
| "loss": 4.2133, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310087115581287e-05, |
| "loss": 4.2097, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3092501587106085e-05, |
| "loss": 4.1898, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3084115639595565e-05, |
| "loss": 4.2147, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3075729692085045e-05, |
| "loss": 4.212, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3067343744574525e-05, |
| "loss": 4.2096, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3058957797064005e-05, |
| "loss": 4.2062, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3050571849553485e-05, |
| "loss": 4.1941, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3042202280846694e-05, |
| "loss": 4.2, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3033816333336174e-05, |
| "loss": 4.2057, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.302544676462938e-05, |
| "loss": 4.2132, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.301706081711886e-05, |
| "loss": 4.2, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.300867486960834e-05, |
| "loss": 4.2112, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.300028892209782e-05, |
| "loss": 4.1988, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.29919029745873e-05, |
| "loss": 4.2048, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.298351702707678e-05, |
| "loss": 4.1974, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.297513107956627e-05, |
| "loss": 4.2007, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.296676151085948e-05, |
| "loss": 4.2156, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295837556334896e-05, |
| "loss": 4.2127, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.294998961583844e-05, |
| "loss": 4.2156, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.294160366832792e-05, |
| "loss": 4.1881, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.29332177208174e-05, |
| "loss": 4.2026, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.292483177330688e-05, |
| "loss": 4.1981, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291644582579636e-05, |
| "loss": 4.2083, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.290805987828583e-05, |
| "loss": 4.1981, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289967393077531e-05, |
| "loss": 4.1996, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.289128798326479e-05, |
| "loss": 4.1911, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.288290203575427e-05, |
| "loss": 4.2079, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.287451608824375e-05, |
| "loss": 4.1833, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.286614651953697e-05, |
| "loss": 4.2001, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.285776057202645e-05, |
| "loss": 4.1848, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284937462451593e-05, |
| "loss": 4.2044, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284098867700541e-05, |
| "loss": 4.1956, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.283260272949489e-05, |
| "loss": 4.1979, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2824233160788096e-05, |
| "loss": 4.1962, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2815847213277576e-05, |
| "loss": 4.1962, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2807461265767056e-05, |
| "loss": 4.1998, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2799075318256536e-05, |
| "loss": 4.1923, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2790689370746016e-05, |
| "loss": 4.1982, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2782303423235496e-05, |
| "loss": 4.213, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2773917475724976e-05, |
| "loss": 4.1994, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.276554790701819e-05, |
| "loss": 4.191, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.275716195950767e-05, |
| "loss": 4.1968, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274877601199715e-05, |
| "loss": 4.1958, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.274039006448663e-05, |
| "loss": 4.189, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.273200411697611e-05, |
| "loss": 4.1989, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.272361816946559e-05, |
| "loss": 4.1855, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.271523222195507e-05, |
| "loss": 4.203, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2706846274444545e-05, |
| "loss": 4.2019, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269847670573776e-05, |
| "loss": 4.192, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269009075822724e-05, |
| "loss": 4.1841, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.268172118952045e-05, |
| "loss": 4.1908, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.267333524200993e-05, |
| "loss": 4.1903, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.266494929449941e-05, |
| "loss": 4.1986, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.265656334698889e-05, |
| "loss": 4.1957, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264817739947837e-05, |
| "loss": 4.1983, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.263979145196785e-05, |
| "loss": 4.1767, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2631421883261065e-05, |
| "loss": 4.1984, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2623035935750545e-05, |
| "loss": 4.1912, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.261464998824002e-05, |
| "loss": 4.2025, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26062640407295e-05, |
| "loss": 4.1813, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.259787809321898e-05, |
| "loss": 4.2015, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258949214570846e-05, |
| "loss": 4.1912, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258110619819794e-05, |
| "loss": 4.1988, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.257273662949115e-05, |
| "loss": 4.1854, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.256435068198063e-05, |
| "loss": 4.2003, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.255596473447011e-05, |
| "loss": 4.1774, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2547578786959594e-05, |
| "loss": 4.1944, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.25392092182528e-05, |
| "loss": 4.1918, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253082327074228e-05, |
| "loss": 4.1856, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252243732323176e-05, |
| "loss": 4.1891, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251405137572124e-05, |
| "loss": 4.1859, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.250568180701445e-05, |
| "loss": 4.192, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.185070514678955, |
| "eval_runtime": 304.5595, |
| "eval_samples_per_second": 1252.928, |
| "eval_steps_per_second": 39.155, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.249729585950393e-05, |
| "loss": 4.1901, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248890991199341e-05, |
| "loss": 4.1763, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248052396448289e-05, |
| "loss": 4.1959, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247213801697237e-05, |
| "loss": 4.1826, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246375206946185e-05, |
| "loss": 4.1932, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245536612195133e-05, |
| "loss": 4.1856, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244698017444081e-05, |
| "loss": 4.1882, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243859422693029e-05, |
| "loss": 4.1875, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243020827941978e-05, |
| "loss": 4.1837, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242182233190926e-05, |
| "loss": 4.1877, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.241343638439873e-05, |
| "loss": 4.1903, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.240505043688821e-05, |
| "loss": 4.1896, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239668086818143e-05, |
| "loss": 4.1829, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238829492067091e-05, |
| "loss": 4.1786, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237990897316038e-05, |
| "loss": 4.1829, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237152302564986e-05, |
| "loss": 4.1661, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236313707813934e-05, |
| "loss": 4.1836, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235475113062882e-05, |
| "loss": 4.1799, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23463651831183e-05, |
| "loss": 4.1794, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233797923560778e-05, |
| "loss": 4.1995, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232959328809726e-05, |
| "loss": 4.1925, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232120734058675e-05, |
| "loss": 4.1943, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.231282139307623e-05, |
| "loss": 4.1808, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230443544556571e-05, |
| "loss": 4.1861, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2296065876858916e-05, |
| "loss": 4.1828, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2287679929348396e-05, |
| "loss": 4.1832, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2279293981837876e-05, |
| "loss": 4.18, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2270908034327356e-05, |
| "loss": 4.18, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2262522086816836e-05, |
| "loss": 4.1718, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2254152518110045e-05, |
| "loss": 4.1761, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2245766570599525e-05, |
| "loss": 4.1773, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2237380623089005e-05, |
| "loss": 4.1873, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2228994675578485e-05, |
| "loss": 4.1818, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.22206251068717e-05, |
| "loss": 4.1856, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221223915936118e-05, |
| "loss": 4.1724, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.220386959065439e-05, |
| "loss": 4.1753, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219548364314387e-05, |
| "loss": 4.1778, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.218709769563335e-05, |
| "loss": 4.1676, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217871174812283e-05, |
| "loss": 4.1744, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217034217941604e-05, |
| "loss": 4.1685, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.216195623190552e-05, |
| "loss": 4.1758, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2153570284395e-05, |
| "loss": 4.181, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214518433688448e-05, |
| "loss": 4.1774, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.213679838937396e-05, |
| "loss": 4.1747, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212842882066717e-05, |
| "loss": 4.1819, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212004287315665e-05, |
| "loss": 4.1707, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2111656925646134e-05, |
| "loss": 4.1804, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2103270978135614e-05, |
| "loss": 4.1728, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2094885030625094e-05, |
| "loss": 4.1485, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208649908311457e-05, |
| "loss": 4.1919, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207811313560405e-05, |
| "loss": 4.1631, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206972718809353e-05, |
| "loss": 4.1751, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.206134124058301e-05, |
| "loss": 4.1572, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2052971671876216e-05, |
| "loss": 4.1658, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.204461848197316e-05, |
| "loss": 4.1608, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.203623253446264e-05, |
| "loss": 4.1717, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.202784658695212e-05, |
| "loss": 4.1719, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20194606394416e-05, |
| "loss": 4.1719, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201107469193109e-05, |
| "loss": 4.1798, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200268874442057e-05, |
| "loss": 4.1652, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.199430279691004e-05, |
| "loss": 4.1532, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.198591684939952e-05, |
| "loss": 4.179, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1977530901889e-05, |
| "loss": 4.1544, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196914495437848e-05, |
| "loss": 4.1525, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196075900686796e-05, |
| "loss": 4.1759, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195237305935744e-05, |
| "loss": 4.1745, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194400349065065e-05, |
| "loss": 4.1584, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193561754314013e-05, |
| "loss": 4.1665, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.192723159562961e-05, |
| "loss": 4.1502, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191884564811909e-05, |
| "loss": 4.1617, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1910476079412306e-05, |
| "loss": 4.1699, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1902090131901786e-05, |
| "loss": 4.1687, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1893704184391266e-05, |
| "loss": 4.1731, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1885318236880746e-05, |
| "loss": 4.168, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1876948668173955e-05, |
| "loss": 4.18, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1868562720663435e-05, |
| "loss": 4.1532, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1860176773152915e-05, |
| "loss": 4.1721, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1851807204446124e-05, |
| "loss": 4.1615, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1843421256935604e-05, |
| "loss": 4.1522, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1835035309425084e-05, |
| "loss": 4.1664, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1826649361914564e-05, |
| "loss": 4.1726, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1818263414404044e-05, |
| "loss": 4.1659, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180989384569726e-05, |
| "loss": 4.1652, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180150789818674e-05, |
| "loss": 4.1486, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.179312195067622e-05, |
| "loss": 4.1524, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.17847360031657e-05, |
| "loss": 4.1664, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177635005565518e-05, |
| "loss": 4.1692, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176796410814466e-05, |
| "loss": 4.1575, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.175957816063414e-05, |
| "loss": 4.1679, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175119221312362e-05, |
| "loss": 4.1506, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174282264441683e-05, |
| "loss": 4.1635, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173445307571004e-05, |
| "loss": 4.1557, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.172606712819952e-05, |
| "loss": 4.1589, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1717681180689e-05, |
| "loss": 4.1721, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170929523317848e-05, |
| "loss": 4.1715, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1700909285667964e-05, |
| "loss": 4.1737, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1692523338157444e-05, |
| "loss": 4.1456, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.168415376945065e-05, |
| "loss": 4.1609, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167576782194013e-05, |
| "loss": 4.1541, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166738187442961e-05, |
| "loss": 4.1645, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165901230572282e-05, |
| "loss": 4.1575, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16506263582123e-05, |
| "loss": 4.1593, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.164224041070178e-05, |
| "loss": 4.1507, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163385446319126e-05, |
| "loss": 4.1609, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162546851568074e-05, |
| "loss": 4.1445, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.161708256817022e-05, |
| "loss": 4.1571, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16086966206597e-05, |
| "loss": 4.1421, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160031067314918e-05, |
| "loss": 4.1628, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.159192472563866e-05, |
| "loss": 4.1535, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158353877812814e-05, |
| "loss": 4.1557, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.157515283061762e-05, |
| "loss": 4.1518, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15667668831071e-05, |
| "loss": 4.1572, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155838093559658e-05, |
| "loss": 4.1573, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155001136688979e-05, |
| "loss": 4.1509, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154162541937927e-05, |
| "loss": 4.1604, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153325585067248e-05, |
| "loss": 4.1668, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152486990316196e-05, |
| "loss": 4.1609, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151648395565144e-05, |
| "loss": 4.152, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150809800814092e-05, |
| "loss": 4.1505, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14997120606304e-05, |
| "loss": 4.1588, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149132611311988e-05, |
| "loss": 4.146, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1482940165609366e-05, |
| "loss": 4.1592, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1474554218098846e-05, |
| "loss": 4.1438, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1466184649392055e-05, |
| "loss": 4.161, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1457798701881535e-05, |
| "loss": 4.1584, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1449412754371015e-05, |
| "loss": 4.1538, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1441026806860495e-05, |
| "loss": 4.1439, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1432657238153704e-05, |
| "loss": 4.1491, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.142428766944691e-05, |
| "loss": 4.1491, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.141590172193639e-05, |
| "loss": 4.1614, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.140751577442587e-05, |
| "loss": 4.1558, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139912982691535e-05, |
| "loss": 4.1542, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139074387940483e-05, |
| "loss": 4.1386, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138237431069805e-05, |
| "loss": 4.1595, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137398836318753e-05, |
| "loss": 4.1502, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136560241567701e-05, |
| "loss": 4.1629, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.135721646816649e-05, |
| "loss": 4.1413, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134883052065597e-05, |
| "loss": 4.1606, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134044457314545e-05, |
| "loss": 4.1503, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.133205862563493e-05, |
| "loss": 4.1647, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.132367267812441e-05, |
| "loss": 4.1461, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.131530310941762e-05, |
| "loss": 4.1554, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13069171619071e-05, |
| "loss": 4.1412, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129853121439658e-05, |
| "loss": 4.1533, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129016164568979e-05, |
| "loss": 4.1527, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1281775698179273e-05, |
| "loss": 4.1434, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1273389750668753e-05, |
| "loss": 4.153, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.126500380315823e-05, |
| "loss": 4.1428, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1256617855647707e-05, |
| "loss": 4.1562, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.153366565704346, |
| "eval_runtime": 292.5709, |
| "eval_samples_per_second": 1304.268, |
| "eval_steps_per_second": 40.759, |
| "step": 534240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1248231908137186e-05, |
| "loss": 4.151, |
| "step": 534528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1239845960626666e-05, |
| "loss": 4.1365, |
| "step": 535040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1231460013116146e-05, |
| "loss": 4.1536, |
| "step": 535552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1223090444409356e-05, |
| "loss": 4.1435, |
| "step": 536064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1214704496898835e-05, |
| "loss": 4.1557, |
| "step": 536576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1206318549388315e-05, |
| "loss": 4.1487, |
| "step": 537088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1197932601877795e-05, |
| "loss": 4.1473, |
| "step": 537600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118956303317101e-05, |
| "loss": 4.1483, |
| "step": 538112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.118117708566049e-05, |
| "loss": 4.1425, |
| "step": 538624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.117279113814997e-05, |
| "loss": 4.1498, |
| "step": 539136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.116440519063945e-05, |
| "loss": 4.151, |
| "step": 539648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.115601924312893e-05, |
| "loss": 4.1513, |
| "step": 540160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.114764967442214e-05, |
| "loss": 4.1474, |
| "step": 540672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.113926372691162e-05, |
| "loss": 4.1381, |
| "step": 541184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.11308777794011e-05, |
| "loss": 4.1399, |
| "step": 541696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.112249183189058e-05, |
| "loss": 4.1312, |
| "step": 542208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.111410588438006e-05, |
| "loss": 4.1422, |
| "step": 542720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.110571993686954e-05, |
| "loss": 4.1461, |
| "step": 543232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.109733398935902e-05, |
| "loss": 4.1368, |
| "step": 543744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108896442065223e-05, |
| "loss": 4.1586, |
| "step": 544256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108057847314171e-05, |
| "loss": 4.1539, |
| "step": 544768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1072192525631196e-05, |
| "loss": 4.1564, |
| "step": 545280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1063806578120676e-05, |
| "loss": 4.1443, |
| "step": 545792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1055420630610156e-05, |
| "loss": 4.1441, |
| "step": 546304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1047034683099636e-05, |
| "loss": 4.1518, |
| "step": 546816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1038648735589115e-05, |
| "loss": 4.141, |
| "step": 547328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1030262788078595e-05, |
| "loss": 4.1407, |
| "step": 547840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1021893219371805e-05, |
| "loss": 4.1476, |
| "step": 548352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1013507271861285e-05, |
| "loss": 4.1317, |
| "step": 548864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1005121324350764e-05, |
| "loss": 4.1362, |
| "step": 549376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0996751755643974e-05, |
| "loss": 4.1407, |
| "step": 549888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0988365808133454e-05, |
| "loss": 4.1453, |
| "step": 550400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0979979860622933e-05, |
| "loss": 4.1464, |
| "step": 550912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0971593913112413e-05, |
| "loss": 4.1457, |
| "step": 551424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.096320796560189e-05, |
| "loss": 4.1336, |
| "step": 551936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.095482201809137e-05, |
| "loss": 4.1361, |
| "step": 552448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.094643607058085e-05, |
| "loss": 4.1438, |
| "step": 552960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.093805012307033e-05, |
| "loss": 4.1282, |
| "step": 553472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.092968055436354e-05, |
| "loss": 4.1376, |
| "step": 553984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.092129460685302e-05, |
| "loss": 4.1285, |
| "step": 554496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.091292503814624e-05, |
| "loss": 4.1415, |
| "step": 555008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.090453909063572e-05, |
| "loss": 4.1374, |
| "step": 555520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.089616952192893e-05, |
| "loss": 4.1405, |
| "step": 556032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.088778357441841e-05, |
| "loss": 4.1397, |
| "step": 556544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.087939762690789e-05, |
| "loss": 4.141, |
| "step": 557056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.087101167939737e-05, |
| "loss": 4.1351, |
| "step": 557568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.086262573188685e-05, |
| "loss": 4.1427, |
| "step": 558080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.085423978437633e-05, |
| "loss": 4.1331, |
| "step": 558592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.084585383686581e-05, |
| "loss": 4.114, |
| "step": 559104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.083746788935529e-05, |
| "loss": 4.1514, |
| "step": 559616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082908194184477e-05, |
| "loss": 4.1266, |
| "step": 560128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.082069599433425e-05, |
| "loss": 4.1372, |
| "step": 560640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.081231004682373e-05, |
| "loss": 4.1257, |
| "step": 561152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.080392409931321e-05, |
| "loss": 4.1262, |
| "step": 561664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0795554530606416e-05, |
| "loss": 4.1219, |
| "step": 562176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0787168583095896e-05, |
| "loss": 4.1332, |
| "step": 562688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0778782635585376e-05, |
| "loss": 4.1382, |
| "step": 563200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0770396688074856e-05, |
| "loss": 4.131, |
| "step": 563712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0762010740564336e-05, |
| "loss": 4.1452, |
| "step": 564224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.075365755066128e-05, |
| "loss": 4.1345, |
| "step": 564736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.074527160315076e-05, |
| "loss": 4.114, |
| "step": 565248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.073688565564024e-05, |
| "loss": 4.1383, |
| "step": 565760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072849970812972e-05, |
| "loss": 4.1205, |
| "step": 566272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.07201137606192e-05, |
| "loss": 4.1128, |
| "step": 566784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.071172781310868e-05, |
| "loss": 4.1434, |
| "step": 567296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.070334186559816e-05, |
| "loss": 4.1361, |
| "step": 567808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.069495591808764e-05, |
| "loss": 4.1232, |
| "step": 568320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.068658634938085e-05, |
| "loss": 4.1317, |
| "step": 568832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067820040187033e-05, |
| "loss": 4.1159, |
| "step": 569344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.066981445435981e-05, |
| "loss": 4.1251, |
| "step": 569856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.066142850684929e-05, |
| "loss": 4.1314, |
| "step": 570368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0653058938142505e-05, |
| "loss": 4.1338, |
| "step": 570880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0644672990631985e-05, |
| "loss": 4.1357, |
| "step": 571392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0636303421925194e-05, |
| "loss": 4.1303, |
| "step": 571904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0627917474414674e-05, |
| "loss": 4.1457, |
| "step": 572416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0619531526904154e-05, |
| "loss": 4.1165, |
| "step": 572928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0611145579393634e-05, |
| "loss": 4.1381, |
| "step": 573440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0602759631883114e-05, |
| "loss": 4.1235, |
| "step": 573952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0594373684372594e-05, |
| "loss": 4.1216, |
| "step": 574464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0585987736862074e-05, |
| "loss": 4.1312, |
| "step": 574976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0577601789351554e-05, |
| "loss": 4.1357, |
| "step": 575488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056921584184103e-05, |
| "loss": 4.1311, |
| "step": 576000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056084627313424e-05, |
| "loss": 4.1353, |
| "step": 576512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.055247670442746e-05, |
| "loss": 4.1121, |
| "step": 577024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.054409075691694e-05, |
| "loss": 4.1175, |
| "step": 577536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.053570480940642e-05, |
| "loss": 4.1298, |
| "step": 578048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.05273188618959e-05, |
| "loss": 4.1359, |
| "step": 578560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051893291438538e-05, |
| "loss": 4.1248, |
| "step": 579072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051057972448232e-05, |
| "loss": 4.1311, |
| "step": 579584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.05021937769718e-05, |
| "loss": 4.1156, |
| "step": 580096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.049380782946128e-05, |
| "loss": 4.1296, |
| "step": 580608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.048542188195076e-05, |
| "loss": 4.1213, |
| "step": 581120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.047703593444024e-05, |
| "loss": 4.1225, |
| "step": 581632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0468666365733446e-05, |
| "loss": 4.1362, |
| "step": 582144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0460280418222926e-05, |
| "loss": 4.1362, |
| "step": 582656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.045189447071241e-05, |
| "loss": 4.1406, |
| "step": 583168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.044350852320189e-05, |
| "loss": 4.1155, |
| "step": 583680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.043512257569137e-05, |
| "loss": 4.1235, |
| "step": 584192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.042673662818085e-05, |
| "loss": 4.121, |
| "step": 584704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0418350680670326e-05, |
| "loss": 4.1273, |
| "step": 585216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0409964733159806e-05, |
| "loss": 4.122, |
| "step": 585728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0401578785649285e-05, |
| "loss": 4.1281, |
| "step": 586240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0393209216942495e-05, |
| "loss": 4.118, |
| "step": 586752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0384823269431975e-05, |
| "loss": 4.1219, |
| "step": 587264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0376437321921454e-05, |
| "loss": 4.118, |
| "step": 587776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0368051374410934e-05, |
| "loss": 4.1218, |
| "step": 588288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0359665426900414e-05, |
| "loss": 4.1083, |
| "step": 588800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0351279479389894e-05, |
| "loss": 4.1285, |
| "step": 589312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0342893531879374e-05, |
| "loss": 4.1208, |
| "step": 589824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.033450758436886e-05, |
| "loss": 4.1236, |
| "step": 590336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.032613801566207e-05, |
| "loss": 4.1168, |
| "step": 590848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.031775206815155e-05, |
| "loss": 4.1234, |
| "step": 591360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030936612064103e-05, |
| "loss": 4.1251, |
| "step": 591872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030098017313051e-05, |
| "loss": 4.1172, |
| "step": 592384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.029259422561999e-05, |
| "loss": 4.1292, |
| "step": 592896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.028424103571693e-05, |
| "loss": 4.1309, |
| "step": 593408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.027585508820641e-05, |
| "loss": 4.1214, |
| "step": 593920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.026746914069589e-05, |
| "loss": 4.1263, |
| "step": 594432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025908319318537e-05, |
| "loss": 4.1162, |
| "step": 594944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025069724567485e-05, |
| "loss": 4.1201, |
| "step": 595456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.024231129816433e-05, |
| "loss": 4.121, |
| "step": 595968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0233925350653815e-05, |
| "loss": 4.1244, |
| "step": 596480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0225539403143295e-05, |
| "loss": 4.1064, |
| "step": 596992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0217153455632775e-05, |
| "loss": 4.1274, |
| "step": 597504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0208783886925984e-05, |
| "loss": 4.1266, |
| "step": 598016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0200397939415464e-05, |
| "loss": 4.1226, |
| "step": 598528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0192011991904944e-05, |
| "loss": 4.1106, |
| "step": 599040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0183626044394424e-05, |
| "loss": 4.1158, |
| "step": 599552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.017527285449136e-05, |
| "loss": 4.1156, |
| "step": 600064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.016688690698084e-05, |
| "loss": 4.1271, |
| "step": 600576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015850095947032e-05, |
| "loss": 4.1222, |
| "step": 601088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015013139076354e-05, |
| "loss": 4.1243, |
| "step": 601600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.014174544325302e-05, |
| "loss": 4.1038, |
| "step": 602112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01333594957425e-05, |
| "loss": 4.1253, |
| "step": 602624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.012497354823198e-05, |
| "loss": 4.119, |
| "step": 603136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.011658760072146e-05, |
| "loss": 4.1292, |
| "step": 603648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010820165321094e-05, |
| "loss": 4.1125, |
| "step": 604160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009981570570042e-05, |
| "loss": 4.122, |
| "step": 604672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.00914297581899e-05, |
| "loss": 4.1178, |
| "step": 605184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.008304381067938e-05, |
| "loss": 4.1335, |
| "step": 605696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.007465786316886e-05, |
| "loss": 4.114, |
| "step": 606208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.006627191565833e-05, |
| "loss": 4.1216, |
| "step": 606720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.005788596814781e-05, |
| "loss": 4.1156, |
| "step": 607232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0049532778244755e-05, |
| "loss": 4.1214, |
| "step": 607744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0041146830734235e-05, |
| "loss": 4.1145, |
| "step": 608256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.003276088322372e-05, |
| "loss": 4.1113, |
| "step": 608768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.00243749357132e-05, |
| "loss": 4.1214, |
| "step": 609280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.001598898820268e-05, |
| "loss": 4.109, |
| "step": 609792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.000760304069216e-05, |
| "loss": 4.1266, |
| "step": 610304 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.129226207733154, |
| "eval_runtime": 292.3983, |
| "eval_samples_per_second": 1305.038, |
| "eval_steps_per_second": 40.783, |
| "step": 610560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9999217093181635e-05, |
| "loss": 4.1134, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9990831145671115e-05, |
| "loss": 4.1079, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9982445198160595e-05, |
| "loss": 4.1177, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9974059250650075e-05, |
| "loss": 4.1133, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9965673303139555e-05, |
| "loss": 4.1248, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9957287355629035e-05, |
| "loss": 4.1156, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9948901408118515e-05, |
| "loss": 4.1175, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9940515460607995e-05, |
| "loss": 4.1188, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9932129513097475e-05, |
| "loss": 4.1091, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9923743565586955e-05, |
| "loss": 4.1192, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9915357618076435e-05, |
| "loss": 4.1202, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.990697167056592e-05, |
| "loss": 4.1198, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989860210185913e-05, |
| "loss": 4.1183, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989021615434861e-05, |
| "loss": 4.1019, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.988183020683809e-05, |
| "loss": 4.1124, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.987344425932757e-05, |
| "loss": 4.0966, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9865058311817044e-05, |
| "loss": 4.1139, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9856672364306523e-05, |
| "loss": 4.1141, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9848286416796003e-05, |
| "loss": 4.105, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9839900469285483e-05, |
| "loss": 4.128, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983151452177496e-05, |
| "loss": 4.1233, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982314495306817e-05, |
| "loss": 4.1238, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.981475900555766e-05, |
| "loss": 4.115, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980637305804714e-05, |
| "loss": 4.1147, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979798711053662e-05, |
| "loss": 4.1209, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.97896011630261e-05, |
| "loss": 4.1083, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978121521551558e-05, |
| "loss": 4.108, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.977284564680879e-05, |
| "loss": 4.1151, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.976445969929827e-05, |
| "loss": 4.1026, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.975607375178775e-05, |
| "loss": 4.1042, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.974768780427723e-05, |
| "loss": 4.1124, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973930185676671e-05, |
| "loss": 4.1155, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973091590925619e-05, |
| "loss": 4.1096, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.972252996174567e-05, |
| "loss": 4.1211, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.971414401423515e-05, |
| "loss": 4.1048, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.970577444552836e-05, |
| "loss": 4.1052, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.969738849801784e-05, |
| "loss": 4.1125, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9689002550507324e-05, |
| "loss": 4.0967, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9680616602996804e-05, |
| "loss": 4.1101, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9672230655486283e-05, |
| "loss": 4.1027, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.966386108677949e-05, |
| "loss": 4.106, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.965547513926897e-05, |
| "loss": 4.11, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.964708919175845e-05, |
| "loss": 4.1091, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963870324424793e-05, |
| "loss": 4.1116, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9630317296737406e-05, |
| "loss": 4.1091, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9621931349226886e-05, |
| "loss": 4.1062, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.96135617805201e-05, |
| "loss": 4.1094, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960517583300958e-05, |
| "loss": 4.1092, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959678988549906e-05, |
| "loss": 4.0819, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958840393798854e-05, |
| "loss": 4.1171, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958001799047802e-05, |
| "loss": 4.0977, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957164842177124e-05, |
| "loss": 4.1104, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956326247426071e-05, |
| "loss": 4.0974, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955487652675019e-05, |
| "loss": 4.0958, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.954649057923967e-05, |
| "loss": 4.0978, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953810463172915e-05, |
| "loss": 4.097, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952971868421863e-05, |
| "loss": 4.1106, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952133273670811e-05, |
| "loss": 4.0987, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.951294678919759e-05, |
| "loss": 4.1187, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.950456084168707e-05, |
| "loss": 4.1065, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.949619127298028e-05, |
| "loss": 4.0846, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948780532546976e-05, |
| "loss": 4.1054, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9479419377959246e-05, |
| "loss": 4.0989, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9471033430448726e-05, |
| "loss": 4.0856, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9462647482938206e-05, |
| "loss": 4.1095, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9454277914231415e-05, |
| "loss": 4.1044, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9445891966720895e-05, |
| "loss": 4.0953, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9437506019210375e-05, |
| "loss": 4.102, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9429120071699855e-05, |
| "loss": 4.0829, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9420734124189335e-05, |
| "loss": 4.0974, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9412364555482544e-05, |
| "loss": 4.1018, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9403978607972024e-05, |
| "loss": 4.1029, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9395592660461504e-05, |
| "loss": 4.1102, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9387206712950984e-05, |
| "loss": 4.0968, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9378820765440464e-05, |
| "loss": 4.1139, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9370434817929943e-05, |
| "loss": 4.0936, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.936206524922316e-05, |
| "loss": 4.1067, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.935367930171264e-05, |
| "loss": 4.0922, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.934529335420212e-05, |
| "loss": 4.0909, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.933692378549533e-05, |
| "loss": 4.1035, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932853783798481e-05, |
| "loss": 4.1077, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932015189047429e-05, |
| "loss": 4.1018, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.931176594296377e-05, |
| "loss": 4.1062, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.930339637425698e-05, |
| "loss": 4.0816, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.929501042674646e-05, |
| "loss": 4.0872, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.928662447923594e-05, |
| "loss": 4.1046, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927823853172542e-05, |
| "loss": 4.1029, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.92698525842149e-05, |
| "loss": 4.0956, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.926146663670438e-05, |
| "loss": 4.1028, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.925308068919386e-05, |
| "loss": 4.0893, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924469474168334e-05, |
| "loss": 4.1004, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.923630879417282e-05, |
| "loss": 4.0933, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9227939225466026e-05, |
| "loss": 4.0937, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9219553277955506e-05, |
| "loss": 4.1065, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9211167330444986e-05, |
| "loss": 4.1067, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9202781382934466e-05, |
| "loss": 4.1101, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9194395435423946e-05, |
| "loss": 4.0875, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9186009487913426e-05, |
| "loss": 4.0941, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9177623540402906e-05, |
| "loss": 4.0929, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9169237592892386e-05, |
| "loss": 4.0976, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9160851645381866e-05, |
| "loss": 4.0962, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9152465697871346e-05, |
| "loss": 4.0949, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.914409612916456e-05, |
| "loss": 4.0944, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.913571018165404e-05, |
| "loss": 4.0906, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912732423414352e-05, |
| "loss": 4.0888, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911895466543673e-05, |
| "loss": 4.0924, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911056871792621e-05, |
| "loss": 4.0817, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910218277041569e-05, |
| "loss": 4.0977, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909379682290517e-05, |
| "loss": 4.0947, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.908541087539465e-05, |
| "loss": 4.09, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907702492788413e-05, |
| "loss": 4.0941, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9068638980373604e-05, |
| "loss": 4.0914, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9060253032863083e-05, |
| "loss": 4.0977, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90518834641563e-05, |
| "loss": 4.0889, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9043513895449515e-05, |
| "loss": 4.102, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9035127947938995e-05, |
| "loss": 4.1021, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9026742000428475e-05, |
| "loss": 4.0969, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9018356052917955e-05, |
| "loss": 4.0951, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900997010540743e-05, |
| "loss": 4.0901, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900158415789691e-05, |
| "loss": 4.0895, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.899319821038639e-05, |
| "loss": 4.0946, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.898481226287587e-05, |
| "loss": 4.0956, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.897644269416908e-05, |
| "loss": 4.0766, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896805674665856e-05, |
| "loss": 4.1032, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895967079914804e-05, |
| "loss": 4.0966, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8951284851637524e-05, |
| "loss": 4.0944, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894291528293073e-05, |
| "loss": 4.0854, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.893452933542021e-05, |
| "loss": 4.0872, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.892614338790969e-05, |
| "loss": 4.0876, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891775744039917e-05, |
| "loss": 4.0974, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890938787169238e-05, |
| "loss": 4.0953, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890100192418186e-05, |
| "loss": 4.0959, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.889261597667134e-05, |
| "loss": 4.0753, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.888423002916082e-05, |
| "loss": 4.0994, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.88758440816503e-05, |
| "loss": 4.091, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.886745813413978e-05, |
| "loss": 4.1043, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885907218662926e-05, |
| "loss": 4.0861, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885068623911874e-05, |
| "loss": 4.0907, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.884231667041196e-05, |
| "loss": 4.0935, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.883393072290144e-05, |
| "loss": 4.1019, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.882554477539092e-05, |
| "loss": 4.085, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.88171588278804e-05, |
| "loss": 4.0957, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8808789259173606e-05, |
| "loss": 4.0884, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8800403311663086e-05, |
| "loss": 4.0979, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8792033742956296e-05, |
| "loss": 4.086, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8783647795445775e-05, |
| "loss": 4.0826, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8775261847935255e-05, |
| "loss": 4.0918, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8766875900424735e-05, |
| "loss": 4.0844, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8758489952914215e-05, |
| "loss": 4.0984, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.109011650085449, |
| "eval_runtime": 298.977, |
| "eval_samples_per_second": 1276.322, |
| "eval_steps_per_second": 39.886, |
| "step": 686880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.875012038420743e-05, |
| "loss": 4.0863, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.874173443669691e-05, |
| "loss": 4.0809, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.873334848918639e-05, |
| "loss": 4.0909, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.872496254167587e-05, |
| "loss": 4.0863, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.871657659416535e-05, |
| "loss": 4.0994, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870820702545856e-05, |
| "loss": 4.089, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869982107794804e-05, |
| "loss": 4.0854, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869143513043752e-05, |
| "loss": 4.0958, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8683049182927e-05, |
| "loss": 4.0828, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.867466323541648e-05, |
| "loss": 4.0912, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.866627728790596e-05, |
| "loss": 4.0921, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865789134039544e-05, |
| "loss": 4.0922, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864952177168865e-05, |
| "loss": 4.093, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864113582417813e-05, |
| "loss": 4.0789, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.863274987666761e-05, |
| "loss": 4.0825, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.862436392915709e-05, |
| "loss": 4.0682, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.861597798164657e-05, |
| "loss": 4.0884, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.860759203413605e-05, |
| "loss": 4.0902, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859920608662553e-05, |
| "loss": 4.0767, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859082013911501e-05, |
| "loss": 4.1007, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.858245057040822e-05, |
| "loss": 4.0971, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.85740646228977e-05, |
| "loss": 4.095, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.856567867538718e-05, |
| "loss": 4.0919, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.855730910668039e-05, |
| "loss": 4.0883, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854892315916987e-05, |
| "loss": 4.0914, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854053721165935e-05, |
| "loss": 4.0874, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8532151264148833e-05, |
| "loss": 4.0753, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.852376531663831e-05, |
| "loss": 4.0923, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.851537936912779e-05, |
| "loss": 4.0758, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.850699342161727e-05, |
| "loss": 4.0801, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849862385291048e-05, |
| "loss": 4.0864, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849023790539996e-05, |
| "loss": 4.0892, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.848185195788944e-05, |
| "loss": 4.082, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.847346601037892e-05, |
| "loss": 4.0929, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84650800628684e-05, |
| "loss": 4.0801, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.845671049416161e-05, |
| "loss": 4.0758, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.844832454665109e-05, |
| "loss": 4.0897, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843993859914057e-05, |
| "loss": 4.0682, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843155265163005e-05, |
| "loss": 4.0817, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.842316670411953e-05, |
| "loss": 4.0821, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.841478075660902e-05, |
| "loss": 4.0789, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.840641118790223e-05, |
| "loss": 4.0845, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839802524039171e-05, |
| "loss": 4.0856, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838963929288119e-05, |
| "loss": 4.085, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838125334537067e-05, |
| "loss": 4.0829, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.837286739786015e-05, |
| "loss": 4.0787, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.836448145034963e-05, |
| "loss": 4.0824, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.83560955028391e-05, |
| "loss": 4.0831, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8347725934132316e-05, |
| "loss": 4.0607, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8339339986621796e-05, |
| "loss": 4.0885, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8330954039111276e-05, |
| "loss": 4.0737, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8322568091600756e-05, |
| "loss": 4.0819, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8314182144090236e-05, |
| "loss": 4.0701, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8305796196579716e-05, |
| "loss": 4.07, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8297410249069195e-05, |
| "loss": 4.0732, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8289024301558675e-05, |
| "loss": 4.0717, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8280654732851885e-05, |
| "loss": 4.0883, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8272268785341364e-05, |
| "loss": 4.0726, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8263882837830844e-05, |
| "loss": 4.0936, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8255496890320324e-05, |
| "loss": 4.082, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8247110942809804e-05, |
| "loss": 4.0583, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8238741374103013e-05, |
| "loss": 4.0782, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8230355426592493e-05, |
| "loss": 4.0676, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822198585788571e-05, |
| "loss": 4.0651, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.821359991037519e-05, |
| "loss": 4.0823, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.820521396286467e-05, |
| "loss": 4.0805, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.819684439415788e-05, |
| "loss": 4.072, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818845844664736e-05, |
| "loss": 4.0782, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818007249913684e-05, |
| "loss": 4.0614, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.817168655162632e-05, |
| "loss": 4.0661, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.81633006041158e-05, |
| "loss": 4.0772, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.815491465660528e-05, |
| "loss": 4.0805, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.814652870909476e-05, |
| "loss": 4.0838, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813814276158424e-05, |
| "loss": 4.0728, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.812975681407372e-05, |
| "loss": 4.0928, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.81213708665632e-05, |
| "loss": 4.0661, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.811298491905268e-05, |
| "loss": 4.0866, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.810461535034589e-05, |
| "loss": 4.0655, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8096229402835374e-05, |
| "loss": 4.0707, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8087843455324854e-05, |
| "loss": 4.0751, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8079457507814334e-05, |
| "loss": 4.0812, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8071071560303814e-05, |
| "loss": 4.0789, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.806268561279329e-05, |
| "loss": 4.0842, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.805429966528277e-05, |
| "loss": 4.0576, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804593009657598e-05, |
| "loss": 4.0611, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.803754414906546e-05, |
| "loss": 4.0804, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8029158201554936e-05, |
| "loss": 4.0771, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8020772254044416e-05, |
| "loss": 4.071, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.801240268533763e-05, |
| "loss": 4.0786, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800401673782711e-05, |
| "loss": 4.0704, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799563079031659e-05, |
| "loss": 4.0718, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.798724484280607e-05, |
| "loss": 4.0694, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797885889529555e-05, |
| "loss": 4.0702, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797048932658876e-05, |
| "loss": 4.0841, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.796210337907824e-05, |
| "loss": 4.0853, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.795371743156772e-05, |
| "loss": 4.0838, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.79453314840572e-05, |
| "loss": 4.064, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.793696191535041e-05, |
| "loss": 4.0698, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.792857596783989e-05, |
| "loss": 4.0695, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.792019002032937e-05, |
| "loss": 4.0748, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.791180407281885e-05, |
| "loss": 4.0743, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.790341812530833e-05, |
| "loss": 4.0677, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7895048556601545e-05, |
| "loss": 4.0716, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7886662609091025e-05, |
| "loss": 4.0646, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7878276661580505e-05, |
| "loss": 4.0657, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7869890714069985e-05, |
| "loss": 4.0692, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7861521145363194e-05, |
| "loss": 4.0611, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7853135197852674e-05, |
| "loss": 4.0736, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7844749250342154e-05, |
| "loss": 4.0712, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7836363302831634e-05, |
| "loss": 4.066, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7827977355321114e-05, |
| "loss": 4.0673, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7819591407810594e-05, |
| "loss": 4.0681, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7811205460300074e-05, |
| "loss": 4.0742, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7802819512789554e-05, |
| "loss": 4.0657, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7794433565279034e-05, |
| "loss": 4.0787, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.778606399657225e-05, |
| "loss": 4.0769, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777767804906173e-05, |
| "loss": 4.0745, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.776929210155121e-05, |
| "loss": 4.0711, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.776090615404069e-05, |
| "loss": 4.0689, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.77525365853339e-05, |
| "loss": 4.0648, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.774415063782338e-05, |
| "loss": 4.0739, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.773576469031286e-05, |
| "loss": 4.0668, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.772737874280234e-05, |
| "loss": 4.0575, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771900917409555e-05, |
| "loss": 4.0798, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771062322658503e-05, |
| "loss": 4.0732, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.770223727907451e-05, |
| "loss": 4.0704, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.769385133156399e-05, |
| "loss": 4.0613, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.768546538405347e-05, |
| "loss": 4.0612, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.767707943654295e-05, |
| "loss": 4.0652, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766869348903243e-05, |
| "loss": 4.0732, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766030754152191e-05, |
| "loss": 4.0705, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765192159401139e-05, |
| "loss": 4.0737, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7643552025304596e-05, |
| "loss": 4.0495, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7635166077794076e-05, |
| "loss": 4.0799, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7626780130283556e-05, |
| "loss": 4.0661, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7618394182773036e-05, |
| "loss": 4.0787, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7610008235262516e-05, |
| "loss": 4.0654, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7601638666555725e-05, |
| "loss": 4.0619, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.759326909784894e-05, |
| "loss": 4.0705, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.758488315033842e-05, |
| "loss": 4.0816, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75764972028279e-05, |
| "loss": 4.062, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756811125531738e-05, |
| "loss": 4.0734, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755972530780686e-05, |
| "loss": 4.0632, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755133936029634e-05, |
| "loss": 4.0714, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.754296979158955e-05, |
| "loss": 4.0626, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.753458384407903e-05, |
| "loss": 4.0612, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.752619789656851e-05, |
| "loss": 4.0695, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751781194905799e-05, |
| "loss": 4.0623, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.750942600154747e-05, |
| "loss": 4.0793, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.093672752380371, |
| "eval_runtime": 297.6939, |
| "eval_samples_per_second": 1281.823, |
| "eval_steps_per_second": 40.058, |
| "step": 763200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.750104005403695e-05, |
| "loss": 4.0634, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.749265410652643e-05, |
| "loss": 4.0574, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.748426815901591e-05, |
| "loss": 4.0686, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.747588221150539e-05, |
| "loss": 4.0648, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.746749626399487e-05, |
| "loss": 4.0775, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.745911031648435e-05, |
| "loss": 4.0661, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7450724368973836e-05, |
| "loss": 4.0635, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.744233842146331e-05, |
| "loss": 4.0743, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.743395247395279e-05, |
| "loss": 4.0601, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.742556652644227e-05, |
| "loss": 4.0611, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.741718057893175e-05, |
| "loss": 4.0746, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.740879463142123e-05, |
| "loss": 4.0708, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.740042506271444e-05, |
| "loss": 4.0721, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.739203911520392e-05, |
| "loss": 4.0537, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.73836531676934e-05, |
| "loss": 4.061, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.737526722018288e-05, |
| "loss": 4.0449, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.736688127267236e-05, |
| "loss": 4.0675, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735849532516184e-05, |
| "loss": 4.0701, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735010937765132e-05, |
| "loss": 4.0498, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7341739808944534e-05, |
| "loss": 4.0772, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7333353861434014e-05, |
| "loss": 4.0766, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7324967913923494e-05, |
| "loss": 4.0722, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7316581966412974e-05, |
| "loss": 4.0691, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7308196018902454e-05, |
| "loss": 4.0676, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7299810071391934e-05, |
| "loss": 4.0682, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7291424123881414e-05, |
| "loss": 4.0648, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7283038176370894e-05, |
| "loss": 4.0544, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7274652228860374e-05, |
| "loss": 4.0686, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.726626628134985e-05, |
| "loss": 4.0554, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.725788033383933e-05, |
| "loss": 4.0546, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724949438632881e-05, |
| "loss": 4.0672, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.724112481762202e-05, |
| "loss": 4.0644, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.72327388701115e-05, |
| "loss": 4.0648, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.722435292260098e-05, |
| "loss": 4.0697, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.721596697509046e-05, |
| "loss": 4.056, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720759740638367e-05, |
| "loss": 4.0584, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.719921145887315e-05, |
| "loss": 4.0641, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.719082551136263e-05, |
| "loss": 4.0476, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.718243956385211e-05, |
| "loss": 4.0579, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.717405361634159e-05, |
| "loss": 4.0611, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.716566766883107e-05, |
| "loss": 4.053, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.715728172132055e-05, |
| "loss": 4.0655, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714889577381003e-05, |
| "loss": 4.0684, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7140542583906976e-05, |
| "loss": 4.0605, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7132156636396456e-05, |
| "loss": 4.0624, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7123770688885936e-05, |
| "loss": 4.0559, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7115384741375416e-05, |
| "loss": 4.0617, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7106998793864896e-05, |
| "loss": 4.0621, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7098612846354376e-05, |
| "loss": 4.0421, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7090226898843856e-05, |
| "loss": 4.0665, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7081840951333336e-05, |
| "loss": 4.0527, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7073455003822816e-05, |
| "loss": 4.0619, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7065085435116025e-05, |
| "loss": 4.0504, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7056699487605505e-05, |
| "loss": 4.0443, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7048313540094985e-05, |
| "loss": 4.0498, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7039927592584465e-05, |
| "loss": 4.0472, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7031558023877674e-05, |
| "loss": 4.0701, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.702317207636716e-05, |
| "loss": 4.0495, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.701480250766037e-05, |
| "loss": 4.0724, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.700641656014985e-05, |
| "loss": 4.0613, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699804699144306e-05, |
| "loss": 4.0374, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.698966104393254e-05, |
| "loss": 4.0565, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.698127509642202e-05, |
| "loss": 4.048, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69728891489115e-05, |
| "loss": 4.0427, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.696450320140098e-05, |
| "loss": 4.0586, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.695611725389046e-05, |
| "loss": 4.0618, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694773130637994e-05, |
| "loss": 4.0477, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.693934535886942e-05, |
| "loss": 4.0589, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69309594113589e-05, |
| "loss": 4.0381, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.692257346384838e-05, |
| "loss": 4.0478, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.691422027394532e-05, |
| "loss": 4.0532, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69058343264348e-05, |
| "loss": 4.0616, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.689744837892428e-05, |
| "loss": 4.0566, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.688906243141376e-05, |
| "loss": 4.0528, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.688067648390324e-05, |
| "loss": 4.0697, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.687229053639272e-05, |
| "loss": 4.0496, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.686392096768593e-05, |
| "loss": 4.063, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.685553502017541e-05, |
| "loss": 4.0419, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.684714907266489e-05, |
| "loss": 4.049, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683876312515437e-05, |
| "loss": 4.0542, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683037717764385e-05, |
| "loss": 4.0579, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.682199123013333e-05, |
| "loss": 4.0578, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.681360528262281e-05, |
| "loss": 4.0613, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.680521933511229e-05, |
| "loss": 4.0421, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.679683338760177e-05, |
| "loss": 4.0357, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678844744009125e-05, |
| "loss": 4.0586, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678006149258073e-05, |
| "loss": 4.0553, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677169192387394e-05, |
| "loss": 4.05, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.676330597636342e-05, |
| "loss": 4.0581, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.67549200288529e-05, |
| "loss": 4.0516, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.674653408134238e-05, |
| "loss": 4.049, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673814813383186e-05, |
| "loss": 4.05, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672976218632134e-05, |
| "loss": 4.0476, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672137623881082e-05, |
| "loss": 4.0631, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6713006670104037e-05, |
| "loss": 4.0625, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6704620722593516e-05, |
| "loss": 4.0645, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6696234775082996e-05, |
| "loss": 4.0414, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6687848827572476e-05, |
| "loss": 4.0485, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6679462880061956e-05, |
| "loss": 4.0509, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6671076932551436e-05, |
| "loss": 4.0524, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6662690985040916e-05, |
| "loss": 4.0539, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6654305037530396e-05, |
| "loss": 4.0473, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.664591909001987e-05, |
| "loss": 4.0533, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6637565900116814e-05, |
| "loss": 4.0408, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6629179952606294e-05, |
| "loss": 4.0479, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6620794005095774e-05, |
| "loss": 4.0471, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6612408057585254e-05, |
| "loss": 4.0429, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6604022110074734e-05, |
| "loss": 4.0524, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6595636162564214e-05, |
| "loss": 4.0475, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6587250215053694e-05, |
| "loss": 4.045, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6578864267543174e-05, |
| "loss": 4.0493, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657049469883639e-05, |
| "loss": 4.0426, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.656210875132587e-05, |
| "loss": 4.0527, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655372280381534e-05, |
| "loss": 4.0499, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.654533685630482e-05, |
| "loss": 4.0528, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.653696728759804e-05, |
| "loss": 4.0608, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652859771889125e-05, |
| "loss": 4.0531, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652021177138073e-05, |
| "loss": 4.0489, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.651182582387021e-05, |
| "loss": 4.0504, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.650343987635969e-05, |
| "loss": 4.043, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.649505392884917e-05, |
| "loss": 4.0536, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.648666798133865e-05, |
| "loss": 4.0473, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.647828203382813e-05, |
| "loss": 4.039, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6469912465121344e-05, |
| "loss": 4.0576, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.646152651761082e-05, |
| "loss": 4.0543, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.64531405701003e-05, |
| "loss": 4.0508, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.644475462258978e-05, |
| "loss": 4.0415, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.643636867507926e-05, |
| "loss": 4.0429, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.642798272756874e-05, |
| "loss": 4.0429, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6419596780058217e-05, |
| "loss": 4.0548, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6411210832547697e-05, |
| "loss": 4.0494, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6402841263840906e-05, |
| "loss": 4.0539, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.639445531633039e-05, |
| "loss": 4.0292, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.638606936881987e-05, |
| "loss": 4.0634, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637768342130935e-05, |
| "loss": 4.0435, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636931385260256e-05, |
| "loss": 4.0655, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636092790509204e-05, |
| "loss": 4.0414, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.635254195758152e-05, |
| "loss": 4.0433, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6344156010071e-05, |
| "loss": 4.0489, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.633577006256048e-05, |
| "loss": 4.0646, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632740049385369e-05, |
| "loss": 4.0421, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631901454634317e-05, |
| "loss": 4.0571, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631062859883265e-05, |
| "loss": 4.0411, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.630224265132213e-05, |
| "loss": 4.0521, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.629385670381161e-05, |
| "loss": 4.0395, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.628547075630109e-05, |
| "loss": 4.045, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6277101187594306e-05, |
| "loss": 4.0482, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6268715240083786e-05, |
| "loss": 4.0422, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6260329292573266e-05, |
| "loss": 4.0568, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.08021879196167, |
| "eval_runtime": 295.3299, |
| "eval_samples_per_second": 1292.084, |
| "eval_steps_per_second": 40.379, |
| "step": 839520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6251943345062746e-05, |
| "loss": 4.045, |
| "step": 839680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6243557397552226e-05, |
| "loss": 4.0369, |
| "step": 840192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6235187828845435e-05, |
| "loss": 4.0489, |
| "step": 840704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6226801881334915e-05, |
| "loss": 4.0438, |
| "step": 841216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6218415933824395e-05, |
| "loss": 4.0576, |
| "step": 841728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6210029986313875e-05, |
| "loss": 4.0466, |
| "step": 842240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6201660417607084e-05, |
| "loss": 4.0449, |
| "step": 842752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6193274470096564e-05, |
| "loss": 4.0517, |
| "step": 843264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6184888522586044e-05, |
| "loss": 4.0422, |
| "step": 843776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.617650257507553e-05, |
| "loss": 4.041, |
| "step": 844288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616813300636874e-05, |
| "loss": 4.0566, |
| "step": 844800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615974705885822e-05, |
| "loss": 4.048, |
| "step": 845312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615137749015143e-05, |
| "loss": 4.0563, |
| "step": 845824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.614299154264091e-05, |
| "loss": 4.0346, |
| "step": 846336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.613460559513039e-05, |
| "loss": 4.0368, |
| "step": 846848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.612621964761987e-05, |
| "loss": 4.0299, |
| "step": 847360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611783370010935e-05, |
| "loss": 4.0495, |
| "step": 847872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.610944775259882e-05, |
| "loss": 4.0457, |
| "step": 848384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.61010618050883e-05, |
| "loss": 4.0371, |
| "step": 848896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.609267585757778e-05, |
| "loss": 4.0515, |
| "step": 849408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.608428991006727e-05, |
| "loss": 4.062, |
| "step": 849920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.607592034136048e-05, |
| "loss": 4.0503, |
| "step": 850432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.606753439384996e-05, |
| "loss": 4.0482, |
| "step": 850944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.605914844633944e-05, |
| "loss": 4.0477, |
| "step": 851456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.605076249882892e-05, |
| "loss": 4.0507, |
| "step": 851968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6042392930122126e-05, |
| "loss": 4.0438, |
| "step": 852480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6034006982611606e-05, |
| "loss": 4.0378, |
| "step": 852992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.602563741390482e-05, |
| "loss": 4.0479, |
| "step": 853504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6017251466394295e-05, |
| "loss": 4.0344, |
| "step": 854016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6008865518883775e-05, |
| "loss": 4.0356, |
| "step": 854528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.6000479571373255e-05, |
| "loss": 4.0465, |
| "step": 855040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5992093623862735e-05, |
| "loss": 4.0439, |
| "step": 855552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5983707676352215e-05, |
| "loss": 4.0463, |
| "step": 856064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59753217288417e-05, |
| "loss": 4.0484, |
| "step": 856576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.596695216013491e-05, |
| "loss": 4.0377, |
| "step": 857088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.595858259142812e-05, |
| "loss": 4.0417, |
| "step": 857600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5950213022721336e-05, |
| "loss": 4.043, |
| "step": 858112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5941827075210816e-05, |
| "loss": 4.0311, |
| "step": 858624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5933441127700296e-05, |
| "loss": 4.0445, |
| "step": 859136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.592505518018977e-05, |
| "loss": 4.0376, |
| "step": 859648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.591666923267925e-05, |
| "loss": 4.0372, |
| "step": 860160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.590828328516873e-05, |
| "loss": 4.044, |
| "step": 860672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.589989733765821e-05, |
| "loss": 4.0536, |
| "step": 861184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.589151139014769e-05, |
| "loss": 4.0344, |
| "step": 861696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.588312544263717e-05, |
| "loss": 4.0438, |
| "step": 862208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5874739495126656e-05, |
| "loss": 4.0398, |
| "step": 862720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5866353547616136e-05, |
| "loss": 4.0371, |
| "step": 863232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5857967600105615e-05, |
| "loss": 4.0443, |
| "step": 863744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5849581652595095e-05, |
| "loss": 4.0253, |
| "step": 864256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5841212083888305e-05, |
| "loss": 4.0432, |
| "step": 864768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5832826136377784e-05, |
| "loss": 4.0358, |
| "step": 865280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5824440188867264e-05, |
| "loss": 4.0446, |
| "step": 865792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5816054241356744e-05, |
| "loss": 4.0308, |
| "step": 866304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5807684672649953e-05, |
| "loss": 4.0236, |
| "step": 866816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5799298725139433e-05, |
| "loss": 4.0311, |
| "step": 867328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579092915643264e-05, |
| "loss": 4.024, |
| "step": 867840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.578254320892212e-05, |
| "loss": 4.0524, |
| "step": 868352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.577415726141161e-05, |
| "loss": 4.0309, |
| "step": 868864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.576577131390109e-05, |
| "loss": 4.0517, |
| "step": 869376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57574017451943e-05, |
| "loss": 4.0402, |
| "step": 869888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574901579768378e-05, |
| "loss": 4.0222, |
| "step": 870400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.574062985017326e-05, |
| "loss": 4.036, |
| "step": 870912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.573224390266274e-05, |
| "loss": 4.0305, |
| "step": 871424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.572385795515222e-05, |
| "loss": 4.0232, |
| "step": 871936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.57154720076417e-05, |
| "loss": 4.0383, |
| "step": 872448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.570708606013118e-05, |
| "loss": 4.0421, |
| "step": 872960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569870011262066e-05, |
| "loss": 4.029, |
| "step": 873472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569033054391387e-05, |
| "loss": 4.0404, |
| "step": 873984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.568194459640335e-05, |
| "loss": 4.0155, |
| "step": 874496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.567355864889283e-05, |
| "loss": 4.0306, |
| "step": 875008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.566517270138231e-05, |
| "loss": 4.0361, |
| "step": 875520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.565680313267552e-05, |
| "loss": 4.0441, |
| "step": 876032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5648417185165e-05, |
| "loss": 4.0378, |
| "step": 876544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.564004761645821e-05, |
| "loss": 4.0333, |
| "step": 877056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.563166166894769e-05, |
| "loss": 4.0501, |
| "step": 877568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.562327572143717e-05, |
| "loss": 4.0331, |
| "step": 878080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.561490615273038e-05, |
| "loss": 4.0423, |
| "step": 878592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.560652020521986e-05, |
| "loss": 4.0263, |
| "step": 879104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559813425770934e-05, |
| "loss": 4.0316, |
| "step": 879616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558974831019882e-05, |
| "loss": 4.0333, |
| "step": 880128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.558137874149203e-05, |
| "loss": 4.0371, |
| "step": 880640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5572992793981517e-05, |
| "loss": 4.0389, |
| "step": 881152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5564606846470997e-05, |
| "loss": 4.0447, |
| "step": 881664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5556220898960476e-05, |
| "loss": 4.0248, |
| "step": 882176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5547834951449956e-05, |
| "loss": 4.0163, |
| "step": 882688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553944900393943e-05, |
| "loss": 4.0342, |
| "step": 883200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553106305642891e-05, |
| "loss": 4.0404, |
| "step": 883712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.552267710891839e-05, |
| "loss": 4.0311, |
| "step": 884224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5514307540211605e-05, |
| "loss": 4.0384, |
| "step": 884736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.550592159270108e-05, |
| "loss": 4.0324, |
| "step": 885248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.549753564519056e-05, |
| "loss": 4.0293, |
| "step": 885760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.548914969768004e-05, |
| "loss": 4.0329, |
| "step": 886272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.548076375016952e-05, |
| "loss": 4.0277, |
| "step": 886784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5472377802659e-05, |
| "loss": 4.0409, |
| "step": 887296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5463991855148485e-05, |
| "loss": 4.0467, |
| "step": 887808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5455605907637965e-05, |
| "loss": 4.0445, |
| "step": 888320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5447236338931174e-05, |
| "loss": 4.0226, |
| "step": 888832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5438850391420654e-05, |
| "loss": 4.0326, |
| "step": 889344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5430464443910134e-05, |
| "loss": 4.0284, |
| "step": 889856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.542209487520334e-05, |
| "loss": 4.0348, |
| "step": 890368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.541370892769282e-05, |
| "loss": 4.0366, |
| "step": 890880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.54053229801823e-05, |
| "loss": 4.0276, |
| "step": 891392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.539693703267178e-05, |
| "loss": 4.0339, |
| "step": 891904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538855108516126e-05, |
| "loss": 4.0244, |
| "step": 892416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.538016513765074e-05, |
| "loss": 4.0282, |
| "step": 892928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.537177919014022e-05, |
| "loss": 4.0292, |
| "step": 893440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.536340962143343e-05, |
| "loss": 4.0231, |
| "step": 893952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.535502367392292e-05, |
| "loss": 4.0326, |
| "step": 894464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.53466377264124e-05, |
| "loss": 4.0313, |
| "step": 894976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533825177890188e-05, |
| "loss": 4.0252, |
| "step": 895488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.532986583139136e-05, |
| "loss": 4.0268, |
| "step": 896000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.532147988388084e-05, |
| "loss": 4.0297, |
| "step": 896512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.531309393637032e-05, |
| "loss": 4.0341, |
| "step": 897024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.530470798885979e-05, |
| "loss": 4.0313, |
| "step": 897536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.529632204134927e-05, |
| "loss": 4.031, |
| "step": 898048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528795247264249e-05, |
| "loss": 4.0405, |
| "step": 898560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527956652513197e-05, |
| "loss": 4.0405, |
| "step": 899072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5271196956425177e-05, |
| "loss": 4.0291, |
| "step": 899584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5262811008914657e-05, |
| "loss": 4.0328, |
| "step": 900096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5254425061404136e-05, |
| "loss": 4.0242, |
| "step": 900608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5246039113893616e-05, |
| "loss": 4.0368, |
| "step": 901120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5237653166383096e-05, |
| "loss": 4.0303, |
| "step": 901632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5229267218872576e-05, |
| "loss": 4.0205, |
| "step": 902144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5220881271362056e-05, |
| "loss": 4.0391, |
| "step": 902656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5212495323851536e-05, |
| "loss": 4.0349, |
| "step": 903168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5204125755144745e-05, |
| "loss": 4.0314, |
| "step": 903680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.519575618643796e-05, |
| "loss": 4.0268, |
| "step": 904192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5187370238927434e-05, |
| "loss": 4.0212, |
| "step": 904704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5178984291416914e-05, |
| "loss": 4.0218, |
| "step": 905216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5170598343906394e-05, |
| "loss": 4.037, |
| "step": 905728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5162212396395874e-05, |
| "loss": 4.0322, |
| "step": 906240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5153826448885354e-05, |
| "loss": 4.0373, |
| "step": 906752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.514544050137484e-05, |
| "loss": 4.0144, |
| "step": 907264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.513705455386432e-05, |
| "loss": 4.036, |
| "step": 907776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512868498515753e-05, |
| "loss": 4.0298, |
| "step": 908288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512029903764701e-05, |
| "loss": 4.04, |
| "step": 908800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.511191309013649e-05, |
| "loss": 4.0269, |
| "step": 909312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.510352714262597e-05, |
| "loss": 4.0256, |
| "step": 909824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.509515757391918e-05, |
| "loss": 4.0324, |
| "step": 910336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.508678800521239e-05, |
| "loss": 4.0445, |
| "step": 910848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.507840205770187e-05, |
| "loss": 4.0252, |
| "step": 911360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.507001611019135e-05, |
| "loss": 4.0357, |
| "step": 911872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.506163016268083e-05, |
| "loss": 4.0251, |
| "step": 912384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.505324421517031e-05, |
| "loss": 4.0329, |
| "step": 912896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5044858267659795e-05, |
| "loss": 4.025, |
| "step": 913408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5036472320149275e-05, |
| "loss": 4.0248, |
| "step": 913920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5028086372638755e-05, |
| "loss": 4.0282, |
| "step": 914432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5019700425128234e-05, |
| "loss": 4.029, |
| "step": 914944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5011314477617714e-05, |
| "loss": 4.0358, |
| "step": 915456 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.068859100341797, |
| "eval_runtime": 301.1686, |
| "eval_samples_per_second": 1267.034, |
| "eval_steps_per_second": 39.596, |
| "step": 915840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.5002928530107194e-05, |
| "loss": 4.0471, |
| "step": 915968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4994542582596674e-05, |
| "loss": 4.0114, |
| "step": 916480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4986156635086154e-05, |
| "loss": 4.0291, |
| "step": 916992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.497777068757563e-05, |
| "loss": 4.0297, |
| "step": 917504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.496938474006511e-05, |
| "loss": 4.037, |
| "step": 918016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.496099879255459e-05, |
| "loss": 4.0346, |
| "step": 918528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.495261284504407e-05, |
| "loss": 4.0265, |
| "step": 919040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.494422689753355e-05, |
| "loss": 4.0333, |
| "step": 919552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.493584095002303e-05, |
| "loss": 4.0234, |
| "step": 920064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.492745500251251e-05, |
| "loss": 4.0284, |
| "step": 920576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.491906905500199e-05, |
| "loss": 4.0346, |
| "step": 921088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4910683107491474e-05, |
| "loss": 4.0308, |
| "step": 921600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.490231353878468e-05, |
| "loss": 4.0379, |
| "step": 922112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.489392759127416e-05, |
| "loss": 4.0173, |
| "step": 922624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.488554164376364e-05, |
| "loss": 4.0157, |
| "step": 923136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.487715569625312e-05, |
| "loss": 4.0154, |
| "step": 923648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.48687697487426e-05, |
| "loss": 4.0313, |
| "step": 924160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.486038380123208e-05, |
| "loss": 4.0248, |
| "step": 924672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.485199785372156e-05, |
| "loss": 4.0225, |
| "step": 925184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.484361190621104e-05, |
| "loss": 4.031, |
| "step": 925696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4835225958700516e-05, |
| "loss": 4.0459, |
| "step": 926208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4826840011189996e-05, |
| "loss": 4.0323, |
| "step": 926720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4818454063679476e-05, |
| "loss": 4.0296, |
| "step": 927232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4810068116168956e-05, |
| "loss": 4.0314, |
| "step": 927744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.480168216865844e-05, |
| "loss": 4.0359, |
| "step": 928256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.479331259995165e-05, |
| "loss": 4.0246, |
| "step": 928768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.478492665244113e-05, |
| "loss": 4.0191, |
| "step": 929280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.477654070493061e-05, |
| "loss": 4.0333, |
| "step": 929792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.476817113622382e-05, |
| "loss": 4.012, |
| "step": 930304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.47597851887133e-05, |
| "loss": 4.0201, |
| "step": 930816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.475139924120278e-05, |
| "loss": 4.0278, |
| "step": 931328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.474301329369226e-05, |
| "loss": 4.0243, |
| "step": 931840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.473462734618174e-05, |
| "loss": 4.0307, |
| "step": 932352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.472624139867122e-05, |
| "loss": 4.0272, |
| "step": 932864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.47178554511607e-05, |
| "loss": 4.0221, |
| "step": 933376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.470948588245391e-05, |
| "loss": 4.0235, |
| "step": 933888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4701116313747125e-05, |
| "loss": 4.026, |
| "step": 934400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4692730366236605e-05, |
| "loss": 4.0109, |
| "step": 934912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4684344418726085e-05, |
| "loss": 4.0278, |
| "step": 935424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4675958471215565e-05, |
| "loss": 4.02, |
| "step": 935936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4667572523705045e-05, |
| "loss": 4.0202, |
| "step": 936448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4659186576194525e-05, |
| "loss": 4.0285, |
| "step": 936960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4650800628684005e-05, |
| "loss": 4.0359, |
| "step": 937472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4642414681173485e-05, |
| "loss": 4.0158, |
| "step": 937984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4634028733662965e-05, |
| "loss": 4.0266, |
| "step": 938496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4625642786152445e-05, |
| "loss": 4.0245, |
| "step": 939008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4617256838641925e-05, |
| "loss": 4.0171, |
| "step": 939520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.46088708911314e-05, |
| "loss": 4.0289, |
| "step": 940032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4600501322424614e-05, |
| "loss": 4.0103, |
| "step": 940544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4592115374914094e-05, |
| "loss": 4.0205, |
| "step": 941056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.458372942740358e-05, |
| "loss": 4.023, |
| "step": 941568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4575343479893054e-05, |
| "loss": 4.0255, |
| "step": 942080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.456697391118627e-05, |
| "loss": 4.0137, |
| "step": 942592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.455858796367575e-05, |
| "loss": 4.0106, |
| "step": 943104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.455020201616523e-05, |
| "loss": 4.0116, |
| "step": 943616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.454183244745844e-05, |
| "loss": 4.0052, |
| "step": 944128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.453344649994792e-05, |
| "loss": 4.0383, |
| "step": 944640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.45250605524374e-05, |
| "loss": 4.011, |
| "step": 945152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.451667460492687e-05, |
| "loss": 4.0344, |
| "step": 945664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.450830503622009e-05, |
| "loss": 4.0234, |
| "step": 946176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.449991908870957e-05, |
| "loss": 4.0066, |
| "step": 946688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.449153314119905e-05, |
| "loss": 4.0164, |
| "step": 947200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.448314719368853e-05, |
| "loss": 4.013, |
| "step": 947712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.447476124617801e-05, |
| "loss": 4.0062, |
| "step": 948224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.446637529866749e-05, |
| "loss": 4.0183, |
| "step": 948736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.445798935115697e-05, |
| "loss": 4.0234, |
| "step": 949248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.444960340364645e-05, |
| "loss": 4.0165, |
| "step": 949760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4441233834939656e-05, |
| "loss": 4.0202, |
| "step": 950272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4432847887429136e-05, |
| "loss": 3.9995, |
| "step": 950784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4424461939918616e-05, |
| "loss": 4.0137, |
| "step": 951296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4416075992408096e-05, |
| "loss": 4.0191, |
| "step": 951808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4407690044897576e-05, |
| "loss": 4.0267, |
| "step": 952320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.439933685499452e-05, |
| "loss": 4.0251, |
| "step": 952832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4390950907484e-05, |
| "loss": 4.0184, |
| "step": 953344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.438256495997348e-05, |
| "loss": 4.0264, |
| "step": 953856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.437417901246296e-05, |
| "loss": 4.0161, |
| "step": 954368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.436579306495244e-05, |
| "loss": 4.0256, |
| "step": 954880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.435740711744192e-05, |
| "loss": 4.012, |
| "step": 955392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.43490211699314e-05, |
| "loss": 4.0174, |
| "step": 955904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.434063522242088e-05, |
| "loss": 4.0139, |
| "step": 956416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.433224927491036e-05, |
| "loss": 4.0206, |
| "step": 956928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.432387970620357e-05, |
| "loss": 4.0266, |
| "step": 957440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.431549375869305e-05, |
| "loss": 4.0256, |
| "step": 957952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.430710781118253e-05, |
| "loss": 4.0124, |
| "step": 958464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.429873824247574e-05, |
| "loss": 3.9956, |
| "step": 958976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.429035229496522e-05, |
| "loss": 4.0207, |
| "step": 959488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4281966347454706e-05, |
| "loss": 4.0215, |
| "step": 960000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4273580399944186e-05, |
| "loss": 4.0155, |
| "step": 960512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4265194452433666e-05, |
| "loss": 4.0204, |
| "step": 961024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4256808504923146e-05, |
| "loss": 4.0184, |
| "step": 961536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4248422557412625e-05, |
| "loss": 4.0106, |
| "step": 962048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4240036609902105e-05, |
| "loss": 4.0183, |
| "step": 962560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4231650662391585e-05, |
| "loss": 4.0124, |
| "step": 963072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4223264714881065e-05, |
| "loss": 4.0215, |
| "step": 963584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.421487876737054e-05, |
| "loss": 4.0278, |
| "step": 964096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4206509198663754e-05, |
| "loss": 4.0299, |
| "step": 964608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4198123251153234e-05, |
| "loss": 4.0052, |
| "step": 965120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.418973730364271e-05, |
| "loss": 4.0155, |
| "step": 965632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4181367734935923e-05, |
| "loss": 4.0151, |
| "step": 966144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.41729817874254e-05, |
| "loss": 4.0174, |
| "step": 966656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.416459583991489e-05, |
| "loss": 4.0205, |
| "step": 967168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.415620989240436e-05, |
| "loss": 4.0111, |
| "step": 967680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.414782394489384e-05, |
| "loss": 4.0164, |
| "step": 968192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.413943799738332e-05, |
| "loss": 4.0079, |
| "step": 968704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.41310520498728e-05, |
| "loss": 4.015, |
| "step": 969216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.412266610236228e-05, |
| "loss": 4.0113, |
| "step": 969728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.411429653365549e-05, |
| "loss": 4.0088, |
| "step": 970240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.410591058614497e-05, |
| "loss": 4.0163, |
| "step": 970752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.409754101743818e-05, |
| "loss": 4.0124, |
| "step": 971264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.408915506992766e-05, |
| "loss": 4.0107, |
| "step": 971776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.408076912241714e-05, |
| "loss": 4.0146, |
| "step": 972288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.407238317490663e-05, |
| "loss": 4.0095, |
| "step": 972800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.406399722739611e-05, |
| "loss": 4.0198, |
| "step": 973312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.405562765868932e-05, |
| "loss": 4.0145, |
| "step": 973824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.40472417111788e-05, |
| "loss": 4.0178, |
| "step": 974336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.403885576366828e-05, |
| "loss": 4.0191, |
| "step": 974848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.403046981615776e-05, |
| "loss": 4.0271, |
| "step": 975360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.402208386864724e-05, |
| "loss": 4.0128, |
| "step": 975872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.401369792113672e-05, |
| "loss": 4.0172, |
| "step": 976384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.40053119736262e-05, |
| "loss": 4.01, |
| "step": 976896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3996942404919406e-05, |
| "loss": 4.0184, |
| "step": 977408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3988556457408886e-05, |
| "loss": 4.0157, |
| "step": 977920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3980170509898366e-05, |
| "loss": 4.0053, |
| "step": 978432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3971784562387846e-05, |
| "loss": 4.0175, |
| "step": 978944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3963398614877326e-05, |
| "loss": 4.0218, |
| "step": 979456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.395502904617054e-05, |
| "loss": 4.0162, |
| "step": 979968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.394664309866002e-05, |
| "loss": 4.0101, |
| "step": 980480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.39382571511495e-05, |
| "loss": 4.0029, |
| "step": 980992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.392987120363898e-05, |
| "loss": 4.0071, |
| "step": 981504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.392148525612846e-05, |
| "loss": 4.0237, |
| "step": 982016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.391309930861794e-05, |
| "loss": 4.0144, |
| "step": 982528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.390471336110742e-05, |
| "loss": 4.0214, |
| "step": 983040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3896327413596894e-05, |
| "loss": 3.996, |
| "step": 983552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.388795784489011e-05, |
| "loss": 4.0208, |
| "step": 984064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.387957189737959e-05, |
| "loss": 4.0177, |
| "step": 984576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.387118594986907e-05, |
| "loss": 4.0214, |
| "step": 985088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.386280000235855e-05, |
| "loss": 4.0113, |
| "step": 985600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.385441405484803e-05, |
| "loss": 4.0092, |
| "step": 986112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3846044486141246e-05, |
| "loss": 4.0153, |
| "step": 986624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3837658538630726e-05, |
| "loss": 4.0317, |
| "step": 987136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.38292725911202e-05, |
| "loss": 4.0091, |
| "step": 987648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.382088664360968e-05, |
| "loss": 4.0184, |
| "step": 988160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3812517074902895e-05, |
| "loss": 4.0111, |
| "step": 988672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.380413112739237e-05, |
| "loss": 4.0168, |
| "step": 989184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.379574517988185e-05, |
| "loss": 4.0059, |
| "step": 989696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.378735923237133e-05, |
| "loss": 4.0141, |
| "step": 990208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.377897328486081e-05, |
| "loss": 4.0083, |
| "step": 990720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.377060371615402e-05, |
| "loss": 4.0113, |
| "step": 991232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.37622177686435e-05, |
| "loss": 4.0206, |
| "step": 991744 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.059417724609375, |
| "eval_runtime": 290.1409, |
| "eval_samples_per_second": 1315.192, |
| "eval_steps_per_second": 41.101, |
| "step": 992160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3753831821132984e-05, |
| "loss": 4.0157, |
| "step": 992256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3745445873622464e-05, |
| "loss": 4.0019, |
| "step": 992768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.373707630491567e-05, |
| "loss": 4.0122, |
| "step": 993280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372869035740515e-05, |
| "loss": 4.0185, |
| "step": 993792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372032078869837e-05, |
| "loss": 4.016, |
| "step": 994304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.371193484118784e-05, |
| "loss": 4.0206, |
| "step": 994816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.370354889367732e-05, |
| "loss": 4.0077, |
| "step": 995328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.36951629461668e-05, |
| "loss": 4.0191, |
| "step": 995840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.368677699865628e-05, |
| "loss": 4.0079, |
| "step": 996352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.367839105114576e-05, |
| "loss": 4.0121, |
| "step": 996864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.367000510363524e-05, |
| "loss": 4.0169, |
| "step": 997376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.366161915612472e-05, |
| "loss": 4.0149, |
| "step": 997888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.365324958741794e-05, |
| "loss": 4.0252, |
| "step": 998400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.364486363990742e-05, |
| "loss": 4.0003, |
| "step": 998912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3636494071200626e-05, |
| "loss": 3.9993, |
| "step": 999424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3628108123690106e-05, |
| "loss": 4.0029, |
| "step": 999936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3619722176179586e-05, |
| "loss": 4.0146, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3611336228669066e-05, |
| "loss": 4.0086, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3602950281158546e-05, |
| "loss": 4.0057, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3594564333648026e-05, |
| "loss": 4.0174, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3586194764941235e-05, |
| "loss": 4.0262, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3577808817430715e-05, |
| "loss": 4.0213, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3569422869920195e-05, |
| "loss": 4.0127, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3561036922409675e-05, |
| "loss": 4.0167, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3552650974899155e-05, |
| "loss": 4.0193, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3544265027388635e-05, |
| "loss": 4.0124, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.353587907987812e-05, |
| "loss": 4.001, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.35274931323676e-05, |
| "loss": 4.0196, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351912356366081e-05, |
| "loss": 3.9957, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351073761615029e-05, |
| "loss": 4.0058, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.350235166863977e-05, |
| "loss": 4.0081, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.349396572112925e-05, |
| "loss": 4.0155, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.348557977361873e-05, |
| "loss": 4.0153, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3477193826108204e-05, |
| "loss": 4.0127, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3468807878597684e-05, |
| "loss": 4.0093, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.34604383098909e-05, |
| "loss": 4.0086, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.345205236238038e-05, |
| "loss": 4.0059, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.344368279367359e-05, |
| "loss": 3.9948, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3435296846163076e-05, |
| "loss": 4.0155, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3426910898652555e-05, |
| "loss": 4.005, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.341852495114203e-05, |
| "loss": 4.0078, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.341013900363151e-05, |
| "loss": 4.0098, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.340175305612099e-05, |
| "loss": 4.0185, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.339336710861047e-05, |
| "loss": 4.0057, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.338498116109995e-05, |
| "loss": 4.0112, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.337659521358943e-05, |
| "loss": 4.0092, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.336820926607891e-05, |
| "loss": 4.0033, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335982331856839e-05, |
| "loss": 4.0111, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335143737105787e-05, |
| "loss": 3.9956, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.334306780235108e-05, |
| "loss": 4.0052, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.333468185484056e-05, |
| "loss": 4.0114, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3326295907330044e-05, |
| "loss": 4.0081, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3317909959819524e-05, |
| "loss": 3.9985, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3309524012309004e-05, |
| "loss": 3.99, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330115444360221e-05, |
| "loss": 4.0007, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.329278487489542e-05, |
| "loss": 3.9916, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.32843989273849e-05, |
| "loss": 4.0269, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.327602935867811e-05, |
| "loss": 3.9944, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.326764341116759e-05, |
| "loss": 4.0221, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325925746365707e-05, |
| "loss": 4.0061, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.325087151614655e-05, |
| "loss": 3.994, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.324248556863603e-05, |
| "loss": 3.9984, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.323409962112551e-05, |
| "loss": 3.999, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.322571367361499e-05, |
| "loss": 3.9969, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321732772610448e-05, |
| "loss": 3.9976, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.320894177859396e-05, |
| "loss": 4.0133, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.320055583108344e-05, |
| "loss": 4.0036, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.319216988357292e-05, |
| "loss": 4.0013, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.318378393606239e-05, |
| "loss": 3.989, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.317541436735561e-05, |
| "loss": 3.9976, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3167044798648816e-05, |
| "loss": 4.0009, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3158658851138296e-05, |
| "loss": 4.0163, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3150272903627776e-05, |
| "loss": 4.0075, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3141886956117256e-05, |
| "loss": 4.0024, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3133501008606736e-05, |
| "loss": 4.0096, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3125131439899945e-05, |
| "loss": 4.0049, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.311674549238943e-05, |
| "loss": 4.0088, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.310835954487891e-05, |
| "loss": 3.9972, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.309997359736839e-05, |
| "loss": 4.0042, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3091587649857864e-05, |
| "loss": 3.998, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3083201702347344e-05, |
| "loss": 4.0041, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3074815754836824e-05, |
| "loss": 4.0147, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.306644618613004e-05, |
| "loss": 4.0114, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3058060238619513e-05, |
| "loss": 4.0002, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.304967429110899e-05, |
| "loss": 3.9794, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.304128834359847e-05, |
| "loss": 4.0083, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.303291877489169e-05, |
| "loss": 4.0073, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.302453282738117e-05, |
| "loss": 4.003, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.301614687987065e-05, |
| "loss": 4.0011, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.300776093236013e-05, |
| "loss": 4.0084, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.299937498484961e-05, |
| "loss": 3.9966, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.299100541614282e-05, |
| "loss": 4.0039, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.29826194686323e-05, |
| "loss": 3.9982, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.297423352112178e-05, |
| "loss": 4.0084, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.296584757361126e-05, |
| "loss": 4.013, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.295746162610074e-05, |
| "loss": 4.0146, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.294907567859022e-05, |
| "loss": 3.9953, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.29406897310797e-05, |
| "loss": 4.0, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.293230378356918e-05, |
| "loss": 4.0011, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.292393421486239e-05, |
| "loss": 4.0042, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.29155646461556e-05, |
| "loss": 4.0043, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.290717869864508e-05, |
| "loss": 3.9982, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.289879275113456e-05, |
| "loss": 4.0015, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.289040680362404e-05, |
| "loss": 3.9916, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.288202085611352e-05, |
| "loss": 4.0034, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2873634908603e-05, |
| "loss": 3.9918, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.286524896109248e-05, |
| "loss": 3.9959, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.285687939238569e-05, |
| "loss": 4.0018, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.284849344487517e-05, |
| "loss": 3.9947, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.284010749736465e-05, |
| "loss": 3.9981, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.283172154985413e-05, |
| "loss": 4.0057, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.282333560234361e-05, |
| "loss": 3.992, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.281494965483309e-05, |
| "loss": 4.005, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.280656370732257e-05, |
| "loss": 4.0011, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.279817775981205e-05, |
| "loss": 3.9983, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.278979181230153e-05, |
| "loss": 4.0075, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.278142224359475e-05, |
| "loss": 4.0151, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2773069053691685e-05, |
| "loss": 3.9989, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2764683106181165e-05, |
| "loss": 4.0017, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2756297158670645e-05, |
| "loss": 3.9942, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2747911211160125e-05, |
| "loss": 4.0039, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2739525263649605e-05, |
| "loss": 4.0006, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2731139316139085e-05, |
| "loss": 3.9937, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2722753368628565e-05, |
| "loss": 4.0004, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2714367421118045e-05, |
| "loss": 4.0096, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2705981473607525e-05, |
| "loss": 3.9995, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2697595526097005e-05, |
| "loss": 3.9981, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2689209578586485e-05, |
| "loss": 3.9893, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.26808400098797e-05, |
| "loss": 3.9951, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2672454062369174e-05, |
| "loss": 4.0046, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2664068114858654e-05, |
| "loss": 4.0022, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2655682167348134e-05, |
| "loss": 4.0082, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.264731259864135e-05, |
| "loss": 3.9858, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.263894302993456e-05, |
| "loss": 4.0065, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.263055708242404e-05, |
| "loss": 4.0034, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.262217113491352e-05, |
| "loss": 4.0063, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2613785187403e-05, |
| "loss": 3.9954, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.260539923989248e-05, |
| "loss": 3.9955, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.259701329238196e-05, |
| "loss": 4.0078, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.258862734487144e-05, |
| "loss": 4.0134, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.258024139736092e-05, |
| "loss": 3.9967, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.25718554498504e-05, |
| "loss": 4.0017, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.256346950233988e-05, |
| "loss": 3.9981, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.255509993363309e-05, |
| "loss": 4.0035, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.254671398612257e-05, |
| "loss": 3.9953, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.253832803861205e-05, |
| "loss": 4.0032, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252994209110153e-05, |
| "loss": 3.9927, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.252155614359101e-05, |
| "loss": 3.9987, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2513186574884216e-05, |
| "loss": 4.0044, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.050886154174805, |
| "eval_runtime": 291.7201, |
| "eval_samples_per_second": 1308.072, |
| "eval_steps_per_second": 40.878, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2504800627373696e-05, |
| "loss": 3.9891, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2496414679863176e-05, |
| "loss": 3.9934, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.248802873235266e-05, |
| "loss": 3.9942, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247964278484214e-05, |
| "loss": 4.0046, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247125683733162e-05, |
| "loss": 4.0006, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.24628708898211e-05, |
| "loss": 4.0075, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.245448494231058e-05, |
| "loss": 3.9918, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.244609899480006e-05, |
| "loss": 4.0049, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2437713047289536e-05, |
| "loss": 3.9957, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2429327099779016e-05, |
| "loss": 3.9988, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2420941152268496e-05, |
| "loss": 3.9991, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2412555204757976e-05, |
| "loss": 4.0027, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2404185636051185e-05, |
| "loss": 4.0128, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2395799688540665e-05, |
| "loss": 3.9916, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2387413741030145e-05, |
| "loss": 3.9807, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237902779351963e-05, |
| "loss": 3.9914, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237064184600911e-05, |
| "loss": 3.9963, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.236225589849859e-05, |
| "loss": 3.9981, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.235386995098807e-05, |
| "loss": 3.992, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.234548400347755e-05, |
| "loss": 4.0027, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.233711443477076e-05, |
| "loss": 4.0141, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.232872848726024e-05, |
| "loss": 4.002, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.232034253974972e-05, |
| "loss": 4.0039, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.23119565922392e-05, |
| "loss": 4.0028, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.230357064472868e-05, |
| "loss": 4.0085, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.229518469721816e-05, |
| "loss": 3.9951, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.228679874970764e-05, |
| "loss": 3.9927, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.227842918100085e-05, |
| "loss": 4.0022, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.227004323349033e-05, |
| "loss": 3.9858, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2261657285979816e-05, |
| "loss": 3.9896, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2253271338469296e-05, |
| "loss": 3.9949, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2244885390958776e-05, |
| "loss": 4.0001, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.223649944344825e-05, |
| "loss": 4.0036, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.222811349593773e-05, |
| "loss": 3.9933, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.221972754842721e-05, |
| "loss": 4.004, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2211357979720425e-05, |
| "loss": 3.9933, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.22029720322099e-05, |
| "loss": 3.9902, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2194602463503114e-05, |
| "loss": 3.9846, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2186216515992594e-05, |
| "loss": 3.9997, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.217783056848207e-05, |
| "loss": 3.9893, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2169444620971554e-05, |
| "loss": 3.9922, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2161058673461034e-05, |
| "loss": 3.9994, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2152672725950514e-05, |
| "loss": 4.0059, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2144286778439994e-05, |
| "loss": 3.9908, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2135900830929474e-05, |
| "loss": 4.0002, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.212753126222268e-05, |
| "loss": 3.9967, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.211914531471216e-05, |
| "loss": 3.9888, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.211075936720164e-05, |
| "loss": 3.9979, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.210237341969112e-05, |
| "loss": 3.9845, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.209400385098433e-05, |
| "loss": 3.9889, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.208561790347381e-05, |
| "loss": 4.0027, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.207723195596329e-05, |
| "loss": 3.9914, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.206884600845277e-05, |
| "loss": 3.9868, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.206046006094225e-05, |
| "loss": 3.9751, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.20521068710392e-05, |
| "loss": 3.9921, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2043720923528677e-05, |
| "loss": 3.9737, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2035334976018157e-05, |
| "loss": 4.0153, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2026949028507636e-05, |
| "loss": 3.9846, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2018563080997116e-05, |
| "loss": 4.0098, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2010177133486596e-05, |
| "loss": 3.9939, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2001807564779806e-05, |
| "loss": 3.9764, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1993421617269285e-05, |
| "loss": 3.9849, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1985035669758765e-05, |
| "loss": 3.9926, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1976649722248245e-05, |
| "loss": 3.9811, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1968263774737725e-05, |
| "loss": 3.9786, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1959877827227205e-05, |
| "loss": 4.0027, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1951491879716685e-05, |
| "loss": 3.9955, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.194310593220617e-05, |
| "loss": 3.9849, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.193473636349938e-05, |
| "loss": 3.9761, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.192635041598886e-05, |
| "loss": 3.9841, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.191796446847834e-05, |
| "loss": 3.9891, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.190959489977155e-05, |
| "loss": 4.0025, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.190120895226103e-05, |
| "loss": 3.9925, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.189282300475051e-05, |
| "loss": 3.993, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.188443705723999e-05, |
| "loss": 3.9959, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.187605110972947e-05, |
| "loss": 3.9925, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.186766516221895e-05, |
| "loss": 3.9954, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.185927921470843e-05, |
| "loss": 3.9837, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.185089326719791e-05, |
| "loss": 3.9942, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1842523698491126e-05, |
| "loss": 3.987, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1834137750980606e-05, |
| "loss": 3.9877, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1825751803470086e-05, |
| "loss": 3.9997, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.181736585595956e-05, |
| "loss": 4.0005, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.180897990844904e-05, |
| "loss": 3.9844, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1800610339742255e-05, |
| "loss": 3.9683, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1792240771035464e-05, |
| "loss": 3.997, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1783854823524944e-05, |
| "loss": 3.9894, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1775468876014424e-05, |
| "loss": 3.9947, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1767082928503904e-05, |
| "loss": 3.987, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.175869698099338e-05, |
| "loss": 3.9928, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1750311033482863e-05, |
| "loss": 3.988, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.174192508597234e-05, |
| "loss": 3.9851, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.173353913846182e-05, |
| "loss": 3.9858, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.172516956975503e-05, |
| "loss": 3.9921, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.171678362224451e-05, |
| "loss": 3.9998, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.170839767473399e-05, |
| "loss": 4.004, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.170001172722347e-05, |
| "loss": 3.9836, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.169164215851668e-05, |
| "loss": 3.9869, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.168325621100616e-05, |
| "loss": 3.9907, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.167487026349564e-05, |
| "loss": 3.9951, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.166648431598512e-05, |
| "loss": 3.9858, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.165811474727833e-05, |
| "loss": 3.9866, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1649745178571546e-05, |
| "loss": 3.9881, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.164135923106103e-05, |
| "loss": 3.9777, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1632973283550506e-05, |
| "loss": 3.9956, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1624587336039986e-05, |
| "loss": 3.9748, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1616201388529466e-05, |
| "loss": 3.981, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1607815441018946e-05, |
| "loss": 3.9956, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1599445872312155e-05, |
| "loss": 3.9814, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1591059924801635e-05, |
| "loss": 3.9844, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1582673977291115e-05, |
| "loss": 3.9927, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1574288029780595e-05, |
| "loss": 3.9783, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1565902082270075e-05, |
| "loss": 3.9916, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1557516134759555e-05, |
| "loss": 3.9871, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1549130187249035e-05, |
| "loss": 3.9871, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1540744239738515e-05, |
| "loss": 3.9927, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1532358292227995e-05, |
| "loss": 4.0001, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.152400510232494e-05, |
| "loss": 3.9904, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.151561915481442e-05, |
| "loss": 3.9865, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.15072332073039e-05, |
| "loss": 3.9835, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149884725979338e-05, |
| "loss": 3.993, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149046131228286e-05, |
| "loss": 3.9854, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.148209174357607e-05, |
| "loss": 3.9853, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.147370579606555e-05, |
| "loss": 3.9881, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.146531984855503e-05, |
| "loss": 3.9944, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.145693390104451e-05, |
| "loss": 3.988, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.144854795353399e-05, |
| "loss": 3.9866, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.144016200602347e-05, |
| "loss": 3.9796, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1431792437316684e-05, |
| "loss": 3.9832, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1423406489806164e-05, |
| "loss": 3.9864, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1415020542295644e-05, |
| "loss": 3.9933, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1406634594785124e-05, |
| "loss": 3.9921, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1398248647274604e-05, |
| "loss": 3.9748, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1389862699764084e-05, |
| "loss": 3.9943, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1381476752253564e-05, |
| "loss": 3.9902, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.137309080474304e-05, |
| "loss": 3.9921, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.136472123603625e-05, |
| "loss": 3.98, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.135633528852573e-05, |
| "loss": 3.9848, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.134794934101521e-05, |
| "loss": 3.9956, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.133957977230842e-05, |
| "loss": 4.0009, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.13311938247979e-05, |
| "loss": 3.9859, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.132280787728739e-05, |
| "loss": 3.987, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.131442192977687e-05, |
| "loss": 3.9894, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.130605236107008e-05, |
| "loss": 3.9878, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.129766641355956e-05, |
| "loss": 3.9815, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128928046604904e-05, |
| "loss": 3.9931, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128089451853851e-05, |
| "loss": 3.9779, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.127250857102799e-05, |
| "loss": 3.9936, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.126413900232121e-05, |
| "loss": 3.9856, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.04418420791626, |
| "eval_runtime": 296.6909, |
| "eval_samples_per_second": 1286.157, |
| "eval_steps_per_second": 40.193, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.125575305481069e-05, |
| "loss": 3.9597, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.124736710730016e-05, |
| "loss": 3.9814, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.123898115978964e-05, |
| "loss": 3.9798, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.123059521227913e-05, |
| "loss": 3.9921, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1222209264768607e-05, |
| "loss": 3.9888, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1213823317258087e-05, |
| "loss": 3.9959, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1205437369747566e-05, |
| "loss": 3.9818, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1197051422237046e-05, |
| "loss": 3.9926, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1188665474726526e-05, |
| "loss": 3.9805, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1180279527216006e-05, |
| "loss": 3.9885, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1171893579705486e-05, |
| "loss": 3.9873, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1163507632194966e-05, |
| "loss": 3.9915, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1155121684684446e-05, |
| "loss": 3.9942, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1146752115977655e-05, |
| "loss": 3.9846, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1138366168467135e-05, |
| "loss": 3.9724, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1129980220956615e-05, |
| "loss": 3.9798, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1121594273446095e-05, |
| "loss": 3.9769, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1113208325935575e-05, |
| "loss": 3.9863, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1104822378425055e-05, |
| "loss": 3.9821, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1096436430914535e-05, |
| "loss": 3.9854, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1088050483404015e-05, |
| "loss": 4.0038, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1079664535893495e-05, |
| "loss": 3.9897, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1071278588382975e-05, |
| "loss": 3.9919, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1062892640872455e-05, |
| "loss": 3.9904, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1054506693361935e-05, |
| "loss": 3.9969, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1046120745851415e-05, |
| "loss": 3.9798, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1037751177144624e-05, |
| "loss": 3.9843, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1029365229634104e-05, |
| "loss": 3.9886, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1020979282123584e-05, |
| "loss": 3.9794, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1012593334613064e-05, |
| "loss": 3.9709, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.100422376590627e-05, |
| "loss": 3.9839, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.099585419719949e-05, |
| "loss": 3.992, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098746824968897e-05, |
| "loss": 3.9891, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097908230217845e-05, |
| "loss": 3.9842, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097069635466793e-05, |
| "loss": 3.9896, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.096232678596114e-05, |
| "loss": 3.9798, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.095394083845062e-05, |
| "loss": 3.9766, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.094557126974383e-05, |
| "loss": 3.9732, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.093718532223331e-05, |
| "loss": 3.9894, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.092879937472279e-05, |
| "loss": 3.9768, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0920413427212267e-05, |
| "loss": 3.985, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0912027479701747e-05, |
| "loss": 3.9813, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0903641532191227e-05, |
| "loss": 3.9983, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.089525558468071e-05, |
| "loss": 3.976, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.088686963717019e-05, |
| "loss": 3.9858, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.087848368965967e-05, |
| "loss": 3.9854, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.087009774214915e-05, |
| "loss": 3.9744, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.086171179463863e-05, |
| "loss": 3.99, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.085332584712811e-05, |
| "loss": 3.9729, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.084495627842132e-05, |
| "loss": 3.9775, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.08365703309108e-05, |
| "loss": 3.9884, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082818438340028e-05, |
| "loss": 3.9793, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.081979843588976e-05, |
| "loss": 3.9757, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0811412488379235e-05, |
| "loss": 3.9612, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.080304291967245e-05, |
| "loss": 3.9819, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.079467335096567e-05, |
| "loss": 3.9624, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.078628740345515e-05, |
| "loss": 3.999, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.077790145594463e-05, |
| "loss": 3.974, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076951550843411e-05, |
| "loss": 3.9918, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076112956092359e-05, |
| "loss": 3.9869, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.075274361341306e-05, |
| "loss": 3.969, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.074435766590254e-05, |
| "loss": 3.9722, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.073597171839202e-05, |
| "loss": 3.9784, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.07275857708815e-05, |
| "loss": 3.9686, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071921620217471e-05, |
| "loss": 3.9665, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071083025466419e-05, |
| "loss": 3.9904, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.070244430715367e-05, |
| "loss": 3.9883, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0694074738446885e-05, |
| "loss": 3.974, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0685688790936365e-05, |
| "loss": 3.9606, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0677302843425845e-05, |
| "loss": 3.9717, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0668916895915325e-05, |
| "loss": 3.9789, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0660547327208534e-05, |
| "loss": 3.9944, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0652161379698014e-05, |
| "loss": 3.98, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0643775432187494e-05, |
| "loss": 3.9801, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0635389484676973e-05, |
| "loss": 3.9802, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0627003537166453e-05, |
| "loss": 3.9856, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061863396845966e-05, |
| "loss": 3.9775, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061024802094914e-05, |
| "loss": 3.9765, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.060186207343862e-05, |
| "loss": 3.9839, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.05934761259281e-05, |
| "loss": 3.9749, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.058509017841759e-05, |
| "loss": 3.9768, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.05767206097108e-05, |
| "loss": 3.9879, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.056833466220028e-05, |
| "loss": 3.9848, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.055994871468976e-05, |
| "loss": 3.9741, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.055156276717924e-05, |
| "loss": 3.9557, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.054317681966872e-05, |
| "loss": 3.9839, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.05347908721582e-05, |
| "loss": 3.9819, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.052640492464768e-05, |
| "loss": 3.9806, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.051801897713716e-05, |
| "loss": 3.9798, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0509649408430367e-05, |
| "loss": 3.9809, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.050126346091985e-05, |
| "loss": 3.9756, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.049287751340933e-05, |
| "loss": 3.9713, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.048449156589881e-05, |
| "loss": 3.9743, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.047610561838829e-05, |
| "loss": 3.9839, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.046771967087777e-05, |
| "loss": 3.9868, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0459333723367243e-05, |
| "loss": 3.9874, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.045096415466046e-05, |
| "loss": 3.981, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0442578207149943e-05, |
| "loss": 3.9708, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0434192259639423e-05, |
| "loss": 3.9781, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0425806312128896e-05, |
| "loss": 3.9823, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.041743674342211e-05, |
| "loss": 3.9781, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.040906717471532e-05, |
| "loss": 3.9749, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0400697606008533e-05, |
| "loss": 3.9777, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0392311658498013e-05, |
| "loss": 3.9651, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0383925710987493e-05, |
| "loss": 3.9865, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0375539763476973e-05, |
| "loss": 3.9596, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0367153815966453e-05, |
| "loss": 3.9735, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0358767868455933e-05, |
| "loss": 3.982, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0350381920945413e-05, |
| "loss": 3.9727, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0341995973434896e-05, |
| "loss": 3.9721, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.033361002592437e-05, |
| "loss": 3.9777, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.032522407841385e-05, |
| "loss": 3.9727, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.031683813090333e-05, |
| "loss": 3.9796, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.030845218339281e-05, |
| "loss": 3.9764, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0300082614686022e-05, |
| "loss": 3.9795, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0291696667175502e-05, |
| "loss": 3.9798, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.028331071966498e-05, |
| "loss": 3.9896, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.027494115095819e-05, |
| "loss": 3.9764, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.026655520344767e-05, |
| "loss": 3.9721, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.025816925593715e-05, |
| "loss": 3.9761, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0249783308426634e-05, |
| "loss": 3.9793, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0241397360916114e-05, |
| "loss": 3.9707, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0233011413405594e-05, |
| "loss": 3.9735, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0224625465895074e-05, |
| "loss": 3.9751, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0216239518384554e-05, |
| "loss": 3.9846, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0207853570874034e-05, |
| "loss": 3.9756, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0199467623363514e-05, |
| "loss": 3.9762, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0191098054656726e-05, |
| "loss": 3.9672, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0182712107146206e-05, |
| "loss": 3.9757, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0174326159635686e-05, |
| "loss": 3.9714, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0165940212125166e-05, |
| "loss": 3.9849, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0157554264614646e-05, |
| "loss": 3.98, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0149168317104126e-05, |
| "loss": 3.9632, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0140798748397335e-05, |
| "loss": 3.9791, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0132412800886815e-05, |
| "loss": 3.9842, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.01240268533763e-05, |
| "loss": 3.9797, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.011564090586578e-05, |
| "loss": 3.975, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.010725495835526e-05, |
| "loss": 3.97, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0098885389648467e-05, |
| "loss": 3.9849, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0090499442137947e-05, |
| "loss": 3.989, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0082113494627427e-05, |
| "loss": 3.9743, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0073727547116907e-05, |
| "loss": 3.9776, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0065341599606384e-05, |
| "loss": 3.977, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.00569720308996e-05, |
| "loss": 3.9747, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.004858608338908e-05, |
| "loss": 3.9719, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0040200135878553e-05, |
| "loss": 3.9828, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0031814188368036e-05, |
| "loss": 3.9682, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0023428240857516e-05, |
| "loss": 3.9804, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0015058672150732e-05, |
| "loss": 3.9762, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.0006672724640205e-05, |
| "loss": 3.9794, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.038766860961914, |
| "eval_runtime": 318.7488, |
| "eval_samples_per_second": 1197.153, |
| "eval_steps_per_second": 37.412, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9998286777129685e-05, |
| "loss": 3.9681, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9989900829619165e-05, |
| "loss": 3.9685, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9981514882108645e-05, |
| "loss": 3.9848, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.997312893459813e-05, |
| "loss": 3.9764, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.996474298708761e-05, |
| "loss": 3.9878, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9956373418380817e-05, |
| "loss": 3.9689, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9947987470870297e-05, |
| "loss": 3.9802, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9939601523359777e-05, |
| "loss": 3.9705, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9931215575849257e-05, |
| "loss": 3.9798, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9922829628338737e-05, |
| "loss": 3.9775, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.991444368082822e-05, |
| "loss": 3.9813, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.99060577333177e-05, |
| "loss": 3.9829, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.989768816461091e-05, |
| "loss": 3.9712, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.988930221710039e-05, |
| "loss": 3.9644, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.988091626958987e-05, |
| "loss": 3.9724, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.987253032207935e-05, |
| "loss": 3.963, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.986414437456883e-05, |
| "loss": 3.9755, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9855758427058313e-05, |
| "loss": 3.9727, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9847372479547793e-05, |
| "loss": 3.9729, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9838986532037266e-05, |
| "loss": 3.9957, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9830616963330482e-05, |
| "loss": 3.9808, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9822231015819962e-05, |
| "loss": 3.9811, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9813845068309442e-05, |
| "loss": 3.9777, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.980545912079892e-05, |
| "loss": 3.9845, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.97970731732884e-05, |
| "loss": 3.9727, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9788719983385343e-05, |
| "loss": 3.9701, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9780334035874823e-05, |
| "loss": 3.9762, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9771948088364303e-05, |
| "loss": 3.9712, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9763562140853783e-05, |
| "loss": 3.9586, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9755176193343267e-05, |
| "loss": 3.9745, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.974679024583274e-05, |
| "loss": 3.9776, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.973840429832222e-05, |
| "loss": 3.9793, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.97300183508117e-05, |
| "loss": 3.9758, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9721648782104916e-05, |
| "loss": 3.9812, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9713279213398128e-05, |
| "loss": 3.9675, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9704893265887608e-05, |
| "loss": 3.9706, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9696507318377088e-05, |
| "loss": 3.9593, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9688121370866568e-05, |
| "loss": 3.9781, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.967973542335604e-05, |
| "loss": 3.9677, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.967134947584552e-05, |
| "loss": 3.9739, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9662963528335004e-05, |
| "loss": 3.9725, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9654577580824484e-05, |
| "loss": 3.9808, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9646208012117693e-05, |
| "loss": 3.9749, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9637822064607173e-05, |
| "loss": 3.9723, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9629436117096653e-05, |
| "loss": 3.9763, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9621050169586133e-05, |
| "loss": 3.9632, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9612664222075613e-05, |
| "loss": 3.9793, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9604294653368826e-05, |
| "loss": 3.9622, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9595908705858306e-05, |
| "loss": 3.9647, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9587539137151515e-05, |
| "loss": 3.9801, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9579153189640995e-05, |
| "loss": 3.9646, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9570767242130475e-05, |
| "loss": 3.9676, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9562397673423687e-05, |
| "loss": 3.9555, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9554011725913167e-05, |
| "loss": 3.9702, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9545625778402647e-05, |
| "loss": 3.9508, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9537239830892127e-05, |
| "loss": 3.989, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9528853883381607e-05, |
| "loss": 3.9626, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9520467935871087e-05, |
| "loss": 3.9836, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9512081988360567e-05, |
| "loss": 3.9758, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9503696040850047e-05, |
| "loss": 3.9592, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.949531009333953e-05, |
| "loss": 3.9581, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.948692414582901e-05, |
| "loss": 3.9691, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.947853819831849e-05, |
| "loss": 3.9589, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.947015225080797e-05, |
| "loss": 3.9569, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.946178268210118e-05, |
| "loss": 3.9801, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.945339673459066e-05, |
| "loss": 3.9743, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.944501078708014e-05, |
| "loss": 3.9642, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9436624839569622e-05, |
| "loss": 3.9519, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.942825527086283e-05, |
| "loss": 3.9599, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.941986932335231e-05, |
| "loss": 3.9711, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.941149975464552e-05, |
| "loss": 3.9775, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9403113807135e-05, |
| "loss": 3.9724, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9394727859624484e-05, |
| "loss": 3.9685, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9386341912113964e-05, |
| "loss": 3.9741, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9377955964603444e-05, |
| "loss": 3.9742, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9369570017092924e-05, |
| "loss": 3.9673, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9361184069582404e-05, |
| "loss": 3.9681, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9352798122071877e-05, |
| "loss": 3.9717, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.934441217456136e-05, |
| "loss": 3.9654, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9336042605854576e-05, |
| "loss": 3.9655, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.932765665834405e-05, |
| "loss": 3.9748, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.931927071083353e-05, |
| "loss": 3.9783, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.931088476332301e-05, |
| "loss": 3.9625, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9302531573419954e-05, |
| "loss": 3.9488, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9294162004713167e-05, |
| "loss": 3.97, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9285776057202647e-05, |
| "loss": 3.9711, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9277390109692127e-05, |
| "loss": 3.9706, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9269004162181607e-05, |
| "loss": 3.9674, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9260618214671086e-05, |
| "loss": 3.9707, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9252232267160566e-05, |
| "loss": 3.9651, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9243846319650046e-05, |
| "loss": 3.9603, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9235460372139523e-05, |
| "loss": 3.9659, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9227074424629003e-05, |
| "loss": 3.9683, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9218688477118483e-05, |
| "loss": 3.9772, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.92103189084117e-05, |
| "loss": 3.9786, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9201932960901175e-05, |
| "loss": 3.9749, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9193547013390655e-05, |
| "loss": 3.9564, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9185161065880135e-05, |
| "loss": 3.9684, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9176775118369615e-05, |
| "loss": 3.9724, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9168389170859095e-05, |
| "loss": 3.9695, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9160019602152304e-05, |
| "loss": 3.964, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9151633654641784e-05, |
| "loss": 3.9651, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9143247707131268e-05, |
| "loss": 3.959, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9134861759620747e-05, |
| "loss": 3.9787, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9126475812110227e-05, |
| "loss": 3.9492, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9118089864599707e-05, |
| "loss": 3.965, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9109703917089187e-05, |
| "loss": 3.9725, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9101317969578667e-05, |
| "loss": 3.9605, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9092948400871876e-05, |
| "loss": 3.9621, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.908456245336136e-05, |
| "loss": 3.9683, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.907617650585084e-05, |
| "loss": 3.961, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.906779055834032e-05, |
| "loss": 3.97, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.90594046108298e-05, |
| "loss": 3.9676, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.905103504212301e-05, |
| "loss": 3.9673, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.904264909461249e-05, |
| "loss": 3.9698, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.90342795259057e-05, |
| "loss": 3.9808, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.902589357839518e-05, |
| "loss": 3.9663, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.901750763088466e-05, |
| "loss": 3.9636, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.900912168337414e-05, |
| "loss": 3.9675, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.900073573586362e-05, |
| "loss": 3.9686, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.89923497883531e-05, |
| "loss": 3.9628, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8983980219646313e-05, |
| "loss": 3.9599, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8975594272135793e-05, |
| "loss": 3.967, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8967208324625273e-05, |
| "loss": 3.9732, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8958822377114753e-05, |
| "loss": 3.9692, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8950436429604233e-05, |
| "loss": 3.9648, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8942050482093706e-05, |
| "loss": 3.9562, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8933664534583186e-05, |
| "loss": 3.9617, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.892527858707267e-05, |
| "loss": 3.9592, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.891689263956215e-05, |
| "loss": 3.9782, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.890850669205163e-05, |
| "loss": 3.9718, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.890013712334484e-05, |
| "loss": 3.9558, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8891767554638055e-05, |
| "loss": 3.9662, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8883381607127535e-05, |
| "loss": 3.9714, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.887499565961701e-05, |
| "loss": 3.9651, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.886660971210649e-05, |
| "loss": 3.972, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.885822376459597e-05, |
| "loss": 3.9589, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.884983781708545e-05, |
| "loss": 3.9754, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.884146824837866e-05, |
| "loss": 3.9763, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.883308230086814e-05, |
| "loss": 3.9665, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8824696353357623e-05, |
| "loss": 3.9682, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8816310405847103e-05, |
| "loss": 3.9698, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8807924458336583e-05, |
| "loss": 3.9617, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8799554889629792e-05, |
| "loss": 3.962, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8791168942119272e-05, |
| "loss": 3.9754, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8782782994608752e-05, |
| "loss": 3.9578, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8774397047098232e-05, |
| "loss": 3.9663, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8766011099587716e-05, |
| "loss": 3.9679, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8757625152077196e-05, |
| "loss": 3.9662, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.033881187438965, |
| "eval_runtime": 317.8047, |
| "eval_samples_per_second": 1200.709, |
| "eval_steps_per_second": 37.523, |
| "step": 1297440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8749239204566676e-05, |
| "loss": 3.9574, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8740853257056155e-05, |
| "loss": 3.9613, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8732467309545635e-05, |
| "loss": 3.9769, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8724081362035115e-05, |
| "loss": 3.9622, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8715695414524595e-05, |
| "loss": 3.9778, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8707309467014072e-05, |
| "loss": 3.9581, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8698923519503552e-05, |
| "loss": 3.9725, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8690537571993032e-05, |
| "loss": 3.9613, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8682151624482512e-05, |
| "loss": 3.9666, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8673765676971992e-05, |
| "loss": 3.9683, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.866537972946147e-05, |
| "loss": 3.9677, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.865699378195095e-05, |
| "loss": 3.973, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8648624213244164e-05, |
| "loss": 3.9608, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8640238265733644e-05, |
| "loss": 3.9579, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8631852318223124e-05, |
| "loss": 3.9609, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8623466370712604e-05, |
| "loss": 3.9584, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8615080423202084e-05, |
| "loss": 3.9604, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8606694475691564e-05, |
| "loss": 3.9625, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8598308528181044e-05, |
| "loss": 3.9626, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8589922580670524e-05, |
| "loss": 3.9897, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8581536633160004e-05, |
| "loss": 3.9675, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8573150685649487e-05, |
| "loss": 3.9725, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.856476473813896e-05, |
| "loss": 3.9675, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.855637879062844e-05, |
| "loss": 3.9728, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8548009221921656e-05, |
| "loss": 3.9633, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8539623274411136e-05, |
| "loss": 3.9634, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8531237326900613e-05, |
| "loss": 3.9635, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8522851379390093e-05, |
| "loss": 3.9608, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.851448181068331e-05, |
| "loss": 3.9457, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.850609586317278e-05, |
| "loss": 3.9676, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.849770991566226e-05, |
| "loss": 3.9654, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.848932396815174e-05, |
| "loss": 3.9717, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8480938020641225e-05, |
| "loss": 3.9645, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8472568451934434e-05, |
| "loss": 3.9684, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.846419888322765e-05, |
| "loss": 3.9595, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.845581293571713e-05, |
| "loss": 3.9623, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.844742698820661e-05, |
| "loss": 3.9517, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8439041040696086e-05, |
| "loss": 3.963, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8430655093185566e-05, |
| "loss": 3.9618, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8422269145675046e-05, |
| "loss": 3.9602, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8413883198164526e-05, |
| "loss": 3.963, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8405497250654006e-05, |
| "loss": 3.9715, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8397111303143486e-05, |
| "loss": 3.9647, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8388741734436695e-05, |
| "loss": 3.9651, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.838035578692618e-05, |
| "loss": 3.9674, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.837196983941566e-05, |
| "loss": 3.9535, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8363600270708868e-05, |
| "loss": 3.9665, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8355214323198348e-05, |
| "loss": 3.9559, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8346828375687828e-05, |
| "loss": 3.9512, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8338442428177307e-05, |
| "loss": 3.9695, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8330056480666787e-05, |
| "loss": 3.9561, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.832168691196e-05, |
| "loss": 3.9556, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.831330096444948e-05, |
| "loss": 3.9487, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.830491501693896e-05, |
| "loss": 3.9586, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.829652906942844e-05, |
| "loss": 3.944, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.828815950072165e-05, |
| "loss": 3.975, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8279773553211132e-05, |
| "loss": 3.9538, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.827140398450434e-05, |
| "loss": 3.9682, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.826301803699382e-05, |
| "loss": 3.9676, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.82546320894833e-05, |
| "loss": 3.9527, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.824624614197278e-05, |
| "loss": 3.9467, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.823786019446226e-05, |
| "loss": 3.9602, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.822947424695174e-05, |
| "loss": 3.9465, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8221088299441224e-05, |
| "loss": 3.9472, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8212702351930704e-05, |
| "loss": 3.9655, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8204316404420184e-05, |
| "loss": 3.9669, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8195930456909664e-05, |
| "loss": 3.9528, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8187560888202873e-05, |
| "loss": 3.947, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8179174940692353e-05, |
| "loss": 3.951, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8170788993181833e-05, |
| "loss": 3.9606, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8162403045671317e-05, |
| "loss": 3.9639, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8154017098160797e-05, |
| "loss": 3.9655, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8145647529454006e-05, |
| "loss": 3.9601, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8137261581943486e-05, |
| "loss": 3.9636, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8128875634432966e-05, |
| "loss": 3.9677, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8120489686922442e-05, |
| "loss": 3.9516, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8112103739411922e-05, |
| "loss": 3.9616, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8103750549508867e-05, |
| "loss": 3.9632, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8095364601998347e-05, |
| "loss": 3.951, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8086978654487827e-05, |
| "loss": 3.9612, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8078592706977307e-05, |
| "loss": 3.964, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8070206759466787e-05, |
| "loss": 3.968, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.806182081195627e-05, |
| "loss": 3.9495, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8053434864445744e-05, |
| "loss": 3.9422, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.804506529573896e-05, |
| "loss": 3.9593, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.803667934822844e-05, |
| "loss": 3.9606, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8028293400717916e-05, |
| "loss": 3.9619, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8019907453207396e-05, |
| "loss": 3.9589, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8011537884500612e-05, |
| "loss": 3.9585, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.800315193699009e-05, |
| "loss": 3.9588, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7994765989479565e-05, |
| "loss": 3.9487, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7986380041969045e-05, |
| "loss": 3.9554, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7977994094458525e-05, |
| "loss": 3.9581, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7969608146948008e-05, |
| "loss": 3.9668, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7961222199437488e-05, |
| "loss": 3.9689, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7952836251926968e-05, |
| "loss": 3.9653, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7944450304416448e-05, |
| "loss": 3.9479, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7936080735709657e-05, |
| "loss": 3.959, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7927694788199137e-05, |
| "loss": 3.9567, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.791932521949235e-05, |
| "loss": 3.965, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.791093927198183e-05, |
| "loss": 3.9528, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.790255332447131e-05, |
| "loss": 3.9531, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.789416737696079e-05, |
| "loss": 3.9485, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.788578142945027e-05, |
| "loss": 3.9661, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.787741186074348e-05, |
| "loss": 3.938, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.786902591323296e-05, |
| "loss": 3.9556, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7860639965722442e-05, |
| "loss": 3.9621, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7852254018211922e-05, |
| "loss": 3.9507, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.78438680707014e-05, |
| "loss": 3.9502, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.783549850199461e-05, |
| "loss": 3.9619, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.782711255448409e-05, |
| "loss": 3.9511, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.781872660697357e-05, |
| "loss": 3.9543, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.781034065946305e-05, |
| "loss": 3.9605, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7801954711952534e-05, |
| "loss": 3.9534, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7793568764442014e-05, |
| "loss": 3.9616, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7785199195735223e-05, |
| "loss": 3.9717, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7776813248224703e-05, |
| "loss": 3.9574, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7768427300714183e-05, |
| "loss": 3.9522, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7760041353203663e-05, |
| "loss": 3.9598, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7751655405693143e-05, |
| "loss": 3.9573, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7743285836986355e-05, |
| "loss": 3.9507, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7734899889475835e-05, |
| "loss": 3.953, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7726513941965315e-05, |
| "loss": 3.9532, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7718127994454795e-05, |
| "loss": 3.9685, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7709742046944275e-05, |
| "loss": 3.9587, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7701356099433752e-05, |
| "loss": 3.957, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.769297015192323e-05, |
| "loss": 3.9468, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.768458420441271e-05, |
| "loss": 3.9517, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7676214635705927e-05, |
| "loss": 3.95, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.76678286881954e-05, |
| "loss": 3.9675, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.765944274068488e-05, |
| "loss": 3.9627, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7651056793174364e-05, |
| "loss": 3.946, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7642670845663844e-05, |
| "loss": 3.9555, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7634301276957053e-05, |
| "loss": 3.9615, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7625915329446533e-05, |
| "loss": 3.957, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7617529381936013e-05, |
| "loss": 3.9656, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7609143434425493e-05, |
| "loss": 3.9511, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7600757486914973e-05, |
| "loss": 3.9631, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7592387918208185e-05, |
| "loss": 3.966, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7584001970697665e-05, |
| "loss": 3.9547, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7575632401990874e-05, |
| "loss": 3.9585, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7567246454480354e-05, |
| "loss": 3.9617, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.755887688577357e-05, |
| "loss": 3.9524, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.755049093826305e-05, |
| "loss": 3.9526, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7542104990752527e-05, |
| "loss": 3.9629, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7533719043242007e-05, |
| "loss": 3.9493, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7525333095731487e-05, |
| "loss": 3.9576, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7516947148220967e-05, |
| "loss": 3.9595, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7508561200710447e-05, |
| "loss": 3.9543, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.02894926071167, |
| "eval_runtime": 300.6765, |
| "eval_samples_per_second": 1269.108, |
| "eval_steps_per_second": 39.661, |
| "step": 1373760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7500175253199927e-05, |
| "loss": 3.9479, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.749178930568941e-05, |
| "loss": 3.9532, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.748340335817889e-05, |
| "loss": 3.9669, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.747501741066837e-05, |
| "loss": 3.9498, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.746663146315785e-05, |
| "loss": 3.9692, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.745824551564733e-05, |
| "loss": 3.9502, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.744985956813681e-05, |
| "loss": 3.9615, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.744147362062629e-05, |
| "loss": 3.947, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7433087673115766e-05, |
| "loss": 3.9648, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7424701725605246e-05, |
| "loss": 3.958, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7416315778094726e-05, |
| "loss": 3.9571, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7407929830584206e-05, |
| "loss": 3.9656, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7399560261877415e-05, |
| "loss": 3.9503, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7391174314366895e-05, |
| "loss": 3.9477, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7382788366856375e-05, |
| "loss": 3.95, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.737440241934586e-05, |
| "loss": 3.9468, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.736601647183534e-05, |
| "loss": 3.9505, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7357630524324818e-05, |
| "loss": 3.9532, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7349244576814298e-05, |
| "loss": 3.9513, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7340858629303778e-05, |
| "loss": 3.9794, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7332472681793258e-05, |
| "loss": 3.9586, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7324086734282738e-05, |
| "loss": 3.9631, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7315700786772218e-05, |
| "loss": 3.9601, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7307314839261698e-05, |
| "loss": 3.9623, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.729894527055491e-05, |
| "loss": 3.9554, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.729055932304439e-05, |
| "loss": 3.9529, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.728217337553387e-05, |
| "loss": 3.9559, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.727378742802335e-05, |
| "loss": 3.95, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.726541785931656e-05, |
| "loss": 3.9387, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7257031911806043e-05, |
| "loss": 3.9551, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7248645964295523e-05, |
| "loss": 3.9602, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7240260016785003e-05, |
| "loss": 3.9577, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7231874069274476e-05, |
| "loss": 3.959, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7223488121763956e-05, |
| "loss": 3.9615, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7215118553057172e-05, |
| "loss": 3.9504, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7206748984350384e-05, |
| "loss": 3.949, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7198379415643593e-05, |
| "loss": 3.9454, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7189993468133073e-05, |
| "loss": 3.9494, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7181607520622553e-05, |
| "loss": 3.9509, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7173221573112033e-05, |
| "loss": 3.9494, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7164835625601513e-05, |
| "loss": 3.9563, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7156449678090996e-05, |
| "loss": 3.9614, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7148080109384206e-05, |
| "loss": 3.9512, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7139694161873686e-05, |
| "loss": 3.9564, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7131308214363165e-05, |
| "loss": 3.9604, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7122922266852645e-05, |
| "loss": 3.9419, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7114536319342125e-05, |
| "loss": 3.9569, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7106150371831602e-05, |
| "loss": 3.9512, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7097764424321082e-05, |
| "loss": 3.9378, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7089378476810562e-05, |
| "loss": 3.9602, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7080992529300042e-05, |
| "loss": 3.9492, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7072606581789522e-05, |
| "loss": 3.9443, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7064220634279002e-05, |
| "loss": 3.9439, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7055851065572214e-05, |
| "loss": 3.9482, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7047465118061694e-05, |
| "loss": 3.9322, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7039079170551174e-05, |
| "loss": 3.9597, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7030693223040654e-05, |
| "loss": 3.9474, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7022307275530134e-05, |
| "loss": 3.9583, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7013921328019614e-05, |
| "loss": 3.96, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7005535380509094e-05, |
| "loss": 3.9432, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6997149432998574e-05, |
| "loss": 3.9378, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6988763485488054e-05, |
| "loss": 3.955, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6980393916781266e-05, |
| "loss": 3.9382, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6972007969270746e-05, |
| "loss": 3.938, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6963638400563955e-05, |
| "loss": 3.956, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6955252453053435e-05, |
| "loss": 3.9611, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.694686650554292e-05, |
| "loss": 3.9417, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.69384805580324e-05, |
| "loss": 3.9382, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.693009461052188e-05, |
| "loss": 3.9393, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.692170866301136e-05, |
| "loss": 3.9483, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6913339094304568e-05, |
| "loss": 3.9557, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6904953146794048e-05, |
| "loss": 3.9517, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6896567199283528e-05, |
| "loss": 3.9556, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6888181251773008e-05, |
| "loss": 3.9525, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.687981168306622e-05, |
| "loss": 3.9595, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.68714257355557e-05, |
| "loss": 3.9426, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.686303978804518e-05, |
| "loss": 3.9501, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.685465384053466e-05, |
| "loss": 3.9564, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6846267893024136e-05, |
| "loss": 3.9396, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6837898324317352e-05, |
| "loss": 3.9504, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6829512376806832e-05, |
| "loss": 3.9586, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6821126429296312e-05, |
| "loss": 3.9578, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6812740481785785e-05, |
| "loss": 3.9422, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6804354534275265e-05, |
| "loss": 3.9296, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.679598496556848e-05, |
| "loss": 3.953, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6787599018057958e-05, |
| "loss": 3.9447, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6779213070547438e-05, |
| "loss": 3.9566, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6770827123036918e-05, |
| "loss": 3.9499, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6762457554330134e-05, |
| "loss": 3.9502, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6754071606819607e-05, |
| "loss": 3.9493, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.674568565930909e-05, |
| "loss": 3.9391, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.673729971179857e-05, |
| "loss": 3.9485, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.672891376428805e-05, |
| "loss": 3.9477, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.672054419558126e-05, |
| "loss": 3.961, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.671215824807074e-05, |
| "loss": 3.9607, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.670377230056022e-05, |
| "loss": 3.9546, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.66953863530497e-05, |
| "loss": 3.9351, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6687000405539182e-05, |
| "loss": 3.9521, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.667863083683239e-05, |
| "loss": 3.9466, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.667024488932187e-05, |
| "loss": 3.9555, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.666185894181135e-05, |
| "loss": 3.9436, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.665347299430083e-05, |
| "loss": 3.9461, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.664508704679031e-05, |
| "loss": 3.9381, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.663670109927979e-05, |
| "loss": 3.9561, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6628315151769275e-05, |
| "loss": 3.9296, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6619929204258755e-05, |
| "loss": 3.9483, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6611559635551964e-05, |
| "loss": 3.9442, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6603173688041444e-05, |
| "loss": 3.948, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6594787740530924e-05, |
| "loss": 3.9412, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6586401793020403e-05, |
| "loss": 3.953, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6578015845509883e-05, |
| "loss": 3.9489, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6569629897999367e-05, |
| "loss": 3.944, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6561260329292576e-05, |
| "loss": 3.9505, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6552874381782056e-05, |
| "loss": 3.9395, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6544488434271536e-05, |
| "loss": 3.9588, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6536102486761016e-05, |
| "loss": 3.965, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6527716539250496e-05, |
| "loss": 3.9452, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6519330591739972e-05, |
| "loss": 3.9453, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6510944644229452e-05, |
| "loss": 3.9467, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6502558696718932e-05, |
| "loss": 3.9489, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6494189128012148e-05, |
| "loss": 3.9425, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6485819559305357e-05, |
| "loss": 3.9463, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6477433611794837e-05, |
| "loss": 3.9414, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.646904766428432e-05, |
| "loss": 3.9596, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6460661716773794e-05, |
| "loss": 3.9514, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6452275769263274e-05, |
| "loss": 3.9453, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6443889821752754e-05, |
| "loss": 3.9386, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6435503874242233e-05, |
| "loss": 3.9439, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6427117926731713e-05, |
| "loss": 3.9374, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6418731979221197e-05, |
| "loss": 3.9593, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6410362410514406e-05, |
| "loss": 3.9553, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6401976463003886e-05, |
| "loss": 3.9408, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6393606894297095e-05, |
| "loss": 3.9419, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6385220946786575e-05, |
| "loss": 3.9529, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6376834999276058e-05, |
| "loss": 3.9468, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6368449051765538e-05, |
| "loss": 3.9586, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6360063104255018e-05, |
| "loss": 3.9386, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6351677156744498e-05, |
| "loss": 3.9581, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6343291209233978e-05, |
| "loss": 3.9509, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6334905261723458e-05, |
| "loss": 3.9495, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6326519314212938e-05, |
| "loss": 3.951, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6318149745506147e-05, |
| "loss": 3.9533, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.630976379799563e-05, |
| "loss": 3.9429, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.630137785048511e-05, |
| "loss": 3.9459, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.629299190297459e-05, |
| "loss": 3.9535, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.62846223342678e-05, |
| "loss": 3.9437, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.627623638675728e-05, |
| "loss": 3.9448, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.626785043924676e-05, |
| "loss": 3.9484, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.625946449173624e-05, |
| "loss": 3.947, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.0246968269348145, |
| "eval_runtime": 302.6049, |
| "eval_samples_per_second": 1261.02, |
| "eval_steps_per_second": 39.408, |
| "step": 1450080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6251094923029452e-05, |
| "loss": 3.9416, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6242708975518932e-05, |
| "loss": 3.9414, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.623432302800841e-05, |
| "loss": 3.9581, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.622593708049789e-05, |
| "loss": 3.9409, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.62175675117911e-05, |
| "loss": 3.9664, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6209181564280584e-05, |
| "loss": 3.9373, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6200795616770064e-05, |
| "loss": 3.9547, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6192409669259544e-05, |
| "loss": 3.9431, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6184023721749024e-05, |
| "loss": 3.9474, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6175654153042233e-05, |
| "loss": 3.9512, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6167268205531713e-05, |
| "loss": 3.9469, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6158882258021193e-05, |
| "loss": 3.9555, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6150512689314405e-05, |
| "loss": 3.9436, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6142126741803885e-05, |
| "loss": 3.9385, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6133740794293365e-05, |
| "loss": 3.9468, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6125354846782845e-05, |
| "loss": 3.9333, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6116968899272325e-05, |
| "loss": 3.9455, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6108582951761805e-05, |
| "loss": 3.9469, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6100197004251282e-05, |
| "loss": 3.9427, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6091827435544498e-05, |
| "loss": 3.9668, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6083441488033978e-05, |
| "loss": 3.9536, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.607505554052345e-05, |
| "loss": 3.9564, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.606666959301293e-05, |
| "loss": 3.9492, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6058283645502414e-05, |
| "loss": 3.9505, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6049897697991894e-05, |
| "loss": 3.9466, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6041511750481374e-05, |
| "loss": 3.9448, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6033125802970854e-05, |
| "loss": 3.9476, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6024756234264063e-05, |
| "loss": 3.9402, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6016370286753543e-05, |
| "loss": 3.9366, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6007984339243023e-05, |
| "loss": 3.9409, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5999598391732506e-05, |
| "loss": 3.9529, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5991228823025715e-05, |
| "loss": 3.9513, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5982842875515195e-05, |
| "loss": 3.9522, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5974456928004675e-05, |
| "loss": 3.9548, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5966087359297884e-05, |
| "loss": 3.9351, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5957701411787368e-05, |
| "loss": 3.9426, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5949315464276848e-05, |
| "loss": 3.9415, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5940929516766328e-05, |
| "loss": 3.9374, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5932543569255808e-05, |
| "loss": 3.9437, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5924157621745288e-05, |
| "loss": 3.9381, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5915771674234768e-05, |
| "loss": 3.9485, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5907385726724247e-05, |
| "loss": 3.9507, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.589901615801746e-05, |
| "loss": 3.9455, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.589063021050694e-05, |
| "loss": 3.9428, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.588224426299642e-05, |
| "loss": 3.9526, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.58738583154859e-05, |
| "loss": 3.9351, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.586547236797538e-05, |
| "loss": 3.9485, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.585710279926859e-05, |
| "loss": 3.943, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.58487332305618e-05, |
| "loss": 3.9286, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.584034728305128e-05, |
| "loss": 3.9538, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.583196133554076e-05, |
| "loss": 3.94, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.582357538803024e-05, |
| "loss": 3.9415, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.581518944051972e-05, |
| "loss": 3.9315, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.580681987181293e-05, |
| "loss": 3.9392, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5798433924302414e-05, |
| "loss": 3.925, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5790047976791894e-05, |
| "loss": 3.9483, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5781662029281374e-05, |
| "loss": 3.9409, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5773276081770854e-05, |
| "loss": 3.9484, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5764890134260333e-05, |
| "loss": 3.9503, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5756504186749813e-05, |
| "loss": 3.9349, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.574811823923929e-05, |
| "loss": 3.9287, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.573973229172877e-05, |
| "loss": 3.9492, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5731362723021986e-05, |
| "loss": 3.9311, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5722976775511466e-05, |
| "loss": 3.9295, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.571459082800094e-05, |
| "loss": 3.9505, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.570620488049042e-05, |
| "loss": 3.9482, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5697835311783635e-05, |
| "loss": 3.9351, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.568944936427311e-05, |
| "loss": 3.9333, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.568106341676259e-05, |
| "loss": 3.9267, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.567267746925207e-05, |
| "loss": 3.9358, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.566429152174155e-05, |
| "loss": 3.949, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5655938331838496e-05, |
| "loss": 3.9482, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5647552384327976e-05, |
| "loss": 3.9447, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5639166436817456e-05, |
| "loss": 3.9394, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.563078048930694e-05, |
| "loss": 3.9599, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5622394541796413e-05, |
| "loss": 3.933, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5614008594285893e-05, |
| "loss": 3.9441, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.560563902557911e-05, |
| "loss": 3.9427, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5597253078068585e-05, |
| "loss": 3.9292, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5588867130558065e-05, |
| "loss": 3.9482, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5580481183047545e-05, |
| "loss": 3.9487, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5572095235537025e-05, |
| "loss": 3.9456, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5563725666830234e-05, |
| "loss": 3.9364, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5555339719319714e-05, |
| "loss": 3.9267, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5546953771809194e-05, |
| "loss": 3.9431, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5538567824298677e-05, |
| "loss": 3.9404, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5530181876788157e-05, |
| "loss": 3.9468, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5521795929277637e-05, |
| "loss": 3.9362, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5513426360570846e-05, |
| "loss": 3.9447, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5505040413060326e-05, |
| "loss": 3.9368, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5496654465549806e-05, |
| "loss": 3.9367, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5488268518039286e-05, |
| "loss": 3.9355, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.547988257052877e-05, |
| "loss": 3.9346, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.547151300182198e-05, |
| "loss": 3.9564, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.546312705431146e-05, |
| "loss": 3.9485, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.545474110680094e-05, |
| "loss": 3.9529, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.544635515929042e-05, |
| "loss": 3.9266, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.543798559058363e-05, |
| "loss": 3.9424, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.542959964307311e-05, |
| "loss": 3.9405, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.542121369556259e-05, |
| "loss": 3.946, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.541282774805207e-05, |
| "loss": 3.9368, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.540444180054155e-05, |
| "loss": 3.9379, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.539605585303103e-05, |
| "loss": 3.9292, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.538766990552051e-05, |
| "loss": 3.9519, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.537928395800999e-05, |
| "loss": 3.9233, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.537089801049947e-05, |
| "loss": 3.9377, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5362544820596412e-05, |
| "loss": 3.9379, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5354158873085892e-05, |
| "loss": 3.9361, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5345772925575372e-05, |
| "loss": 3.9336, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5337386978064852e-05, |
| "loss": 3.9466, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5329001030554332e-05, |
| "loss": 3.9387, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5320631461847545e-05, |
| "loss": 3.933, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5312245514337024e-05, |
| "loss": 3.9471, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5303859566826504e-05, |
| "loss": 3.9342, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5295473619315984e-05, |
| "loss": 3.9466, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5287087671805464e-05, |
| "loss": 3.9566, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5278701724294944e-05, |
| "loss": 3.9402, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5270332155588157e-05, |
| "loss": 3.9366, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5261946208077637e-05, |
| "loss": 3.9352, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5253560260567117e-05, |
| "loss": 3.9392, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5245174313056597e-05, |
| "loss": 3.936, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.523678836554607e-05, |
| "loss": 3.9437, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5228418796839286e-05, |
| "loss": 3.9274, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.522003284932877e-05, |
| "loss": 3.9519, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.521164690181825e-05, |
| "loss": 3.9453, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5203260954307722e-05, |
| "loss": 3.939, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5194875006797202e-05, |
| "loss": 3.9263, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5186505438090418e-05, |
| "loss": 3.9375, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5178119490579895e-05, |
| "loss": 3.9329, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5169733543069375e-05, |
| "loss": 3.9457, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5161347595558854e-05, |
| "loss": 3.9516, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5152961648048334e-05, |
| "loss": 3.939, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5144592079341544e-05, |
| "loss": 3.9285, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5136206131831023e-05, |
| "loss": 3.9461, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5127820184320507e-05, |
| "loss": 3.9366, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5119450615613716e-05, |
| "loss": 3.9536, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5111064668103196e-05, |
| "loss": 3.9282, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5102678720592676e-05, |
| "loss": 3.9475, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5094292773082156e-05, |
| "loss": 3.9444, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5085906825571636e-05, |
| "loss": 3.9463, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5077520878061116e-05, |
| "loss": 3.9422, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.50691349305506e-05, |
| "loss": 3.9427, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.506074898304008e-05, |
| "loss": 3.9325, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5052379414333288e-05, |
| "loss": 3.9398, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5043993466822768e-05, |
| "loss": 3.9417, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5035607519312248e-05, |
| "loss": 3.9393, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5027221571801728e-05, |
| "loss": 3.936, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5018835624291208e-05, |
| "loss": 3.9427, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5010449676780688e-05, |
| "loss": 3.9364, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.020267009735107, |
| "eval_runtime": 301.3693, |
| "eval_samples_per_second": 1266.191, |
| "eval_steps_per_second": 39.569, |
| "step": 1526400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.500206372927017e-05, |
| "loss": 3.9304, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4993677781759648e-05, |
| "loss": 3.9376, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4985291834249128e-05, |
| "loss": 3.9485, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4976905886738608e-05, |
| "loss": 3.9356, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4968519939228088e-05, |
| "loss": 3.9504, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4960133991717568e-05, |
| "loss": 3.937, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4951748044207048e-05, |
| "loss": 3.9421, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4943362096696528e-05, |
| "loss": 3.9364, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4934976149186007e-05, |
| "loss": 3.9395, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4926590201675487e-05, |
| "loss": 3.9435, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4918204254164967e-05, |
| "loss": 3.9407, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4909818306654447e-05, |
| "loss": 3.9482, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4901448737947656e-05, |
| "loss": 3.9361, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.489306279043714e-05, |
| "loss": 3.9324, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.488467684292662e-05, |
| "loss": 3.9365, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.48762908954161e-05, |
| "loss": 3.9248, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.486790494790558e-05, |
| "loss": 3.9376, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4859519000395056e-05, |
| "loss": 3.9389, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4851133052884536e-05, |
| "loss": 3.9353, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4842747105374016e-05, |
| "loss": 3.9577, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4834377536667232e-05, |
| "loss": 3.9442, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.482599158915671e-05, |
| "loss": 3.9493, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.481760564164619e-05, |
| "loss": 3.9428, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.480921969413567e-05, |
| "loss": 3.9443, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.480083374662515e-05, |
| "loss": 3.9352, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.479244779911463e-05, |
| "loss": 3.9417, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.478407823040784e-05, |
| "loss": 3.9373, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.477569228289732e-05, |
| "loss": 3.9345, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.47673063353868e-05, |
| "loss": 3.9301, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.475892038787628e-05, |
| "loss": 3.9327, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.475053444036576e-05, |
| "loss": 3.9398, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4742148492855237e-05, |
| "loss": 3.9448, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4733762545344717e-05, |
| "loss": 3.9416, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.47253765978342e-05, |
| "loss": 3.9429, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4717007029127413e-05, |
| "loss": 3.929, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.470862108161689e-05, |
| "loss": 3.938, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.470023513410637e-05, |
| "loss": 3.9359, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.469184918659585e-05, |
| "loss": 3.9262, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.468346323908533e-05, |
| "loss": 3.9387, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.467507729157481e-05, |
| "loss": 3.928, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4666707722868022e-05, |
| "loss": 3.9436, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4658321775357502e-05, |
| "loss": 3.9454, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4649935827846982e-05, |
| "loss": 3.9372, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4641549880336462e-05, |
| "loss": 3.9321, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4633163932825942e-05, |
| "loss": 3.9474, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.462477798531542e-05, |
| "loss": 3.9266, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4616408416608634e-05, |
| "loss": 3.9417, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4608022469098114e-05, |
| "loss": 3.9345, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.459963652158759e-05, |
| "loss": 3.9209, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.459125057407707e-05, |
| "loss": 3.9467, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.458286462656655e-05, |
| "loss": 3.9322, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.457447867905603e-05, |
| "loss": 3.9325, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4566109110349243e-05, |
| "loss": 3.9268, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4557723162838723e-05, |
| "loss": 3.9289, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4549337215328203e-05, |
| "loss": 3.9232, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4540967646621415e-05, |
| "loss": 3.9343, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4532581699110892e-05, |
| "loss": 3.9371, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4524195751600372e-05, |
| "loss": 3.939, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4515809804089855e-05, |
| "loss": 3.943, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4507423856579335e-05, |
| "loss": 3.9328, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4499037909068815e-05, |
| "loss": 3.9204, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4490651961558295e-05, |
| "loss": 3.9407, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4482266014047772e-05, |
| "loss": 3.9216, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4473880066537252e-05, |
| "loss": 3.9186, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.446549411902673e-05, |
| "loss": 3.9456, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.445710817151621e-05, |
| "loss": 3.9397, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4448738602809424e-05, |
| "loss": 3.9294, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4440352655298904e-05, |
| "loss": 3.9264, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4431966707788384e-05, |
| "loss": 3.9167, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4423580760277864e-05, |
| "loss": 3.9287, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4415194812767344e-05, |
| "loss": 3.9428, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4406825244060556e-05, |
| "loss": 3.9369, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4398439296550036e-05, |
| "loss": 3.9374, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4390053349039516e-05, |
| "loss": 3.9328, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4381667401528996e-05, |
| "loss": 3.9532, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4373281454018476e-05, |
| "loss": 3.9254, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4364895506507953e-05, |
| "loss": 3.9399, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4356509558997433e-05, |
| "loss": 3.937, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4348123611486913e-05, |
| "loss": 3.9157, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.433975404278013e-05, |
| "loss": 3.9431, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4331368095269605e-05, |
| "loss": 3.9397, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4322982147759085e-05, |
| "loss": 3.9391, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4314596200248565e-05, |
| "loss": 3.9334, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4306210252738045e-05, |
| "loss": 3.918, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4297840684031258e-05, |
| "loss": 3.9343, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4289454736520737e-05, |
| "loss": 3.9309, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.428108516781395e-05, |
| "loss": 3.9416, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4272699220303427e-05, |
| "loss": 3.9303, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4264313272792906e-05, |
| "loss": 3.9356, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4255927325282386e-05, |
| "loss": 3.933, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4247541377771866e-05, |
| "loss": 3.9306, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.423915543026135e-05, |
| "loss": 3.9284, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.423076948275083e-05, |
| "loss": 3.9292, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.422238353524031e-05, |
| "loss": 3.9459, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.421401396653352e-05, |
| "loss": 3.9423, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4205628019023e-05, |
| "loss": 3.9459, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.419725845031621e-05, |
| "loss": 3.9175, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.418887250280569e-05, |
| "loss": 3.9357, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.418048655529517e-05, |
| "loss": 3.9327, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.417210060778465e-05, |
| "loss": 3.9368, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.416371466027413e-05, |
| "loss": 3.9317, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4155328712763608e-05, |
| "loss": 3.9265, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.414695914405682e-05, |
| "loss": 3.9277, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4138573196546303e-05, |
| "loss": 3.9411, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4130187249035783e-05, |
| "loss": 3.9147, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.412180130152526e-05, |
| "loss": 3.9294, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.411341535401474e-05, |
| "loss": 3.9268, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.410502940650422e-05, |
| "loss": 3.936, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.40966434589937e-05, |
| "loss": 3.9243, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.408825751148318e-05, |
| "loss": 3.9353, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.407987156397266e-05, |
| "loss": 3.9332, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4071501995265872e-05, |
| "loss": 3.9249, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4063116047755352e-05, |
| "loss": 3.9375, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4054730100244832e-05, |
| "loss": 3.9259, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.404636053153804e-05, |
| "loss": 3.9403, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.403797458402752e-05, |
| "loss": 3.9457, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4029605015320734e-05, |
| "loss": 3.9325, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4021219067810214e-05, |
| "loss": 3.9287, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4012833120299694e-05, |
| "loss": 3.9322, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4004447172789174e-05, |
| "loss": 3.929, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3996061225278653e-05, |
| "loss": 3.9302, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3987675277768133e-05, |
| "loss": 3.9345, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3979289330257613e-05, |
| "loss": 3.922, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3970903382747093e-05, |
| "loss": 3.9419, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3962517435236573e-05, |
| "loss": 3.9354, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3954131487726053e-05, |
| "loss": 3.9349, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3945745540215533e-05, |
| "loss": 3.9196, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3937359592705013e-05, |
| "loss": 3.9292, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3928990023998226e-05, |
| "loss": 3.9257, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3920604076487706e-05, |
| "loss": 3.9358, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3912218128977186e-05, |
| "loss": 3.9414, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3903832181466665e-05, |
| "loss": 3.9343, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3895446233956145e-05, |
| "loss": 3.9195, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3887076665249355e-05, |
| "loss": 3.9403, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3878690717738835e-05, |
| "loss": 3.9289, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3870304770228314e-05, |
| "loss": 3.944, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3861918822717794e-05, |
| "loss": 3.9224, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3853532875207274e-05, |
| "loss": 3.9431, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3845163306500487e-05, |
| "loss": 3.9366, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3836777358989967e-05, |
| "loss": 3.9401, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3828391411479443e-05, |
| "loss": 3.9324, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3820005463968927e-05, |
| "loss": 3.9359, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3811619516458407e-05, |
| "loss": 3.9248, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3803233568947887e-05, |
| "loss": 3.934, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3794847621437367e-05, |
| "loss": 3.9342, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3786461673926847e-05, |
| "loss": 3.9325, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3778092105220056e-05, |
| "loss": 3.9293, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3769722536513268e-05, |
| "loss": 3.9333, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3761336589002748e-05, |
| "loss": 3.9341, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.0167670249938965, |
| "eval_runtime": 296.3176, |
| "eval_samples_per_second": 1287.777, |
| "eval_steps_per_second": 40.244, |
| "step": 1602720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3752950641492228e-05, |
| "loss": 3.931, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3744564693981708e-05, |
| "loss": 3.9274, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3736178746471188e-05, |
| "loss": 3.9404, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3727792798960668e-05, |
| "loss": 3.9285, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3719406851450148e-05, |
| "loss": 3.942, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3711020903939628e-05, |
| "loss": 3.9305, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.370265133523284e-05, |
| "loss": 3.9329, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.369426538772232e-05, |
| "loss": 3.9344, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.36858794402118e-05, |
| "loss": 3.931, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3677493492701277e-05, |
| "loss": 3.9342, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3669107545190757e-05, |
| "loss": 3.9364, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3660737976483973e-05, |
| "loss": 3.9382, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3652368407777182e-05, |
| "loss": 3.9312, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.364398246026666e-05, |
| "loss": 3.9241, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.363559651275614e-05, |
| "loss": 3.9316, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.362721056524562e-05, |
| "loss": 3.9168, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3618824617735098e-05, |
| "loss": 3.9306, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.361043867022458e-05, |
| "loss": 3.9293, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.360205272271406e-05, |
| "loss": 3.9297, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.359366677520354e-05, |
| "loss": 3.9506, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.358529720649675e-05, |
| "loss": 3.9374, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.357691125898623e-05, |
| "loss": 3.9428, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.356852531147571e-05, |
| "loss": 3.934, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.356013936396519e-05, |
| "loss": 3.9359, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3551753416454674e-05, |
| "loss": 3.9298, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3543400226551615e-05, |
| "loss": 3.9342, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3535014279041095e-05, |
| "loss": 3.9278, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3526628331530572e-05, |
| "loss": 3.9277, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3518242384020052e-05, |
| "loss": 3.9239, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3509872815313268e-05, |
| "loss": 3.9268, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3501486867802748e-05, |
| "loss": 3.9304, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3493100920292224e-05, |
| "loss": 3.9385, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3484714972781704e-05, |
| "loss": 3.9331, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3476329025271184e-05, |
| "loss": 3.9399, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3467943077760664e-05, |
| "loss": 3.923, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3459573509053877e-05, |
| "loss": 3.93, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.345120394034709e-05, |
| "loss": 3.9283, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.344281799283657e-05, |
| "loss": 3.92, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3434432045326046e-05, |
| "loss": 3.9317, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3426046097815526e-05, |
| "loss": 3.9183, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3417660150305005e-05, |
| "loss": 3.9334, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.340927420279449e-05, |
| "loss": 3.9361, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.340088825528397e-05, |
| "loss": 3.9359, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.339250230777345e-05, |
| "loss": 3.9271, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3384116360262925e-05, |
| "loss": 3.94, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3375730412752405e-05, |
| "loss": 3.9176, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3367344465241885e-05, |
| "loss": 3.9376, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3358958517731365e-05, |
| "loss": 3.9257, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3350588949024578e-05, |
| "loss": 3.9071, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3342203001514058e-05, |
| "loss": 3.943, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3333817054003538e-05, |
| "loss": 3.9246, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3325431106493018e-05, |
| "loss": 3.9317, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3317045158982497e-05, |
| "loss": 3.9168, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3308659211471977e-05, |
| "loss": 3.922, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3300273263961457e-05, |
| "loss": 3.921, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3291887316450937e-05, |
| "loss": 3.9224, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.328351774774415e-05, |
| "loss": 3.9345, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.327513180023363e-05, |
| "loss": 3.9308, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.326676223152684e-05, |
| "loss": 3.9412, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.325837628401632e-05, |
| "loss": 3.9239, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.32499903365058e-05, |
| "loss": 3.9141, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3241604388995282e-05, |
| "loss": 3.9324, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.323321844148476e-05, |
| "loss": 3.9125, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.322483249397424e-05, |
| "loss": 3.9142, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.321644654646372e-05, |
| "loss": 3.9366, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.32080605989532e-05, |
| "loss": 3.9327, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.319967465144268e-05, |
| "loss": 3.9213, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.319130508273589e-05, |
| "loss": 3.9214, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.318291913522537e-05, |
| "loss": 3.9114, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.317454956651858e-05, |
| "loss": 3.9199, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.316616361900806e-05, |
| "loss": 3.9354, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.315777767149754e-05, |
| "loss": 3.9303, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.314939172398702e-05, |
| "loss": 3.929, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.31410057764765e-05, |
| "loss": 3.9277, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3132619828965983e-05, |
| "loss": 3.9445, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3124233881455463e-05, |
| "loss": 3.9188, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3115864312748672e-05, |
| "loss": 3.9334, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3107478365238152e-05, |
| "loss": 3.929, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3099092417727632e-05, |
| "loss": 3.9113, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3090706470217112e-05, |
| "loss": 3.9354, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3082320522706592e-05, |
| "loss": 3.9324, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3073950953999805e-05, |
| "loss": 3.9308, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3065565006489285e-05, |
| "loss": 3.9272, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.305717905897876e-05, |
| "loss": 3.9162, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.304879311146824e-05, |
| "loss": 3.9218, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.304040716395772e-05, |
| "loss": 3.9264, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3032021216447204e-05, |
| "loss": 3.9337, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3023651647740413e-05, |
| "loss": 3.9211, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3015265700229893e-05, |
| "loss": 3.9311, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3006879752719373e-05, |
| "loss": 3.9236, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2998510184012586e-05, |
| "loss": 3.9255, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2990124236502066e-05, |
| "loss": 3.9208, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2981738288991546e-05, |
| "loss": 3.9247, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2973352341481026e-05, |
| "loss": 3.9374, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2964966393970506e-05, |
| "loss": 3.938, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2956580446459986e-05, |
| "loss": 3.9405, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2948194498949466e-05, |
| "loss": 3.9099, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2939808551438942e-05, |
| "loss": 3.9295, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2931422603928422e-05, |
| "loss": 3.9237, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2923036656417905e-05, |
| "loss": 3.9326, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2914650708907385e-05, |
| "loss": 3.9243, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2906264761396865e-05, |
| "loss": 3.924, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2897878813886345e-05, |
| "loss": 3.9177, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2889509245179554e-05, |
| "loss": 3.9314, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2881139676472767e-05, |
| "loss": 3.9105, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2872753728962247e-05, |
| "loss": 3.9286, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2864367781451727e-05, |
| "loss": 3.9147, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2855981833941207e-05, |
| "loss": 3.933, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2847595886430687e-05, |
| "loss": 3.9238, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2839209938920167e-05, |
| "loss": 3.9243, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2830840370213376e-05, |
| "loss": 3.9249, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.282245442270286e-05, |
| "loss": 3.9261, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.281406847519234e-05, |
| "loss": 3.9273, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.280568252768182e-05, |
| "loss": 3.9246, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.27972965801713e-05, |
| "loss": 3.9268, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2788927011464508e-05, |
| "loss": 3.9456, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.278055744275772e-05, |
| "loss": 3.9264, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.27721714952472e-05, |
| "loss": 3.9202, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.276378554773668e-05, |
| "loss": 3.9253, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.275539960022616e-05, |
| "loss": 3.9274, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.274701365271564e-05, |
| "loss": 3.9178, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.273862770520512e-05, |
| "loss": 3.9298, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2730241757694597e-05, |
| "loss": 3.9159, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2721855810184077e-05, |
| "loss": 3.9336, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.271346986267356e-05, |
| "loss": 3.9322, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2705100293966773e-05, |
| "loss": 3.9248, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.269671434645625e-05, |
| "loss": 3.918, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.268832839894573e-05, |
| "loss": 3.9199, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.267994245143521e-05, |
| "loss": 3.9226, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.267155650392469e-05, |
| "loss": 3.9317, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.266317055641417e-05, |
| "loss": 3.9296, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2654784608903652e-05, |
| "loss": 3.9318, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.264639866139313e-05, |
| "loss": 3.9111, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.263802909268634e-05, |
| "loss": 3.9342, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.262964314517582e-05, |
| "loss": 3.9235, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.26212571976653e-05, |
| "loss": 3.9363, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2612887628958514e-05, |
| "loss": 3.9159, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2604501681447994e-05, |
| "loss": 3.9385, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2596115733937474e-05, |
| "loss": 3.9287, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2587729786426954e-05, |
| "loss": 3.9341, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.257934383891643e-05, |
| "loss": 3.9222, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.257095789140591e-05, |
| "loss": 3.9378, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.256257194389539e-05, |
| "loss": 3.9141, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.255418599638487e-05, |
| "loss": 3.929, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2545816427678083e-05, |
| "loss": 3.9304, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2537430480167563e-05, |
| "loss": 3.9233, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2529044532657043e-05, |
| "loss": 3.9265, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2520658585146523e-05, |
| "loss": 3.9212, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2512272637636002e-05, |
| "loss": 3.9302, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.014770030975342, |
| "eval_runtime": 299.9709, |
| "eval_samples_per_second": 1272.093, |
| "eval_steps_per_second": 39.754, |
| "step": 1679040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2503886690125482e-05, |
| "loss": 3.9263, |
| "step": 1679360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2495500742614962e-05, |
| "loss": 3.9183, |
| "step": 1679872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2487114795104442e-05, |
| "loss": 3.9321, |
| "step": 1680384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2478728847593922e-05, |
| "loss": 3.9237, |
| "step": 1680896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2470342900083402e-05, |
| "loss": 3.9332, |
| "step": 1681408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2461956952572882e-05, |
| "loss": 3.9253, |
| "step": 1681920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2453571005062362e-05, |
| "loss": 3.9273, |
| "step": 1682432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2445185057551842e-05, |
| "loss": 3.928, |
| "step": 1682944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2436799110041322e-05, |
| "loss": 3.9249, |
| "step": 1683456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2428413162530802e-05, |
| "loss": 3.9289, |
| "step": 1683968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2420027215020282e-05, |
| "loss": 3.9284, |
| "step": 1684480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2411641267509762e-05, |
| "loss": 3.9321, |
| "step": 1684992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.240327169880297e-05, |
| "loss": 3.9247, |
| "step": 1685504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.239488575129245e-05, |
| "loss": 3.9189, |
| "step": 1686016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.238649980378193e-05, |
| "loss": 3.9265, |
| "step": 1686528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2378113856271414e-05, |
| "loss": 3.9083, |
| "step": 1687040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2369727908760894e-05, |
| "loss": 3.9244, |
| "step": 1687552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2361358340054103e-05, |
| "loss": 3.9241, |
| "step": 1688064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2352972392543583e-05, |
| "loss": 3.9216, |
| "step": 1688576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2344586445033063e-05, |
| "loss": 3.9446, |
| "step": 1689088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2336200497522543e-05, |
| "loss": 3.9364, |
| "step": 1689600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2327814550012023e-05, |
| "loss": 3.9371, |
| "step": 1690112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2319428602501503e-05, |
| "loss": 3.9236, |
| "step": 1690624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2311042654990983e-05, |
| "loss": 3.9304, |
| "step": 1691136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2302656707480463e-05, |
| "loss": 3.9254, |
| "step": 1691648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2294287138773672e-05, |
| "loss": 3.9284, |
| "step": 1692160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2285917570066885e-05, |
| "loss": 3.9247, |
| "step": 1692672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2277531622556368e-05, |
| "loss": 3.9268, |
| "step": 1693184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2269145675045848e-05, |
| "loss": 3.9164, |
| "step": 1693696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2260759727535324e-05, |
| "loss": 3.9198, |
| "step": 1694208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2252373780024804e-05, |
| "loss": 3.9235, |
| "step": 1694720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2243987832514284e-05, |
| "loss": 3.9341, |
| "step": 1695232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2235601885003764e-05, |
| "loss": 3.9285, |
| "step": 1695744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2227215937493244e-05, |
| "loss": 3.9327, |
| "step": 1696256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2218829989982724e-05, |
| "loss": 3.9188, |
| "step": 1696768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2210460421275937e-05, |
| "loss": 3.921, |
| "step": 1697280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2202074473765417e-05, |
| "loss": 3.9255, |
| "step": 1697792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2193688526254897e-05, |
| "loss": 3.9163, |
| "step": 1698304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2185302578744377e-05, |
| "loss": 3.9234, |
| "step": 1698816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2176916631233853e-05, |
| "loss": 3.9157, |
| "step": 1699328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.216854706252707e-05, |
| "loss": 3.9246, |
| "step": 1699840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.216016111501655e-05, |
| "loss": 3.9303, |
| "step": 1700352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.215177516750603e-05, |
| "loss": 3.9261, |
| "step": 1700864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2143389219995506e-05, |
| "loss": 3.9237, |
| "step": 1701376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2135003272484985e-05, |
| "loss": 3.9289, |
| "step": 1701888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2126617324974465e-05, |
| "loss": 3.9179, |
| "step": 1702400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2118231377463945e-05, |
| "loss": 3.9289, |
| "step": 1702912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2109845429953425e-05, |
| "loss": 3.9208, |
| "step": 1703424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2101475861246638e-05, |
| "loss": 3.9006, |
| "step": 1703936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2093089913736118e-05, |
| "loss": 3.9431, |
| "step": 1704448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2084703966225598e-05, |
| "loss": 3.9142, |
| "step": 1704960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2076318018715078e-05, |
| "loss": 3.9268, |
| "step": 1705472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2067948450008287e-05, |
| "loss": 3.9068, |
| "step": 1705984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2059578881301503e-05, |
| "loss": 3.9178, |
| "step": 1706496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.205119293379098e-05, |
| "loss": 3.9101, |
| "step": 1707008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2042823365084192e-05, |
| "loss": 3.9194, |
| "step": 1707520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.203443741757367e-05, |
| "loss": 3.9237, |
| "step": 1708032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2026051470063148e-05, |
| "loss": 3.9242, |
| "step": 1708544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.201766552255263e-05, |
| "loss": 3.9337, |
| "step": 1709056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.200927957504211e-05, |
| "loss": 3.9181, |
| "step": 1709568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.200089362753159e-05, |
| "loss": 3.9075, |
| "step": 1710080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.199250768002107e-05, |
| "loss": 3.932, |
| "step": 1710592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.198412173251055e-05, |
| "loss": 3.906, |
| "step": 1711104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.197573578500003e-05, |
| "loss": 3.9047, |
| "step": 1711616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1967349837489508e-05, |
| "loss": 3.9305, |
| "step": 1712128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1958963889978988e-05, |
| "loss": 3.9284, |
| "step": 1712640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.195057794246847e-05, |
| "loss": 3.9146, |
| "step": 1713152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.194219199495795e-05, |
| "loss": 3.9185, |
| "step": 1713664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.193382242625116e-05, |
| "loss": 3.9042, |
| "step": 1714176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.192543647874064e-05, |
| "loss": 3.9143, |
| "step": 1714688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.191705053123012e-05, |
| "loss": 3.9257, |
| "step": 1715200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.19086645837196e-05, |
| "loss": 3.9233, |
| "step": 1715712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.190027863620908e-05, |
| "loss": 3.9272, |
| "step": 1716224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1891892688698563e-05, |
| "loss": 3.9245, |
| "step": 1716736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1883523119991773e-05, |
| "loss": 3.9339, |
| "step": 1717248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1875137172481253e-05, |
| "loss": 3.9112, |
| "step": 1717760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.186676760377446e-05, |
| "loss": 3.9286, |
| "step": 1718272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.185838165626394e-05, |
| "loss": 3.9186, |
| "step": 1718784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1849995708753425e-05, |
| "loss": 3.912, |
| "step": 1719296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1841609761242905e-05, |
| "loss": 3.9249, |
| "step": 1719808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1833223813732385e-05, |
| "loss": 3.929, |
| "step": 1720320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1824837866221865e-05, |
| "loss": 3.9248, |
| "step": 1720832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.181645191871134e-05, |
| "loss": 3.9231, |
| "step": 1721344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.180806597120082e-05, |
| "loss": 3.9093, |
| "step": 1721856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.17996800236903e-05, |
| "loss": 3.9139, |
| "step": 1722368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1791310454983514e-05, |
| "loss": 3.9233, |
| "step": 1722880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1782924507472994e-05, |
| "loss": 3.9268, |
| "step": 1723392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1774554938766206e-05, |
| "loss": 3.9177, |
| "step": 1723904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1766168991255686e-05, |
| "loss": 3.9267, |
| "step": 1724416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1757783043745163e-05, |
| "loss": 3.9103, |
| "step": 1724928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1749397096234646e-05, |
| "loss": 3.923, |
| "step": 1725440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.174102752752786e-05, |
| "loss": 3.9153, |
| "step": 1725952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.173264158001734e-05, |
| "loss": 3.9197, |
| "step": 1726464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1724255632506815e-05, |
| "loss": 3.9314, |
| "step": 1726976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1715869684996295e-05, |
| "loss": 3.9311, |
| "step": 1727488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1707483737485775e-05, |
| "loss": 3.9341, |
| "step": 1728000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1699097789975255e-05, |
| "loss": 3.9046, |
| "step": 1728512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1690711842464735e-05, |
| "loss": 3.9233, |
| "step": 1729024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1682325894954218e-05, |
| "loss": 3.9146, |
| "step": 1729536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1673956326247427e-05, |
| "loss": 3.9277, |
| "step": 1730048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1665586757540636e-05, |
| "loss": 3.9172, |
| "step": 1730560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1657200810030116e-05, |
| "loss": 3.9218, |
| "step": 1731072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1648814862519596e-05, |
| "loss": 3.9126, |
| "step": 1731584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.164042891500908e-05, |
| "loss": 3.9224, |
| "step": 1732096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.163205934630229e-05, |
| "loss": 3.908, |
| "step": 1732608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.162367339879177e-05, |
| "loss": 3.9197, |
| "step": 1733120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.161528745128125e-05, |
| "loss": 3.9095, |
| "step": 1733632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.160690150377073e-05, |
| "loss": 3.9267, |
| "step": 1734144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.159851555626021e-05, |
| "loss": 3.9162, |
| "step": 1734656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.159012960874969e-05, |
| "loss": 3.9173, |
| "step": 1735168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.158174366123917e-05, |
| "loss": 3.9181, |
| "step": 1735680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.157335771372865e-05, |
| "loss": 3.9221, |
| "step": 1736192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.156497176621813e-05, |
| "loss": 3.9203, |
| "step": 1736704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.155660219751134e-05, |
| "loss": 3.9164, |
| "step": 1737216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1548216250000817e-05, |
| "loss": 3.9258, |
| "step": 1737728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.15398303024903e-05, |
| "loss": 3.9333, |
| "step": 1738240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.153144435497978e-05, |
| "loss": 3.9257, |
| "step": 1738752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.152305840746926e-05, |
| "loss": 3.9161, |
| "step": 1739264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.151467245995874e-05, |
| "loss": 3.9139, |
| "step": 1739776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.150628651244822e-05, |
| "loss": 3.9224, |
| "step": 1740288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1497900564937697e-05, |
| "loss": 3.9113, |
| "step": 1740800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.148953099623091e-05, |
| "loss": 3.9245, |
| "step": 1741312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1481145048720393e-05, |
| "loss": 3.9099, |
| "step": 1741824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1472759101209873e-05, |
| "loss": 3.9299, |
| "step": 1742336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1464389532503082e-05, |
| "loss": 3.9245, |
| "step": 1742848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1456003584992562e-05, |
| "loss": 3.9203, |
| "step": 1743360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1447617637482042e-05, |
| "loss": 3.9126, |
| "step": 1743872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1439231689971522e-05, |
| "loss": 3.9125, |
| "step": 1744384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1430845742461002e-05, |
| "loss": 3.9171, |
| "step": 1744896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1422459794950482e-05, |
| "loss": 3.9276, |
| "step": 1745408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1414073847439962e-05, |
| "loss": 3.9227, |
| "step": 1745920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.140570427873317e-05, |
| "loss": 3.9222, |
| "step": 1746432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.139731833122265e-05, |
| "loss": 3.9081, |
| "step": 1746944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.138893238371213e-05, |
| "loss": 3.9285, |
| "step": 1747456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.138054643620161e-05, |
| "loss": 3.918, |
| "step": 1747968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1372160488691094e-05, |
| "loss": 3.9314, |
| "step": 1748480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1363774541180574e-05, |
| "loss": 3.9113, |
| "step": 1748992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1355388593670054e-05, |
| "loss": 3.9283, |
| "step": 1749504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.134700264615953e-05, |
| "loss": 3.9226, |
| "step": 1750016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1338633077452743e-05, |
| "loss": 3.9344, |
| "step": 1750528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1330247129942223e-05, |
| "loss": 3.9156, |
| "step": 1751040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1321861182431703e-05, |
| "loss": 3.9257, |
| "step": 1751552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1313475234921183e-05, |
| "loss": 3.9101, |
| "step": 1752064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1305105666214395e-05, |
| "loss": 3.9234, |
| "step": 1752576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1296719718703875e-05, |
| "loss": 3.9223, |
| "step": 1753088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1288333771193352e-05, |
| "loss": 3.9146, |
| "step": 1753600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1279947823682832e-05, |
| "loss": 3.9262, |
| "step": 1754112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1271561876172312e-05, |
| "loss": 3.91, |
| "step": 1754624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1263192307465528e-05, |
| "loss": 3.9262, |
| "step": 1755136 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.012825012207031, |
| "eval_runtime": 507.2286, |
| "eval_samples_per_second": 752.306, |
| "eval_steps_per_second": 23.51, |
| "step": 1755360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1254822738758737e-05, |
| "loss": 3.9223, |
| "step": 1755648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1246436791248217e-05, |
| "loss": 3.9115, |
| "step": 1756160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1238050843737697e-05, |
| "loss": 3.9261, |
| "step": 1756672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1229664896227177e-05, |
| "loss": 3.9154, |
| "step": 1757184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1221278948716657e-05, |
| "loss": 3.929, |
| "step": 1757696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.121290938000987e-05, |
| "loss": 3.9195, |
| "step": 1758208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.120452343249935e-05, |
| "loss": 3.9204, |
| "step": 1758720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1196137484988826e-05, |
| "loss": 3.9239, |
| "step": 1759232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1187751537478306e-05, |
| "loss": 3.9181, |
| "step": 1759744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1179365589967786e-05, |
| "loss": 3.9245, |
| "step": 1760256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1170979642457266e-05, |
| "loss": 3.9219, |
| "step": 1760768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.116259369494675e-05, |
| "loss": 3.9259, |
| "step": 1761280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1154224126239958e-05, |
| "loss": 3.9221, |
| "step": 1761792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1145838178729438e-05, |
| "loss": 3.9125, |
| "step": 1762304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1137452231218918e-05, |
| "loss": 3.9142, |
| "step": 1762816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1129066283708398e-05, |
| "loss": 3.9071, |
| "step": 1763328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1120680336197878e-05, |
| "loss": 3.9162, |
| "step": 1763840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1112327146294823e-05, |
| "loss": 3.9217, |
| "step": 1764352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.11039411987843e-05, |
| "loss": 3.9126, |
| "step": 1764864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.109555525127378e-05, |
| "loss": 3.9355, |
| "step": 1765376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.108716930376326e-05, |
| "loss": 3.9305, |
| "step": 1765888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.107878335625274e-05, |
| "loss": 3.9326, |
| "step": 1766400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.107039740874222e-05, |
| "loss": 3.9203, |
| "step": 1766912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1062011461231703e-05, |
| "loss": 3.9188, |
| "step": 1767424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1053625513721183e-05, |
| "loss": 3.9255, |
| "step": 1767936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.104523956621066e-05, |
| "loss": 3.9191, |
| "step": 1768448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.103686999750387e-05, |
| "loss": 3.9164, |
| "step": 1768960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.102848404999335e-05, |
| "loss": 3.9242, |
| "step": 1769472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1020098102482828e-05, |
| "loss": 3.9072, |
| "step": 1769984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.101171215497231e-05, |
| "loss": 3.9134, |
| "step": 1770496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.100332620746179e-05, |
| "loss": 3.9178, |
| "step": 1771008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0994956638755004e-05, |
| "loss": 3.9242, |
| "step": 1771520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.098657069124448e-05, |
| "loss": 3.9264, |
| "step": 1772032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.097818474373396e-05, |
| "loss": 3.9265, |
| "step": 1772544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.096979879622344e-05, |
| "loss": 3.9118, |
| "step": 1773056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0961429227516656e-05, |
| "loss": 3.9147, |
| "step": 1773568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0953043280006133e-05, |
| "loss": 3.9228, |
| "step": 1774080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0944657332495613e-05, |
| "loss": 3.9103, |
| "step": 1774592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0936271384985093e-05, |
| "loss": 3.9181, |
| "step": 1775104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0927885437474573e-05, |
| "loss": 3.9066, |
| "step": 1775616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0919499489964053e-05, |
| "loss": 3.9215, |
| "step": 1776128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0911113542453533e-05, |
| "loss": 3.9194, |
| "step": 1776640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0902727594943013e-05, |
| "loss": 3.9217, |
| "step": 1777152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0894341647432492e-05, |
| "loss": 3.921, |
| "step": 1777664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0885955699921972e-05, |
| "loss": 3.9192, |
| "step": 1778176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0877569752411452e-05, |
| "loss": 3.9143, |
| "step": 1778688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0869183804900932e-05, |
| "loss": 3.9215, |
| "step": 1779200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0860797857390412e-05, |
| "loss": 3.9159, |
| "step": 1779712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.085242828868362e-05, |
| "loss": 3.8966, |
| "step": 1780224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0844042341173105e-05, |
| "loss": 3.9353, |
| "step": 1780736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0835656393662585e-05, |
| "loss": 3.9089, |
| "step": 1781248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0827270446152065e-05, |
| "loss": 3.9198, |
| "step": 1781760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0818917256249006e-05, |
| "loss": 3.9066, |
| "step": 1782272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0810531308738486e-05, |
| "loss": 3.9093, |
| "step": 1782784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0802145361227966e-05, |
| "loss": 3.9024, |
| "step": 1783296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.079377579252118e-05, |
| "loss": 3.9112, |
| "step": 1783808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0785406223814388e-05, |
| "loss": 3.9223, |
| "step": 1784320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0777020276303868e-05, |
| "loss": 3.915, |
| "step": 1784832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0768634328793348e-05, |
| "loss": 3.9298, |
| "step": 1785344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0760248381282828e-05, |
| "loss": 3.9181, |
| "step": 1785856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.075186243377231e-05, |
| "loss": 3.899, |
| "step": 1786368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0743476486261788e-05, |
| "loss": 3.9191, |
| "step": 1786880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0735090538751267e-05, |
| "loss": 3.9032, |
| "step": 1787392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0726704591240747e-05, |
| "loss": 3.8968, |
| "step": 1787904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0718318643730227e-05, |
| "loss": 3.929, |
| "step": 1788416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0709932696219707e-05, |
| "loss": 3.9206, |
| "step": 1788928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0701546748709187e-05, |
| "loss": 3.9084, |
| "step": 1789440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0693160801198667e-05, |
| "loss": 3.9144, |
| "step": 1789952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0684774853688147e-05, |
| "loss": 3.9015, |
| "step": 1790464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.067642166378509e-05, |
| "loss": 3.9078, |
| "step": 1790976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.066803571627457e-05, |
| "loss": 3.9174, |
| "step": 1791488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.065964976876405e-05, |
| "loss": 3.9172, |
| "step": 1792000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.065126382125353e-05, |
| "loss": 3.9215, |
| "step": 1792512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0642877873743012e-05, |
| "loss": 3.9144, |
| "step": 1793024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0634491926232492e-05, |
| "loss": 3.9318, |
| "step": 1793536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.062610597872197e-05, |
| "loss": 3.9029, |
| "step": 1794048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.061772003121145e-05, |
| "loss": 3.9263, |
| "step": 1794560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.060933408370093e-05, |
| "loss": 3.9103, |
| "step": 1795072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.060094813619041e-05, |
| "loss": 3.9087, |
| "step": 1795584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.059256218867989e-05, |
| "loss": 3.9177, |
| "step": 1796096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.058417624116937e-05, |
| "loss": 3.9219, |
| "step": 1796608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.057580667246258e-05, |
| "loss": 3.9194, |
| "step": 1797120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.056742072495206e-05, |
| "loss": 3.9214, |
| "step": 1797632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.055905115624527e-05, |
| "loss": 3.9, |
| "step": 1798144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.055066520873475e-05, |
| "loss": 3.9066, |
| "step": 1798656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0542279261224233e-05, |
| "loss": 3.9175, |
| "step": 1799168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0533909692517442e-05, |
| "loss": 3.9213, |
| "step": 1799680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0525523745006922e-05, |
| "loss": 3.9134, |
| "step": 1800192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0517137797496402e-05, |
| "loss": 3.9177, |
| "step": 1800704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0508751849985882e-05, |
| "loss": 3.9049, |
| "step": 1801216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0500365902475362e-05, |
| "loss": 3.9184, |
| "step": 1801728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0491979954964842e-05, |
| "loss": 3.9107, |
| "step": 1802240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0483594007454322e-05, |
| "loss": 3.9133, |
| "step": 1802752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0475208059943802e-05, |
| "loss": 3.9233, |
| "step": 1803264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0466822112433282e-05, |
| "loss": 3.9241, |
| "step": 1803776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0458436164922762e-05, |
| "loss": 3.9279, |
| "step": 1804288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0450050217412242e-05, |
| "loss": 3.9032, |
| "step": 1804800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.044168064870545e-05, |
| "loss": 3.9151, |
| "step": 1805312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0433294701194934e-05, |
| "loss": 3.9122, |
| "step": 1805824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0424908753684414e-05, |
| "loss": 3.9184, |
| "step": 1806336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0416522806173894e-05, |
| "loss": 3.9109, |
| "step": 1806848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0408136858663374e-05, |
| "loss": 3.9163, |
| "step": 1807360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.039975091115285e-05, |
| "loss": 3.909, |
| "step": 1807872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.039136496364233e-05, |
| "loss": 3.9125, |
| "step": 1808384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.038297901613181e-05, |
| "loss": 3.9065, |
| "step": 1808896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.037459306862129e-05, |
| "loss": 3.911, |
| "step": 1809408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0366223499914503e-05, |
| "loss": 3.9013, |
| "step": 1809920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0357837552403983e-05, |
| "loss": 3.92, |
| "step": 1810432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0349451604893463e-05, |
| "loss": 3.9121, |
| "step": 1810944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0341065657382943e-05, |
| "loss": 3.9137, |
| "step": 1811456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0332696088676152e-05, |
| "loss": 3.9118, |
| "step": 1811968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0324310141165635e-05, |
| "loss": 3.9155, |
| "step": 1812480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0315940572458848e-05, |
| "loss": 3.9144, |
| "step": 1812992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0307554624948324e-05, |
| "loss": 3.9105, |
| "step": 1813504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0299168677437804e-05, |
| "loss": 3.9216, |
| "step": 1814016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0290782729927284e-05, |
| "loss": 3.9269, |
| "step": 1814528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0282413161220497e-05, |
| "loss": 3.912, |
| "step": 1815040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0274027213709977e-05, |
| "loss": 3.9175, |
| "step": 1815552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0265641266199457e-05, |
| "loss": 3.9082, |
| "step": 1816064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0257255318688937e-05, |
| "loss": 3.9123, |
| "step": 1816576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0248869371178417e-05, |
| "loss": 3.915, |
| "step": 1817088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0240499802471626e-05, |
| "loss": 3.9156, |
| "step": 1817600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0232113854961106e-05, |
| "loss": 3.8986, |
| "step": 1818112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.022372790745059e-05, |
| "loss": 3.9213, |
| "step": 1818624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.021534195994007e-05, |
| "loss": 3.9197, |
| "step": 1819136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.020695601242955e-05, |
| "loss": 3.9164, |
| "step": 1819648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.019857006491903e-05, |
| "loss": 3.9048, |
| "step": 1820160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0190184117408505e-05, |
| "loss": 3.9082, |
| "step": 1820672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0181798169897985e-05, |
| "loss": 3.9083, |
| "step": 1821184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0173428601191198e-05, |
| "loss": 3.9199, |
| "step": 1821696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.016504265368068e-05, |
| "loss": 3.9161, |
| "step": 1822208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.015667308497389e-05, |
| "loss": 3.9197, |
| "step": 1822720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.014828713746337e-05, |
| "loss": 3.9003, |
| "step": 1823232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.013990118995285e-05, |
| "loss": 3.9227, |
| "step": 1823744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.013151524244233e-05, |
| "loss": 3.9137, |
| "step": 1824256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0123145673735543e-05, |
| "loss": 3.9243, |
| "step": 1824768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0114759726225023e-05, |
| "loss": 3.906, |
| "step": 1825280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0106373778714503e-05, |
| "loss": 3.9189, |
| "step": 1825792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.009798783120398e-05, |
| "loss": 3.9144, |
| "step": 1826304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.008960188369346e-05, |
| "loss": 3.9304, |
| "step": 1826816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.008121593618294e-05, |
| "loss": 3.9104, |
| "step": 1827328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.007282998867242e-05, |
| "loss": 3.9179, |
| "step": 1827840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.00644440411619e-05, |
| "loss": 3.9105, |
| "step": 1828352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0056058093651382e-05, |
| "loss": 3.9182, |
| "step": 1828864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0047672146140862e-05, |
| "loss": 3.9115, |
| "step": 1829376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.003928619863034e-05, |
| "loss": 3.9105, |
| "step": 1829888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.003091662992355e-05, |
| "loss": 3.9211, |
| "step": 1830400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.002253068241303e-05, |
| "loss": 3.9026, |
| "step": 1830912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.001414473490251e-05, |
| "loss": 3.9228, |
| "step": 1831424 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.009833812713623, |
| "eval_runtime": 507.281, |
| "eval_samples_per_second": 752.228, |
| "eval_steps_per_second": 23.508, |
| "step": 1831680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.000575878739199e-05, |
| "loss": 3.9138, |
| "step": 1831936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.999737283988147e-05, |
| "loss": 3.9075, |
| "step": 1832448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.998898689237095e-05, |
| "loss": 3.9155, |
| "step": 1832960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.998060094486043e-05, |
| "loss": 3.912, |
| "step": 1833472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.997221499734991e-05, |
| "loss": 3.9236, |
| "step": 1833984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.996382904983939e-05, |
| "loss": 3.9141, |
| "step": 1834496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9955443102328868e-05, |
| "loss": 3.9155, |
| "step": 1835008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.994705715481835e-05, |
| "loss": 3.9178, |
| "step": 1835520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.993867120730783e-05, |
| "loss": 3.9114, |
| "step": 1836032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.993028525979731e-05, |
| "loss": 3.9186, |
| "step": 1836544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.992189931228679e-05, |
| "loss": 3.9164, |
| "step": 1837056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.991351336477627e-05, |
| "loss": 3.9182, |
| "step": 1837568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.990514379606948e-05, |
| "loss": 3.9194, |
| "step": 1838080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.989675784855896e-05, |
| "loss": 3.9008, |
| "step": 1838592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9888371901048443e-05, |
| "loss": 3.9114, |
| "step": 1839104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9879985953537923e-05, |
| "loss": 3.8977, |
| "step": 1839616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.98716000060274e-05, |
| "loss": 3.9125, |
| "step": 1840128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.986321405851688e-05, |
| "loss": 3.9147, |
| "step": 1840640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9854844489810092e-05, |
| "loss": 3.9064, |
| "step": 1841152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9846458542299572e-05, |
| "loss": 3.9291, |
| "step": 1841664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9838072594789052e-05, |
| "loss": 3.9254, |
| "step": 1842176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9829686647278532e-05, |
| "loss": 3.9243, |
| "step": 1842688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9821300699768012e-05, |
| "loss": 3.917, |
| "step": 1843200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9812914752257492e-05, |
| "loss": 3.9148, |
| "step": 1843712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9804528804746972e-05, |
| "loss": 3.9218, |
| "step": 1844224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9796142857236452e-05, |
| "loss": 3.9095, |
| "step": 1844736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9787756909725932e-05, |
| "loss": 3.9099, |
| "step": 1845248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.977937096221541e-05, |
| "loss": 3.9176, |
| "step": 1845760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.977098501470489e-05, |
| "loss": 3.9035, |
| "step": 1846272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.9762615445998104e-05, |
| "loss": 3.9075, |
| "step": 1846784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.975422949848758e-05, |
| "loss": 3.9143, |
| "step": 1847296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.974584355097706e-05, |
| "loss": 3.9182, |
| "step": 1847808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.973745760346654e-05, |
| "loss": 3.9143, |
| "step": 1848320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.972907165595602e-05, |
| "loss": 3.9251, |
| "step": 1848832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.97206857084455e-05, |
| "loss": 3.9063, |
| "step": 1849344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9712316139738713e-05, |
| "loss": 3.9086, |
| "step": 1849856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9703930192228193e-05, |
| "loss": 3.9148, |
| "step": 1850368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9695544244717673e-05, |
| "loss": 3.9005, |
| "step": 1850880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9687158297207153e-05, |
| "loss": 3.9155, |
| "step": 1851392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9678772349696633e-05, |
| "loss": 3.9055, |
| "step": 1851904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9670386402186113e-05, |
| "loss": 3.9104, |
| "step": 1852416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9662000454675593e-05, |
| "loss": 3.9157, |
| "step": 1852928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9653630885968805e-05, |
| "loss": 3.9162, |
| "step": 1853440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9645244938458285e-05, |
| "loss": 3.9166, |
| "step": 1853952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9636858990947762e-05, |
| "loss": 3.9142, |
| "step": 1854464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9628473043437242e-05, |
| "loss": 3.9095, |
| "step": 1854976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.962008709592672e-05, |
| "loss": 3.9124, |
| "step": 1855488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.96117011484162e-05, |
| "loss": 3.9145, |
| "step": 1856000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9603315200905685e-05, |
| "loss": 3.8883, |
| "step": 1856512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9594929253395165e-05, |
| "loss": 3.9243, |
| "step": 1857024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9586559684688374e-05, |
| "loss": 3.9061, |
| "step": 1857536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9578173737177854e-05, |
| "loss": 3.9167, |
| "step": 1858048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9569787789667334e-05, |
| "loss": 3.902, |
| "step": 1858560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9561401842156814e-05, |
| "loss": 3.9011, |
| "step": 1859072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9553032273450026e-05, |
| "loss": 3.9005, |
| "step": 1859584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9544662704743235e-05, |
| "loss": 3.902, |
| "step": 1860096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9536276757232715e-05, |
| "loss": 3.9202, |
| "step": 1860608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9527890809722195e-05, |
| "loss": 3.9052, |
| "step": 1861120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9519504862211675e-05, |
| "loss": 3.9259, |
| "step": 1861632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9511118914701155e-05, |
| "loss": 3.9128, |
| "step": 1862144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.950273296719064e-05, |
| "loss": 3.893, |
| "step": 1862656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.949434701968012e-05, |
| "loss": 3.912, |
| "step": 1863168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9485961072169595e-05, |
| "loss": 3.905, |
| "step": 1863680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9477575124659075e-05, |
| "loss": 3.8905, |
| "step": 1864192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9469189177148555e-05, |
| "loss": 3.917, |
| "step": 1864704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9460803229638035e-05, |
| "loss": 3.9143, |
| "step": 1865216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9452417282127515e-05, |
| "loss": 3.9037, |
| "step": 1865728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9444047713420727e-05, |
| "loss": 3.9083, |
| "step": 1866240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9435661765910207e-05, |
| "loss": 3.892, |
| "step": 1866752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9427275818399687e-05, |
| "loss": 3.907, |
| "step": 1867264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9418889870889167e-05, |
| "loss": 3.9107, |
| "step": 1867776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9410520302182376e-05, |
| "loss": 3.9122, |
| "step": 1868288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.940213435467186e-05, |
| "loss": 3.919, |
| "step": 1868800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.939374840716134e-05, |
| "loss": 3.9062, |
| "step": 1869312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.938536245965082e-05, |
| "loss": 3.9247, |
| "step": 1869824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.93769765121403e-05, |
| "loss": 3.9039, |
| "step": 1870336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.936860694343351e-05, |
| "loss": 3.9162, |
| "step": 1870848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.936022099592299e-05, |
| "loss": 3.9027, |
| "step": 1871360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.93518514272162e-05, |
| "loss": 3.9035, |
| "step": 1871872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.934346547970568e-05, |
| "loss": 3.913, |
| "step": 1872384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.933507953219516e-05, |
| "loss": 3.9185, |
| "step": 1872896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.932669358468464e-05, |
| "loss": 3.9128, |
| "step": 1873408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.931830763717412e-05, |
| "loss": 3.9171, |
| "step": 1873920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9309921689663598e-05, |
| "loss": 3.8928, |
| "step": 1874432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9301552120956813e-05, |
| "loss": 3.9, |
| "step": 1874944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9293166173446293e-05, |
| "loss": 3.9136, |
| "step": 1875456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9284780225935773e-05, |
| "loss": 3.915, |
| "step": 1875968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.927639427842525e-05, |
| "loss": 3.9068, |
| "step": 1876480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.926804108852219e-05, |
| "loss": 3.9132, |
| "step": 1876992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9259655141011675e-05, |
| "loss": 3.9003, |
| "step": 1877504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9251269193501155e-05, |
| "loss": 3.912, |
| "step": 1878016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9242883245990635e-05, |
| "loss": 3.9047, |
| "step": 1878528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9234497298480115e-05, |
| "loss": 3.9083, |
| "step": 1879040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9226111350969595e-05, |
| "loss": 3.9171, |
| "step": 1879552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.921772540345907e-05, |
| "loss": 3.9194, |
| "step": 1880064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.920933945594855e-05, |
| "loss": 3.9225, |
| "step": 1880576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.920095350843803e-05, |
| "loss": 3.8998, |
| "step": 1881088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9192567560927515e-05, |
| "loss": 3.9091, |
| "step": 1881600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9184197992220724e-05, |
| "loss": 3.9074, |
| "step": 1882112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9175812044710204e-05, |
| "loss": 3.9128, |
| "step": 1882624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9167426097199684e-05, |
| "loss": 3.9084, |
| "step": 1883136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9159040149689164e-05, |
| "loss": 3.9096, |
| "step": 1883648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9150654202178643e-05, |
| "loss": 3.907, |
| "step": 1884160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9142268254668123e-05, |
| "loss": 3.9059, |
| "step": 1884672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9133882307157603e-05, |
| "loss": 3.9026, |
| "step": 1885184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9125496359647083e-05, |
| "loss": 3.9053, |
| "step": 1885696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9117126790940296e-05, |
| "loss": 3.8979, |
| "step": 1886208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9108757222233505e-05, |
| "loss": 3.9152, |
| "step": 1886720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9100371274722985e-05, |
| "loss": 3.9085, |
| "step": 1887232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9091985327212468e-05, |
| "loss": 3.9039, |
| "step": 1887744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9083599379701948e-05, |
| "loss": 3.9097, |
| "step": 1888256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9075213432191428e-05, |
| "loss": 3.9059, |
| "step": 1888768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9066843863484637e-05, |
| "loss": 3.9113, |
| "step": 1889280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9058457915974117e-05, |
| "loss": 3.9045, |
| "step": 1889792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9050071968463597e-05, |
| "loss": 3.9179, |
| "step": 1890304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9041686020953077e-05, |
| "loss": 3.9192, |
| "step": 1890816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9033300073442557e-05, |
| "loss": 3.9122, |
| "step": 1891328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.902493050473577e-05, |
| "loss": 3.9097, |
| "step": 1891840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.901654455722525e-05, |
| "loss": 3.906, |
| "step": 1892352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9008158609714726e-05, |
| "loss": 3.9048, |
| "step": 1892864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8999772662204206e-05, |
| "loss": 3.9095, |
| "step": 1893376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8991386714693686e-05, |
| "loss": 3.9112, |
| "step": 1893888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.898300076718317e-05, |
| "loss": 3.8932, |
| "step": 1894400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.897463119847638e-05, |
| "loss": 3.9189, |
| "step": 1894912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.896624525096586e-05, |
| "loss": 3.9122, |
| "step": 1895424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8957859303455338e-05, |
| "loss": 3.9117, |
| "step": 1895936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8949473355944818e-05, |
| "loss": 3.9033, |
| "step": 1896448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8941120166041763e-05, |
| "loss": 3.9041, |
| "step": 1896960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8932734218531243e-05, |
| "loss": 3.9036, |
| "step": 1897472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8924348271020723e-05, |
| "loss": 3.9125, |
| "step": 1897984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.89159623235102e-05, |
| "loss": 3.9118, |
| "step": 1898496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.890757637599968e-05, |
| "loss": 3.9147, |
| "step": 1899008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.889919042848916e-05, |
| "loss": 3.8928, |
| "step": 1899520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.889080448097864e-05, |
| "loss": 3.9171, |
| "step": 1900032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8882418533468123e-05, |
| "loss": 3.9079, |
| "step": 1900544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8874032585957603e-05, |
| "loss": 3.9217, |
| "step": 1901056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8865646638447083e-05, |
| "loss": 3.902, |
| "step": 1901568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.885726069093656e-05, |
| "loss": 3.9083, |
| "step": 1902080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8848891122229772e-05, |
| "loss": 3.9123, |
| "step": 1902592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8840505174719252e-05, |
| "loss": 3.9217, |
| "step": 1903104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8832119227208732e-05, |
| "loss": 3.9053, |
| "step": 1903616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8823749658501944e-05, |
| "loss": 3.9135, |
| "step": 1904128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8815363710991424e-05, |
| "loss": 3.9075, |
| "step": 1904640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8806977763480904e-05, |
| "loss": 3.9178, |
| "step": 1905152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.879859181597038e-05, |
| "loss": 3.9032, |
| "step": 1905664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.879020586845986e-05, |
| "loss": 3.9037, |
| "step": 1906176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.878181992094934e-05, |
| "loss": 3.9122, |
| "step": 1906688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8773450352242557e-05, |
| "loss": 3.9029, |
| "step": 1907200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8765064404732033e-05, |
| "loss": 3.9153, |
| "step": 1907712 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.008368968963623, |
| "eval_runtime": 283.8799, |
| "eval_samples_per_second": 1344.199, |
| "eval_steps_per_second": 42.007, |
| "step": 1908000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8756678457221513e-05, |
| "loss": 3.906, |
| "step": 1908224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8748292509710993e-05, |
| "loss": 3.9031, |
| "step": 1908736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8739906562200473e-05, |
| "loss": 3.9119, |
| "step": 1909248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8731520614689953e-05, |
| "loss": 3.9073, |
| "step": 1909760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8723134667179433e-05, |
| "loss": 3.9197, |
| "step": 1910272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8714748719668913e-05, |
| "loss": 3.9101, |
| "step": 1910784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8706362772158393e-05, |
| "loss": 3.9068, |
| "step": 1911296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8697976824647873e-05, |
| "loss": 3.915, |
| "step": 1911808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8689590877137353e-05, |
| "loss": 3.9044, |
| "step": 1912320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8681204929626833e-05, |
| "loss": 3.9118, |
| "step": 1912832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8672818982116313e-05, |
| "loss": 3.9131, |
| "step": 1913344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8664465792213258e-05, |
| "loss": 3.9129, |
| "step": 1913856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8656096223506467e-05, |
| "loss": 3.9151, |
| "step": 1914368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8647710275995947e-05, |
| "loss": 3.8985, |
| "step": 1914880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8639324328485427e-05, |
| "loss": 3.9042, |
| "step": 1915392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8630938380974907e-05, |
| "loss": 3.8918, |
| "step": 1915904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8622552433464387e-05, |
| "loss": 3.908, |
| "step": 1916416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8614166485953867e-05, |
| "loss": 3.9135, |
| "step": 1916928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8605780538443347e-05, |
| "loss": 3.8984, |
| "step": 1917440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8597394590932826e-05, |
| "loss": 3.9244, |
| "step": 1917952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8589008643422306e-05, |
| "loss": 3.918, |
| "step": 1918464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8580622695911786e-05, |
| "loss": 3.9183, |
| "step": 1918976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8572236748401266e-05, |
| "loss": 3.9125, |
| "step": 1919488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8563850800890746e-05, |
| "loss": 3.9104, |
| "step": 1920000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.855548123218396e-05, |
| "loss": 3.9115, |
| "step": 1920512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8547111663477168e-05, |
| "loss": 3.9097, |
| "step": 1921024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8538725715966648e-05, |
| "loss": 3.8999, |
| "step": 1921536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8530339768456128e-05, |
| "loss": 3.9156, |
| "step": 1922048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8521953820945608e-05, |
| "loss": 3.8983, |
| "step": 1922560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.8513567873435088e-05, |
| "loss": 3.9042, |
| "step": 1923072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8505181925924568e-05, |
| "loss": 3.9097, |
| "step": 1923584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.849681235721778e-05, |
| "loss": 3.9128, |
| "step": 1924096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.848842640970726e-05, |
| "loss": 3.9084, |
| "step": 1924608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.848004046219674e-05, |
| "loss": 3.917, |
| "step": 1925120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8471654514686217e-05, |
| "loss": 3.9044, |
| "step": 1925632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8463284945979432e-05, |
| "loss": 3.9004, |
| "step": 1926144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8454898998468912e-05, |
| "loss": 3.9126, |
| "step": 1926656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.844651305095839e-05, |
| "loss": 3.8949, |
| "step": 1927168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.843812710344787e-05, |
| "loss": 3.9082, |
| "step": 1927680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.842974115593735e-05, |
| "loss": 3.9071, |
| "step": 1928192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.842135520842683e-05, |
| "loss": 3.9053, |
| "step": 1928704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.841296926091631e-05, |
| "loss": 3.9094, |
| "step": 1929216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.840459969220952e-05, |
| "loss": 3.9108, |
| "step": 1929728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8396213744699e-05, |
| "loss": 3.9118, |
| "step": 1930240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.838782779718848e-05, |
| "loss": 3.9072, |
| "step": 1930752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.837944184967796e-05, |
| "loss": 3.9031, |
| "step": 1931264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.837105590216744e-05, |
| "loss": 3.909, |
| "step": 1931776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.836266995465692e-05, |
| "loss": 3.9093, |
| "step": 1932288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.83542840071464e-05, |
| "loss": 3.8867, |
| "step": 1932800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.834589805963588e-05, |
| "loss": 3.917, |
| "step": 1933312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.833751211212536e-05, |
| "loss": 3.9019, |
| "step": 1933824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.832912616461484e-05, |
| "loss": 3.9078, |
| "step": 1934336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.832074021710432e-05, |
| "loss": 3.8974, |
| "step": 1934848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.83123542695938e-05, |
| "loss": 3.8971, |
| "step": 1935360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.830398470088701e-05, |
| "loss": 3.8983, |
| "step": 1935872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8295615132180222e-05, |
| "loss": 3.8965, |
| "step": 1936384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8287229184669702e-05, |
| "loss": 3.9143, |
| "step": 1936896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8278843237159182e-05, |
| "loss": 3.8997, |
| "step": 1937408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8270457289648662e-05, |
| "loss": 3.922, |
| "step": 1937920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8262071342138142e-05, |
| "loss": 3.9101, |
| "step": 1938432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8253701773431355e-05, |
| "loss": 3.8856, |
| "step": 1938944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8245315825920835e-05, |
| "loss": 3.9047, |
| "step": 1939456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8236946257214044e-05, |
| "loss": 3.8953, |
| "step": 1939968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8228560309703524e-05, |
| "loss": 3.8937, |
| "step": 1940480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8220174362193004e-05, |
| "loss": 3.9104, |
| "step": 1940992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8211788414682484e-05, |
| "loss": 3.9065, |
| "step": 1941504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8203402467171964e-05, |
| "loss": 3.8992, |
| "step": 1942016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8195016519661447e-05, |
| "loss": 3.9047, |
| "step": 1942528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8186630572150927e-05, |
| "loss": 3.8904, |
| "step": 1943040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8178244624640403e-05, |
| "loss": 3.8956, |
| "step": 1943552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8169858677129883e-05, |
| "loss": 3.9055, |
| "step": 1944064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8161489108423096e-05, |
| "loss": 3.9082, |
| "step": 1944576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8153103160912572e-05, |
| "loss": 3.9119, |
| "step": 1945088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8144717213402056e-05, |
| "loss": 3.9003, |
| "step": 1945600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8136331265891536e-05, |
| "loss": 3.9205, |
| "step": 1946112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8127945318381016e-05, |
| "loss": 3.8972, |
| "step": 1946624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8119575749674225e-05, |
| "loss": 3.917, |
| "step": 1947136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8111189802163705e-05, |
| "loss": 3.8944, |
| "step": 1947648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8102803854653185e-05, |
| "loss": 3.8993, |
| "step": 1948160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8094417907142665e-05, |
| "loss": 3.9035, |
| "step": 1948672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8086048338435877e-05, |
| "loss": 3.9139, |
| "step": 1949184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8077662390925357e-05, |
| "loss": 3.9084, |
| "step": 1949696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8069276443414837e-05, |
| "loss": 3.9115, |
| "step": 1950208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8060890495904317e-05, |
| "loss": 3.8872, |
| "step": 1950720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8052520927197526e-05, |
| "loss": 3.8907, |
| "step": 1951232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.804413497968701e-05, |
| "loss": 3.9091, |
| "step": 1951744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.8035765410980222e-05, |
| "loss": 3.9084, |
| "step": 1952256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.80273794634697e-05, |
| "loss": 3.8997, |
| "step": 1952768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.801899351595918e-05, |
| "loss": 3.9086, |
| "step": 1953280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.801060756844866e-05, |
| "loss": 3.9015, |
| "step": 1953792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.800222162093814e-05, |
| "loss": 3.9017, |
| "step": 1954304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.799383567342762e-05, |
| "loss": 3.8991, |
| "step": 1954816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.79854497259171e-05, |
| "loss": 3.9021, |
| "step": 1955328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.797706377840658e-05, |
| "loss": 3.9134, |
| "step": 1955840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7968677830896058e-05, |
| "loss": 3.9153, |
| "step": 1956352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7960291883385538e-05, |
| "loss": 3.9157, |
| "step": 1956864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.795192231467875e-05, |
| "loss": 3.8943, |
| "step": 1957376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7943536367168227e-05, |
| "loss": 3.9025, |
| "step": 1957888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7935166798461443e-05, |
| "loss": 3.9017, |
| "step": 1958400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7926780850950923e-05, |
| "loss": 3.9072, |
| "step": 1958912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7918394903440403e-05, |
| "loss": 3.9057, |
| "step": 1959424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.791000895592988e-05, |
| "loss": 3.9008, |
| "step": 1959936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.790162300841936e-05, |
| "loss": 3.9032, |
| "step": 1960448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.789323706090884e-05, |
| "loss": 3.8973, |
| "step": 1960960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.788485111339832e-05, |
| "loss": 3.9003, |
| "step": 1961472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7876465165887803e-05, |
| "loss": 3.9, |
| "step": 1961984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7868079218377283e-05, |
| "loss": 3.8959, |
| "step": 1962496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7859709649670492e-05, |
| "loss": 3.9062, |
| "step": 1963008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7851323702159972e-05, |
| "loss": 3.9053, |
| "step": 1963520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7842937754649452e-05, |
| "loss": 3.9006, |
| "step": 1964032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7834551807138932e-05, |
| "loss": 3.9001, |
| "step": 1964544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.782616585962841e-05, |
| "loss": 3.9015, |
| "step": 1965056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.781777991211789e-05, |
| "loss": 3.906, |
| "step": 1965568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.780939396460737e-05, |
| "loss": 3.9007, |
| "step": 1966080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.780100801709685e-05, |
| "loss": 3.9127, |
| "step": 1966592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.779262206958633e-05, |
| "loss": 3.9126, |
| "step": 1967104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.778423612207581e-05, |
| "loss": 3.9081, |
| "step": 1967616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7775882932172756e-05, |
| "loss": 3.9035, |
| "step": 1968128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7767496984662236e-05, |
| "loss": 3.9021, |
| "step": 1968640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7759111037151713e-05, |
| "loss": 3.898, |
| "step": 1969152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7750725089641193e-05, |
| "loss": 3.9071, |
| "step": 1969664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7742339142130673e-05, |
| "loss": 3.9001, |
| "step": 1970176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7733953194620153e-05, |
| "loss": 3.8927, |
| "step": 1970688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7725567247109633e-05, |
| "loss": 3.9143, |
| "step": 1971200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7717181299599113e-05, |
| "loss": 3.9081, |
| "step": 1971712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7708795352088593e-05, |
| "loss": 3.9068, |
| "step": 1972224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7700425783381805e-05, |
| "loss": 3.8976, |
| "step": 1972736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7692056214675014e-05, |
| "loss": 3.8964, |
| "step": 1973248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7683670267164494e-05, |
| "loss": 3.8972, |
| "step": 1973760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7675284319653974e-05, |
| "loss": 3.9077, |
| "step": 1974272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7666898372143458e-05, |
| "loss": 3.9061, |
| "step": 1974784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7658512424632937e-05, |
| "loss": 3.9119, |
| "step": 1975296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7650126477122417e-05, |
| "loss": 3.8876, |
| "step": 1975808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7641740529611894e-05, |
| "loss": 3.9163, |
| "step": 1976320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7633354582101374e-05, |
| "loss": 3.9005, |
| "step": 1976832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7624968634590854e-05, |
| "loss": 3.916, |
| "step": 1977344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.76166154446878e-05, |
| "loss": 3.9011, |
| "step": 1977856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.760822949717728e-05, |
| "loss": 3.8997, |
| "step": 1978368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.759984354966676e-05, |
| "loss": 3.9075, |
| "step": 1978880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.759145760215624e-05, |
| "loss": 3.9183, |
| "step": 1979392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7583071654645715e-05, |
| "loss": 3.9005, |
| "step": 1979904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7574685707135195e-05, |
| "loss": 3.9099, |
| "step": 1980416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.756629975962468e-05, |
| "loss": 3.9002, |
| "step": 1980928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.755793019091789e-05, |
| "loss": 3.9095, |
| "step": 1981440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7549544243407368e-05, |
| "loss": 3.8997, |
| "step": 1981952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7541158295896848e-05, |
| "loss": 3.8998, |
| "step": 1982464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.7532772348386328e-05, |
| "loss": 3.9065, |
| "step": 1982976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.752440277967954e-05, |
| "loss": 3.8985, |
| "step": 1983488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.751601683216902e-05, |
| "loss": 3.9142, |
| "step": 1984000 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.006655216217041, |
| "eval_runtime": 290.339, |
| "eval_samples_per_second": 1314.295, |
| "eval_steps_per_second": 41.073, |
| "step": 1984320 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 8.153519415427434e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |