| { |
| "best_metric": 3.8436367511749268, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-domain/transformer/3/checkpoints/checkpoint-915840", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 1831680, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.9609, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8264, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.1846, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 5.9686, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8018, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.7035, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.5891, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.5219, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4357, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.388, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3373, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.2993, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 5.2476, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.1973, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.1625, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.1222, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.0917, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.0708, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0409, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.0084, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 4.9978, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.982391148108281e-05, |
| "loss": 4.9606, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9443, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9199, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.9088, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790384069844466e-05, |
| "loss": 4.8819, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.863, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8427, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.825, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 4.8102, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.7937, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.79, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.7682, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 4.7639, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.971495967866098e-05, |
| "loss": 4.7558, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970657373115046e-05, |
| "loss": 4.7392, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969820416244367e-05, |
| "loss": 4.7292, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.7014, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.6988, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967304631991211e-05, |
| "loss": 4.681, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966466037240159e-05, |
| "loss": 4.6797, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965627442489107e-05, |
| "loss": 4.6672, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964788847738054e-05, |
| "loss": 4.6585, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963950252987002e-05, |
| "loss": 4.6366, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 4.6493, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 4.6337, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96143610661422e-05, |
| "loss": 4.6281, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960597511863168e-05, |
| "loss": 4.6199, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9597605549924894e-05, |
| "loss": 4.588, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95892359812181e-05, |
| "loss": 4.5879, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958085003370758e-05, |
| "loss": 4.593, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957246408619706e-05, |
| "loss": 4.5857, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956407813868654e-05, |
| "loss": 4.5676, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9555692191176016e-05, |
| "loss": 4.5492, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.5507, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.5389, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.5623, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5217, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951377883242715e-05, |
| "loss": 4.5344, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.5299, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949700693740611e-05, |
| "loss": 4.5053, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948862098989559e-05, |
| "loss": 4.5096, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94802514211888e-05, |
| "loss": 4.5008, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 4.4832, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 4.4847, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945509357865724e-05, |
| "loss": 4.5018, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 4.4795, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 4.4691, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942995211492941e-05, |
| "loss": 4.46, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421582546222625e-05, |
| "loss": 4.4607, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9413196598712105e-05, |
| "loss": 4.4726, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404810651201585e-05, |
| "loss": 4.4701, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396424703691065e-05, |
| "loss": 4.4566, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388038756180545e-05, |
| "loss": 4.4626, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379652808670025e-05, |
| "loss": 4.4652, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371266861159505e-05, |
| "loss": 4.4501, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362897292452714e-05, |
| "loss": 4.4386, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354511344942194e-05, |
| "loss": 4.428, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346125397431674e-05, |
| "loss": 4.427, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337739449921154e-05, |
| "loss": 4.4208, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329353502410634e-05, |
| "loss": 4.416, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9320967554900114e-05, |
| "loss": 4.4186, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 4.4196, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930421203868281e-05, |
| "loss": 4.4152, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 4.3886, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 4.3882, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 4.398, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 4.3995, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92622823011302e-05, |
| "loss": 4.3822, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925391273242342e-05, |
| "loss": 4.3846, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.92455267849129e-05, |
| "loss": 4.3878, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923714083740238e-05, |
| "loss": 4.3734, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922877126869559e-05, |
| "loss": 4.3697, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922038532118507e-05, |
| "loss": 4.3713, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921199937367455e-05, |
| "loss": 4.3623, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920361342616403e-05, |
| "loss": 4.3685, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919522747865351e-05, |
| "loss": 4.3597, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918684153114299e-05, |
| "loss": 4.3431, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917845558363247e-05, |
| "loss": 4.3528, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917006963612195e-05, |
| "loss": 4.3527, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916168368861143e-05, |
| "loss": 4.3491, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.915329774110091e-05, |
| "loss": 4.3483, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914491179359039e-05, |
| "loss": 4.3416, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913652584607987e-05, |
| "loss": 4.3388, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128156277373076e-05, |
| "loss": 4.3367, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119770329862556e-05, |
| "loss": 4.3139, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111400761155765e-05, |
| "loss": 4.3175, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9103014813645245e-05, |
| "loss": 4.3251, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909462886613473e-05, |
| "loss": 4.32, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908624291862421e-05, |
| "loss": 4.3107, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907785697111369e-05, |
| "loss": 4.3074, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906947102360317e-05, |
| "loss": 4.3044, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906108507609265e-05, |
| "loss": 4.3092, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905271550738586e-05, |
| "loss": 4.3042, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904432955987534e-05, |
| "loss": 4.3034, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903594361236482e-05, |
| "loss": 4.2993, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90275576648543e-05, |
| "loss": 4.3032, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901917171734378e-05, |
| "loss": 4.2986, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901078576983326e-05, |
| "loss": 4.2954, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.900239982232274e-05, |
| "loss": 4.2966, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899403025361595e-05, |
| "loss": 4.2975, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.898564430610543e-05, |
| "loss": 4.2835, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.897725835859491e-05, |
| "loss": 4.2907, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968872411084396e-05, |
| "loss": 4.2778, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.896048646357387e-05, |
| "loss": 4.287, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.895210051606335e-05, |
| "loss": 4.2626, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.894371456855283e-05, |
| "loss": 4.2765, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.893532862104231e-05, |
| "loss": 4.262, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892695905233552e-05, |
| "loss": 4.2673, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918573104825e-05, |
| "loss": 4.2525, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891018715731448e-05, |
| "loss": 4.2805, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890180120980396e-05, |
| "loss": 4.2684, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889343164109717e-05, |
| "loss": 4.2547, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888504569358665e-05, |
| "loss": 4.2572, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8876659746076134e-05, |
| "loss": 4.2605, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8868273798565614e-05, |
| "loss": 4.2641, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8859887851055094e-05, |
| "loss": 4.264, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88515182823483e-05, |
| "loss": 4.2482, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884313233483778e-05, |
| "loss": 4.2558, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883474638732726e-05, |
| "loss": 4.2443, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882636043981674e-05, |
| "loss": 4.2555, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881799087110995e-05, |
| "loss": 4.2396, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880960492359943e-05, |
| "loss": 4.2433, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880121897608891e-05, |
| "loss": 4.2403, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879283302857839e-05, |
| "loss": 4.232, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878444708106787e-05, |
| "loss": 4.2378, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877606113355735e-05, |
| "loss": 4.2351, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876769156485057e-05, |
| "loss": 4.2323, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875930561734005e-05, |
| "loss": 4.2344, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875091966982953e-05, |
| "loss": 4.2244, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.190875053405762, |
| "eval_runtime": 308.1837, |
| "eval_samples_per_second": 1238.193, |
| "eval_steps_per_second": 38.694, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874253372231901e-05, |
| "loss": 4.2101, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873414777480849e-05, |
| "loss": 4.207, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.87257782061017e-05, |
| "loss": 4.2269, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.871739225859118e-05, |
| "loss": 4.2087, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709006311080657e-05, |
| "loss": 4.228, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700620363570137e-05, |
| "loss": 4.1989, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692250794863346e-05, |
| "loss": 4.206, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683864847352826e-05, |
| "loss": 4.1915, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8675478899842306e-05, |
| "loss": 4.2071, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8667092952331785e-05, |
| "loss": 4.1992, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8658723383625e-05, |
| "loss": 4.205, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865033743611448e-05, |
| "loss": 4.2007, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864196786740769e-05, |
| "loss": 4.1868, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.863358191989717e-05, |
| "loss": 4.1876, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862519597238665e-05, |
| "loss": 4.1836, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.861681002487613e-05, |
| "loss": 4.1772, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860842407736561e-05, |
| "loss": 4.1863, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860003812985509e-05, |
| "loss": 4.1817, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.859165218234457e-05, |
| "loss": 4.1774, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858328261363778e-05, |
| "loss": 4.1963, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.857489666612726e-05, |
| "loss": 4.1707, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.856651071861674e-05, |
| "loss": 4.1807, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8558124771106226e-05, |
| "loss": 4.1783, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.85497388235957e-05, |
| "loss": 4.1856, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541369254888915e-05, |
| "loss": 4.1612, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532983307378395e-05, |
| "loss": 4.1686, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524597359867875e-05, |
| "loss": 4.1618, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851621141235735e-05, |
| "loss": 4.1664, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8507841843650564e-05, |
| "loss": 4.1503, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8499455896140044e-05, |
| "loss": 4.155, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8491069948629524e-05, |
| "loss": 4.1634, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8482684001119e-05, |
| "loss": 4.1576, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847429805360848e-05, |
| "loss": 4.1586, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.846592848490169e-05, |
| "loss": 4.1626, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845754253739117e-05, |
| "loss": 4.1558, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844915658988065e-05, |
| "loss": 4.1559, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844077064237013e-05, |
| "loss": 4.1438, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843240107366335e-05, |
| "loss": 4.1519, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842401512615282e-05, |
| "loss": 4.1358, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84156291786423e-05, |
| "loss": 4.1452, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840724323113178e-05, |
| "loss": 4.1446, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839885728362126e-05, |
| "loss": 4.1426, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839048771491447e-05, |
| "loss": 4.1308, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838210176740395e-05, |
| "loss": 4.1407, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.837371581989343e-05, |
| "loss": 4.1439, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836532987238292e-05, |
| "loss": 4.1378, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8356960303676126e-05, |
| "loss": 4.1395, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8348574356165606e-05, |
| "loss": 4.111, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8340188408655086e-05, |
| "loss": 4.1177, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8331802461144566e-05, |
| "loss": 4.1368, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8323432892437775e-05, |
| "loss": 4.1289, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8315046944927255e-05, |
| "loss": 4.1225, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8306660997416735e-05, |
| "loss": 4.1074, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298275049906215e-05, |
| "loss": 4.1104, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289889102395695e-05, |
| "loss": 4.1051, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281519533688904e-05, |
| "loss": 4.1304, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273133586178384e-05, |
| "loss": 4.1043, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.826474763866787e-05, |
| "loss": 4.1164, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.825636169115735e-05, |
| "loss": 4.1213, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.824799212245056e-05, |
| "loss": 4.1002, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823960617494004e-05, |
| "loss": 4.1077, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823122022742952e-05, |
| "loss": 4.101, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8222834279919e-05, |
| "loss": 4.0953, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821446471121221e-05, |
| "loss": 4.0968, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820607876370169e-05, |
| "loss": 4.1117, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819769281619117e-05, |
| "loss": 4.1013, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818930686868065e-05, |
| "loss": 4.0914, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.818093729997386e-05, |
| "loss": 4.0904, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.817255135246334e-05, |
| "loss": 4.0911, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.816416540495282e-05, |
| "loss": 4.1064, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8155779457442305e-05, |
| "loss": 4.1087, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8147409888735514e-05, |
| "loss": 4.0994, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8139023941224994e-05, |
| "loss": 4.1104, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8130637993714474e-05, |
| "loss": 4.1087, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8122252046203954e-05, |
| "loss": 4.1042, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811388247749716e-05, |
| "loss": 4.0951, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810551290879037e-05, |
| "loss": 4.0912, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809712696127985e-05, |
| "loss": 4.0906, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808874101376933e-05, |
| "loss": 4.0861, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808035506625881e-05, |
| "loss": 4.0849, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.807196911874829e-05, |
| "loss": 4.0921, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806358317123777e-05, |
| "loss": 4.0964, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.805519722372726e-05, |
| "loss": 4.0894, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804681127621674e-05, |
| "loss": 4.0726, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803844170750995e-05, |
| "loss": 4.0717, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803005575999943e-05, |
| "loss": 4.0802, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.802166981248891e-05, |
| "loss": 4.0886, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.801328386497839e-05, |
| "loss": 4.0726, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8004914296271596e-05, |
| "loss": 4.0773, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7996528348761076e-05, |
| "loss": 4.0841, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7988142401250556e-05, |
| "loss": 4.0727, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7979756453740036e-05, |
| "loss": 4.069, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7971386885033245e-05, |
| "loss": 4.0712, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7963000937522725e-05, |
| "loss": 4.0737, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.795461499001221e-05, |
| "loss": 4.0752, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.794622904250169e-05, |
| "loss": 4.0755, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.793784309499117e-05, |
| "loss": 4.0583, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792947352628438e-05, |
| "loss": 4.0715, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.792108757877386e-05, |
| "loss": 4.0646, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791270163126334e-05, |
| "loss": 4.0761, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790433206255655e-05, |
| "loss": 4.0668, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.789594611504603e-05, |
| "loss": 4.0671, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.788756016753551e-05, |
| "loss": 4.0627, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787917422002499e-05, |
| "loss": 4.0675, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.78708046513182e-05, |
| "loss": 4.0458, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.786241870380768e-05, |
| "loss": 4.0493, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7854032756297166e-05, |
| "loss": 4.0594, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7845646808786646e-05, |
| "loss": 4.0547, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7837277240079855e-05, |
| "loss": 4.0484, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7828891292569335e-05, |
| "loss": 4.0454, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7820505345058815e-05, |
| "loss": 4.0466, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7812119397548295e-05, |
| "loss": 4.0529, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7803733450037775e-05, |
| "loss": 4.0485, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.779534750252725e-05, |
| "loss": 4.0478, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.778696155501673e-05, |
| "loss": 4.0532, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.777857560750621e-05, |
| "loss": 4.0541, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7770206038799423e-05, |
| "loss": 4.0501, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7761820091288903e-05, |
| "loss": 4.0474, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7753434143778383e-05, |
| "loss": 4.0536, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.774504819626786e-05, |
| "loss": 4.0542, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.773667862756108e-05, |
| "loss": 4.0409, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.772829268005055e-05, |
| "loss": 4.0524, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771990673254003e-05, |
| "loss": 4.0386, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771152078502951e-05, |
| "loss": 4.0522, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770315121632272e-05, |
| "loss": 4.0254, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76947652688122e-05, |
| "loss": 4.0445, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.768637932130168e-05, |
| "loss": 4.0286, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767799337379116e-05, |
| "loss": 4.0329, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766962380508437e-05, |
| "loss": 4.0228, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766123785757386e-05, |
| "loss": 4.0528, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765285191006334e-05, |
| "loss": 4.0415, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764448234135655e-05, |
| "loss": 4.0326, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7636096393846026e-05, |
| "loss": 4.0289, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7627710446335506e-05, |
| "loss": 4.0394, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7619324498824986e-05, |
| "loss": 4.0425, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7610938551314466e-05, |
| "loss": 4.0405, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7602552603803946e-05, |
| "loss": 4.0317, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7594166656293426e-05, |
| "loss": 4.0349, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7585780708782906e-05, |
| "loss": 4.0292, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7577411140076115e-05, |
| "loss": 4.0436, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7569025192565595e-05, |
| "loss": 4.0235, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7560639245055075e-05, |
| "loss": 4.0285, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7552253297544555e-05, |
| "loss": 4.0248, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.754388372883777e-05, |
| "loss": 4.0213, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.753549778132725e-05, |
| "loss": 4.0316, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.752711183381673e-05, |
| "loss": 4.0247, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751872588630621e-05, |
| "loss": 4.0271, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.751035631759942e-05, |
| "loss": 4.0245, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.75019703700889e-05, |
| "loss": 4.0211, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.022885799407959, |
| "eval_runtime": 307.5001, |
| "eval_samples_per_second": 1240.946, |
| "eval_steps_per_second": 38.78, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.749358442257838e-05, |
| "loss": 4.0085, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748519847506786e-05, |
| "loss": 4.0056, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747681252755734e-05, |
| "loss": 4.025, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746842658004682e-05, |
| "loss": 4.0097, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74600406325363e-05, |
| "loss": 4.0341, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.745165468502578e-05, |
| "loss": 4.0025, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.744326873751526e-05, |
| "loss": 4.0125, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.743488279000474e-05, |
| "loss": 3.9976, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.742649684249422e-05, |
| "loss": 4.0141, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74181108949837e-05, |
| "loss": 4.0091, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740972494747318e-05, |
| "loss": 4.012, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740135537876639e-05, |
| "loss": 4.0102, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7392985810059604e-05, |
| "loss": 3.9968, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384599862549084e-05, |
| "loss": 4.004, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.737621391503856e-05, |
| "loss": 3.9997, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736782796752804e-05, |
| "loss": 3.9959, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735944202001752e-05, |
| "loss": 3.9955, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7351056072507e-05, |
| "loss": 3.9988, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734267012499648e-05, |
| "loss": 3.9943, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733428417748596e-05, |
| "loss": 4.0155, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732591460877917e-05, |
| "loss": 3.9913, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731752866126865e-05, |
| "loss": 3.999, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730915909256186e-05, |
| "loss": 3.999, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730077314505134e-05, |
| "loss": 4.0069, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729238719754082e-05, |
| "loss": 3.9855, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.72840012500303e-05, |
| "loss": 3.9973, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727561530251978e-05, |
| "loss": 3.9875, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726722935500926e-05, |
| "loss": 3.9926, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725884340749874e-05, |
| "loss": 3.9772, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725045745998822e-05, |
| "loss": 3.9826, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724208789128143e-05, |
| "loss": 3.9918, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7233718322574647e-05, |
| "loss": 3.9902, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7225332375064127e-05, |
| "loss": 3.9913, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7216946427553606e-05, |
| "loss": 3.9945, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7208560480043086e-05, |
| "loss": 3.9876, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7200174532532566e-05, |
| "loss": 3.9924, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7191788585022046e-05, |
| "loss": 3.9771, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7183402637511526e-05, |
| "loss": 3.9902, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7175016690001006e-05, |
| "loss": 3.9717, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7166647121294215e-05, |
| "loss": 3.9827, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7158261173783695e-05, |
| "loss": 3.9792, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7149891605076904e-05, |
| "loss": 3.9853, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7141505657566384e-05, |
| "loss": 3.9787, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7133119710055864e-05, |
| "loss": 3.9751, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712473376254535e-05, |
| "loss": 3.986, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711634781503483e-05, |
| "loss": 3.9818, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710796186752431e-05, |
| "loss": 3.9786, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709957592001379e-05, |
| "loss": 3.964, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709118997250327e-05, |
| "loss": 3.9608, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708282040379648e-05, |
| "loss": 3.9773, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707443445628596e-05, |
| "loss": 3.9736, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706604850877544e-05, |
| "loss": 3.9706, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705767894006865e-05, |
| "loss": 3.9543, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704929299255813e-05, |
| "loss": 3.9605, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704090704504761e-05, |
| "loss": 3.9572, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.703252109753709e-05, |
| "loss": 3.9727, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702413515002657e-05, |
| "loss": 3.9556, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701574920251605e-05, |
| "loss": 3.9687, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700736325500553e-05, |
| "loss": 3.969, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699897730749501e-05, |
| "loss": 3.9552, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699060773878822e-05, |
| "loss": 3.9595, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69822217912777e-05, |
| "loss": 3.9556, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697383584376718e-05, |
| "loss": 3.9472, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696544989625666e-05, |
| "loss": 3.9569, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695706394874614e-05, |
| "loss": 3.9606, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694867800123562e-05, |
| "loss": 3.9659, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69402920537251e-05, |
| "loss": 3.9426, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6931922485018307e-05, |
| "loss": 3.9485, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6923536537507787e-05, |
| "loss": 3.947, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6915150589997267e-05, |
| "loss": 3.9724, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690676464248675e-05, |
| "loss": 3.9582, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.689837869497623e-05, |
| "loss": 3.959, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688999274746571e-05, |
| "loss": 3.9727, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688160679995519e-05, |
| "loss": 3.9691, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687322085244467e-05, |
| "loss": 3.9638, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686485128373788e-05, |
| "loss": 3.9548, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.685646533622736e-05, |
| "loss": 3.9574, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684807938871684e-05, |
| "loss": 3.9537, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683969344120632e-05, |
| "loss": 3.9492, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683132387249953e-05, |
| "loss": 3.9518, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682293792498901e-05, |
| "loss": 3.955, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681455197747849e-05, |
| "loss": 3.9619, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680616602996797e-05, |
| "loss": 3.9515, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679779646126119e-05, |
| "loss": 3.9422, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678941051375067e-05, |
| "loss": 3.936, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678102456624015e-05, |
| "loss": 3.9493, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6772654997533356e-05, |
| "loss": 3.9552, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6764269050022836e-05, |
| "loss": 3.9435, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6755883102512316e-05, |
| "loss": 3.9425, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6747497155001796e-05, |
| "loss": 3.9556, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6739111207491276e-05, |
| "loss": 3.9345, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6730741638784485e-05, |
| "loss": 3.9442, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722355691273965e-05, |
| "loss": 3.9414, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6713969743763445e-05, |
| "loss": 3.9488, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705583796252925e-05, |
| "loss": 3.9488, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697197848742405e-05, |
| "loss": 3.9424, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688811901231885e-05, |
| "loss": 3.9334, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66804423325251e-05, |
| "loss": 3.9466, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667205638501458e-05, |
| "loss": 3.9353, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6663670437504054e-05, |
| "loss": 3.9524, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6655284489993534e-05, |
| "loss": 3.9417, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6646898542483013e-05, |
| "loss": 3.9434, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663852897377623e-05, |
| "loss": 3.934, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66301430262657e-05, |
| "loss": 3.9485, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662175707875518e-05, |
| "loss": 3.9203, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661337113124466e-05, |
| "loss": 3.9266, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660498518373414e-05, |
| "loss": 3.9355, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659659923622363e-05, |
| "loss": 3.9352, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.658822966751684e-05, |
| "loss": 3.9241, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657984372000632e-05, |
| "loss": 3.9271, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65714577724958e-05, |
| "loss": 3.9242, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656307182498528e-05, |
| "loss": 3.9332, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.655468587747476e-05, |
| "loss": 3.9274, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654631630876797e-05, |
| "loss": 3.9242, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653793036125745e-05, |
| "loss": 3.9383, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652954441374693e-05, |
| "loss": 3.9368, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.652115846623641e-05, |
| "loss": 3.9357, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651277251872589e-05, |
| "loss": 3.9262, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650438657121537e-05, |
| "loss": 3.9368, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.649601700250858e-05, |
| "loss": 3.9315, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648763105499806e-05, |
| "loss": 3.9245, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647924510748754e-05, |
| "loss": 3.9403, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647085915997702e-05, |
| "loss": 3.92, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64624732124665e-05, |
| "loss": 3.9355, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645408726495598e-05, |
| "loss": 3.9143, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644570131744546e-05, |
| "loss": 3.9292, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643733174873867e-05, |
| "loss": 3.9126, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642894580122815e-05, |
| "loss": 3.9187, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642055985371763e-05, |
| "loss": 3.9117, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641217390620711e-05, |
| "loss": 3.9373, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640380433750032e-05, |
| "loss": 3.9281, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.63954183899898e-05, |
| "loss": 3.9243, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638703244247928e-05, |
| "loss": 3.9164, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637864649496876e-05, |
| "loss": 3.9245, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637026054745824e-05, |
| "loss": 3.9352, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6361890978751456e-05, |
| "loss": 3.9265, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6353505031240936e-05, |
| "loss": 3.9193, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6345119083730416e-05, |
| "loss": 3.9278, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633673313621989e-05, |
| "loss": 3.9205, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632834718870937e-05, |
| "loss": 3.9277, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631996124119885e-05, |
| "loss": 3.9152, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631159167249206e-05, |
| "loss": 3.9204, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630320572498154e-05, |
| "loss": 3.9156, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629481977747102e-05, |
| "loss": 3.9161, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.62864338299605e-05, |
| "loss": 3.9179, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6278064261253714e-05, |
| "loss": 3.923, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6269678313743194e-05, |
| "loss": 3.9142, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6261292366232674e-05, |
| "loss": 3.9165, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6252906418722154e-05, |
| "loss": 3.9159, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.949317216873169, |
| "eval_runtime": 305.1517, |
| "eval_samples_per_second": 1250.496, |
| "eval_steps_per_second": 39.079, |
| "step": 228960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6244520471211634e-05, |
| "loss": 3.9076, |
| "step": 229376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.623615090250484e-05, |
| "loss": 3.9037, |
| "step": 229888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.622776495499432e-05, |
| "loss": 3.9191, |
| "step": 230400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.62193790074838e-05, |
| "loss": 3.902, |
| "step": 230912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.621099305997328e-05, |
| "loss": 3.932, |
| "step": 231424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.620262349126649e-05, |
| "loss": 3.8964, |
| "step": 231936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.619423754375597e-05, |
| "loss": 3.9095, |
| "step": 232448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.618585159624545e-05, |
| "loss": 3.8935, |
| "step": 232960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.617746564873494e-05, |
| "loss": 3.9093, |
| "step": 233472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616909608002815e-05, |
| "loss": 3.9094, |
| "step": 233984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.616071013251763e-05, |
| "loss": 3.9038, |
| "step": 234496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.615232418500711e-05, |
| "loss": 3.9074, |
| "step": 235008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.614395461630032e-05, |
| "loss": 3.8943, |
| "step": 235520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.61355686687898e-05, |
| "loss": 3.9047, |
| "step": 236032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.612718272127928e-05, |
| "loss": 3.8982, |
| "step": 236544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.611879677376876e-05, |
| "loss": 3.8976, |
| "step": 237056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6110410826258237e-05, |
| "loss": 3.8961, |
| "step": 237568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6102024878747717e-05, |
| "loss": 3.8967, |
| "step": 238080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6093638931237196e-05, |
| "loss": 3.8963, |
| "step": 238592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.6085252983726676e-05, |
| "loss": 3.9124, |
| "step": 239104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.607688341501989e-05, |
| "loss": 3.8938, |
| "step": 239616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606849746750937e-05, |
| "loss": 3.8982, |
| "step": 240128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.606011151999885e-05, |
| "loss": 3.9005, |
| "step": 240640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.605172557248833e-05, |
| "loss": 3.9094, |
| "step": 241152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.604335600378154e-05, |
| "loss": 3.8869, |
| "step": 241664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.603497005627102e-05, |
| "loss": 3.9015, |
| "step": 242176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.60265841087605e-05, |
| "loss": 3.8908, |
| "step": 242688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.601819816124998e-05, |
| "loss": 3.8929, |
| "step": 243200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.600982859254319e-05, |
| "loss": 3.8803, |
| "step": 243712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.600144264503267e-05, |
| "loss": 3.8912, |
| "step": 244224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.599305669752215e-05, |
| "loss": 3.8941, |
| "step": 244736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.598467075001163e-05, |
| "loss": 3.8982, |
| "step": 245248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.597628480250111e-05, |
| "loss": 3.894, |
| "step": 245760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5967915233794326e-05, |
| "loss": 3.9, |
| "step": 246272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5959529286283806e-05, |
| "loss": 3.8903, |
| "step": 246784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5951143338773286e-05, |
| "loss": 3.8969, |
| "step": 247296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5942757391262766e-05, |
| "loss": 3.8855, |
| "step": 247808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5934387822555975e-05, |
| "loss": 3.8944, |
| "step": 248320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5926001875045455e-05, |
| "loss": 3.8771, |
| "step": 248832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5917615927534935e-05, |
| "loss": 3.8897, |
| "step": 249344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5909229980024415e-05, |
| "loss": 3.8867, |
| "step": 249856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5900860411317624e-05, |
| "loss": 3.8942, |
| "step": 250368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5892474463807104e-05, |
| "loss": 3.886, |
| "step": 250880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5884088516296584e-05, |
| "loss": 3.8844, |
| "step": 251392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5875702568786064e-05, |
| "loss": 3.8917, |
| "step": 251904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.586733300007928e-05, |
| "loss": 3.8887, |
| "step": 252416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585894705256876e-05, |
| "loss": 3.8879, |
| "step": 252928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.585056110505824e-05, |
| "loss": 3.8791, |
| "step": 253440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.584217515754772e-05, |
| "loss": 3.8635, |
| "step": 253952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.583380558884093e-05, |
| "loss": 3.8866, |
| "step": 254464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.582541964133041e-05, |
| "loss": 3.8819, |
| "step": 254976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.581703369381989e-05, |
| "loss": 3.8771, |
| "step": 255488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580864774630937e-05, |
| "loss": 3.8702, |
| "step": 256000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.580027817760258e-05, |
| "loss": 3.8687, |
| "step": 256512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.579189223009206e-05, |
| "loss": 3.8641, |
| "step": 257024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.578350628258154e-05, |
| "loss": 3.8832, |
| "step": 257536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.577512033507102e-05, |
| "loss": 3.8691, |
| "step": 258048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.57667343875605e-05, |
| "loss": 3.8748, |
| "step": 258560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.575836481885371e-05, |
| "loss": 3.8827, |
| "step": 259072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.574997887134319e-05, |
| "loss": 3.8685, |
| "step": 259584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5741592923832666e-05, |
| "loss": 3.8693, |
| "step": 260096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5733206976322146e-05, |
| "loss": 3.8723, |
| "step": 260608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.572483740761536e-05, |
| "loss": 3.8557, |
| "step": 261120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.571645146010484e-05, |
| "loss": 3.8711, |
| "step": 261632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5708065512594315e-05, |
| "loss": 3.8748, |
| "step": 262144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5699679565083795e-05, |
| "loss": 3.8757, |
| "step": 262656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.569130999637701e-05, |
| "loss": 3.8562, |
| "step": 263168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.568292404886649e-05, |
| "loss": 3.8642, |
| "step": 263680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.567453810135597e-05, |
| "loss": 3.8598, |
| "step": 264192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.566615215384545e-05, |
| "loss": 3.8802, |
| "step": 264704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.565778258513867e-05, |
| "loss": 3.8758, |
| "step": 265216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564939663762814e-05, |
| "loss": 3.8738, |
| "step": 265728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.564101069011762e-05, |
| "loss": 3.8858, |
| "step": 266240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.56326247426071e-05, |
| "loss": 3.884, |
| "step": 266752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5624255173900316e-05, |
| "loss": 3.885, |
| "step": 267264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.561586922638979e-05, |
| "loss": 3.8638, |
| "step": 267776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.560748327887927e-05, |
| "loss": 3.8743, |
| "step": 268288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.559909733136875e-05, |
| "loss": 3.8735, |
| "step": 268800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5590727762661965e-05, |
| "loss": 3.8661, |
| "step": 269312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5582341815151445e-05, |
| "loss": 3.8726, |
| "step": 269824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5573955867640925e-05, |
| "loss": 3.8692, |
| "step": 270336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5565569920130405e-05, |
| "loss": 3.8755, |
| "step": 270848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5557200351423614e-05, |
| "loss": 3.8693, |
| "step": 271360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5548814403913094e-05, |
| "loss": 3.8525, |
| "step": 271872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5540428456402574e-05, |
| "loss": 3.8565, |
| "step": 272384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.5532042508892054e-05, |
| "loss": 3.867, |
| "step": 272896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.552367294018526e-05, |
| "loss": 3.8714, |
| "step": 273408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.551528699267474e-05, |
| "loss": 3.8613, |
| "step": 273920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.550690104516422e-05, |
| "loss": 3.8588, |
| "step": 274432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.54985150976537e-05, |
| "loss": 3.8708, |
| "step": 274944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.549014552894691e-05, |
| "loss": 3.8563, |
| "step": 275456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.548175958143639e-05, |
| "loss": 3.8598, |
| "step": 275968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.547337363392588e-05, |
| "loss": 3.8644, |
| "step": 276480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.546498768641536e-05, |
| "loss": 3.8689, |
| "step": 276992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.545661811770857e-05, |
| "loss": 3.8699, |
| "step": 277504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.544823217019805e-05, |
| "loss": 3.8622, |
| "step": 278016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543984622268753e-05, |
| "loss": 3.848, |
| "step": 278528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.543146027517701e-05, |
| "loss": 3.8693, |
| "step": 279040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.542307432766649e-05, |
| "loss": 3.8536, |
| "step": 279552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5414704758959696e-05, |
| "loss": 3.8732, |
| "step": 280064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5406318811449176e-05, |
| "loss": 3.864, |
| "step": 280576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5397932863938656e-05, |
| "loss": 3.8614, |
| "step": 281088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5389563295231865e-05, |
| "loss": 3.8558, |
| "step": 281600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5381177347721345e-05, |
| "loss": 3.8694, |
| "step": 282112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.537279140021083e-05, |
| "loss": 3.8436, |
| "step": 282624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.536440545270031e-05, |
| "loss": 3.8514, |
| "step": 283136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.535603588399352e-05, |
| "loss": 3.8508, |
| "step": 283648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5347649936483e-05, |
| "loss": 3.8578, |
| "step": 284160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533926398897248e-05, |
| "loss": 3.8457, |
| "step": 284672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.533087804146196e-05, |
| "loss": 3.8525, |
| "step": 285184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.532250847275517e-05, |
| "loss": 3.8513, |
| "step": 285696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.531412252524465e-05, |
| "loss": 3.8484, |
| "step": 286208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.530573657773413e-05, |
| "loss": 3.8541, |
| "step": 286720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.529735063022361e-05, |
| "loss": 3.8444, |
| "step": 287232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.528898106151682e-05, |
| "loss": 3.8627, |
| "step": 287744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.52805951140063e-05, |
| "loss": 3.8596, |
| "step": 288256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5272209166495786e-05, |
| "loss": 3.8599, |
| "step": 288768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5263823218985266e-05, |
| "loss": 3.8503, |
| "step": 289280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5255453650278475e-05, |
| "loss": 3.8584, |
| "step": 289792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5247067702767955e-05, |
| "loss": 3.8583, |
| "step": 290304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5238681755257435e-05, |
| "loss": 3.8465, |
| "step": 290816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5230295807746915e-05, |
| "loss": 3.8612, |
| "step": 291328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5221926239040124e-05, |
| "loss": 3.8463, |
| "step": 291840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5213540291529604e-05, |
| "loss": 3.8586, |
| "step": 292352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5205154344019084e-05, |
| "loss": 3.8425, |
| "step": 292864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5196768396508564e-05, |
| "loss": 3.8513, |
| "step": 293376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518839882780177e-05, |
| "loss": 3.8378, |
| "step": 293888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.518001288029125e-05, |
| "loss": 3.8444, |
| "step": 294400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.517162693278074e-05, |
| "loss": 3.8405, |
| "step": 294912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.516324098527022e-05, |
| "loss": 3.8571, |
| "step": 295424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.515487141656343e-05, |
| "loss": 3.8512, |
| "step": 295936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.514648546905291e-05, |
| "loss": 3.854, |
| "step": 296448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.513809952154239e-05, |
| "loss": 3.845, |
| "step": 296960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512971357403187e-05, |
| "loss": 3.8528, |
| "step": 297472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.512134400532508e-05, |
| "loss": 3.8584, |
| "step": 297984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.511295805781456e-05, |
| "loss": 3.8519, |
| "step": 298496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.510457211030404e-05, |
| "loss": 3.8478, |
| "step": 299008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.509618616279352e-05, |
| "loss": 3.8548, |
| "step": 299520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5087816594086726e-05, |
| "loss": 3.8474, |
| "step": 300032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.5079430646576206e-05, |
| "loss": 3.8558, |
| "step": 300544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.507104469906569e-05, |
| "loss": 3.8387, |
| "step": 301056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.506265875155517e-05, |
| "loss": 3.8511, |
| "step": 301568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.505428918284838e-05, |
| "loss": 3.8428, |
| "step": 302080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.504590323533786e-05, |
| "loss": 3.8434, |
| "step": 302592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.503751728782734e-05, |
| "loss": 3.8483, |
| "step": 303104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502913134031682e-05, |
| "loss": 3.8452, |
| "step": 303616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.502076177161003e-05, |
| "loss": 3.8445, |
| "step": 304128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.501237582409951e-05, |
| "loss": 3.841, |
| "step": 304640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.500398987658899e-05, |
| "loss": 3.8498, |
| "step": 305152 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.9089503288269043, |
| "eval_runtime": 304.6349, |
| "eval_samples_per_second": 1252.618, |
| "eval_steps_per_second": 39.145, |
| "step": 305280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.499560392907847e-05, |
| "loss": 3.831, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.498721798156795e-05, |
| "loss": 3.8315, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497883203405743e-05, |
| "loss": 3.8477, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497044608654691e-05, |
| "loss": 3.8333, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496206013903639e-05, |
| "loss": 3.859, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.495367419152587e-05, |
| "loss": 3.8331, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.494528824401535e-05, |
| "loss": 3.8337, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.493690229650483e-05, |
| "loss": 3.8241, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4928532727798047e-05, |
| "loss": 3.8383, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492014678028752e-05, |
| "loss": 3.8447, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4911760832777e-05, |
| "loss": 3.8321, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.490337488526648e-05, |
| "loss": 3.835, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4895021695363425e-05, |
| "loss": 3.827, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4886635747852905e-05, |
| "loss": 3.8343, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4878249800342385e-05, |
| "loss": 3.8304, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4869863852831864e-05, |
| "loss": 3.8257, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4861477905321344e-05, |
| "loss": 3.8262, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4853091957810824e-05, |
| "loss": 3.8318, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4844706010300304e-05, |
| "loss": 3.824, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4836320062789784e-05, |
| "loss": 3.8388, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4827950494082993e-05, |
| "loss": 3.8296, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481956454657247e-05, |
| "loss": 3.8337, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481117859906195e-05, |
| "loss": 3.8332, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480279265155143e-05, |
| "loss": 3.84, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479442308284464e-05, |
| "loss": 3.8203, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478603713533412e-05, |
| "loss": 3.8314, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47776511878236e-05, |
| "loss": 3.8251, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476926524031308e-05, |
| "loss": 3.8222, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47608956716063e-05, |
| "loss": 3.8155, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.475250972409578e-05, |
| "loss": 3.8222, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474412377658526e-05, |
| "loss": 3.8316, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.473573782907474e-05, |
| "loss": 3.8312, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472736826036795e-05, |
| "loss": 3.8308, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471898231285743e-05, |
| "loss": 3.8315, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471059636534691e-05, |
| "loss": 3.8235, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.470221041783639e-05, |
| "loss": 3.8297, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4693840849129596e-05, |
| "loss": 3.8231, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4685454901619076e-05, |
| "loss": 3.826, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4677068954108556e-05, |
| "loss": 3.8145, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466869938540177e-05, |
| "loss": 3.8227, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.466031343789125e-05, |
| "loss": 3.8204, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.465192749038073e-05, |
| "loss": 3.8293, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.464354154287021e-05, |
| "loss": 3.822, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.463517197416342e-05, |
| "loss": 3.8193, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.46267860266529e-05, |
| "loss": 3.8303, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461840007914238e-05, |
| "loss": 3.8219, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.461001413163186e-05, |
| "loss": 3.8219, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460164456292507e-05, |
| "loss": 3.8143, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459325861541455e-05, |
| "loss": 3.7995, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.458487266790403e-05, |
| "loss": 3.8243, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457648672039351e-05, |
| "loss": 3.8209, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4568117151686725e-05, |
| "loss": 3.8142, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4559731204176205e-05, |
| "loss": 3.8006, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4551345256665685e-05, |
| "loss": 3.8082, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4542959309155165e-05, |
| "loss": 3.7984, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4534589740448374e-05, |
| "loss": 3.8224, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4526203792937854e-05, |
| "loss": 3.8061, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4517817845427334e-05, |
| "loss": 3.8098, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4509431897916814e-05, |
| "loss": 3.8203, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4501062329210023e-05, |
| "loss": 3.8056, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44926763816995e-05, |
| "loss": 3.8081, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448429043418898e-05, |
| "loss": 3.8126, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.447590448667846e-05, |
| "loss": 3.7937, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.446753491797168e-05, |
| "loss": 3.8086, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445914897046116e-05, |
| "loss": 3.8104, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445076302295064e-05, |
| "loss": 3.815, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444237707544012e-05, |
| "loss": 3.7971, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443400750673333e-05, |
| "loss": 3.8037, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442562155922281e-05, |
| "loss": 3.7971, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.441723561171229e-05, |
| "loss": 3.8205, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440884966420177e-05, |
| "loss": 3.8131, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440048009549498e-05, |
| "loss": 3.809, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.439209414798446e-05, |
| "loss": 3.8277, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.438370820047394e-05, |
| "loss": 3.8166, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437532225296342e-05, |
| "loss": 3.8304, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436695268425663e-05, |
| "loss": 3.8061, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435856673674611e-05, |
| "loss": 3.8116, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435018078923559e-05, |
| "loss": 3.8126, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434179484172507e-05, |
| "loss": 3.8018, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.433342527301828e-05, |
| "loss": 3.8145, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.432503932550776e-05, |
| "loss": 3.8083, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.431665337799724e-05, |
| "loss": 3.8137, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.430826743048672e-05, |
| "loss": 3.8089, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429989786177993e-05, |
| "loss": 3.796, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.429151191426941e-05, |
| "loss": 3.8061, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.428312596675889e-05, |
| "loss": 3.7994, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.427474001924837e-05, |
| "loss": 3.8135, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4266370450541586e-05, |
| "loss": 3.8041, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4257984503031066e-05, |
| "loss": 3.7987, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4249598555520546e-05, |
| "loss": 3.8093, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4241212608010026e-05, |
| "loss": 3.8036, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4232843039303235e-05, |
| "loss": 3.7971, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4224457091792715e-05, |
| "loss": 3.8029, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4216071144282195e-05, |
| "loss": 3.8122, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4207685196771675e-05, |
| "loss": 3.8121, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4199315628064884e-05, |
| "loss": 3.806, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4190929680554364e-05, |
| "loss": 3.7894, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4182543733043844e-05, |
| "loss": 3.8087, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4174157785533324e-05, |
| "loss": 3.7979, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416578821682654e-05, |
| "loss": 3.8127, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415740226931602e-05, |
| "loss": 3.8103, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.41490163218055e-05, |
| "loss": 3.8041, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414063037429498e-05, |
| "loss": 3.7971, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.413226080558819e-05, |
| "loss": 3.8141, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.412387485807767e-05, |
| "loss": 3.783, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.411548891056715e-05, |
| "loss": 3.7962, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.410710296305663e-05, |
| "loss": 3.7913, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409873339434984e-05, |
| "loss": 3.8065, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409034744683932e-05, |
| "loss": 3.7861, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40819614993288e-05, |
| "loss": 3.7934, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407359193062201e-05, |
| "loss": 3.7951, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4065205983111494e-05, |
| "loss": 3.7924, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4056820035600974e-05, |
| "loss": 3.7947, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4048434088090454e-05, |
| "loss": 3.7922, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404006451938366e-05, |
| "loss": 3.8046, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403167857187314e-05, |
| "loss": 3.8052, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402329262436262e-05, |
| "loss": 3.8006, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40149066768521e-05, |
| "loss": 3.7934, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400653710814531e-05, |
| "loss": 3.8047, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.399815116063479e-05, |
| "loss": 3.7973, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398976521312427e-05, |
| "loss": 3.7904, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398137926561375e-05, |
| "loss": 3.8074, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397300969690696e-05, |
| "loss": 3.7889, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396462374939645e-05, |
| "loss": 3.8016, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.395623780188593e-05, |
| "loss": 3.7873, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.394785185437541e-05, |
| "loss": 3.7971, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3939482285668617e-05, |
| "loss": 3.784, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3931112716961826e-05, |
| "loss": 3.7868, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3922726769451306e-05, |
| "loss": 3.7895, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3914340821940786e-05, |
| "loss": 3.7996, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3905954874430265e-05, |
| "loss": 3.7932, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3897568926919745e-05, |
| "loss": 3.7975, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3889182979409225e-05, |
| "loss": 3.7866, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3880797031898705e-05, |
| "loss": 3.8001, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3872411084388185e-05, |
| "loss": 3.8044, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3864041515681394e-05, |
| "loss": 3.7968, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.385565556817088e-05, |
| "loss": 3.793, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.384726962066036e-05, |
| "loss": 3.7979, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383888367314984e-05, |
| "loss": 3.7949, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.383051410444305e-05, |
| "loss": 3.8013, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.382212815693253e-05, |
| "loss": 3.7863, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.381374220942201e-05, |
| "loss": 3.7974, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.380535626191148e-05, |
| "loss": 3.7877, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.37969866932047e-05, |
| "loss": 3.7896, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378860074569418e-05, |
| "loss": 3.7922, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.378021479818366e-05, |
| "loss": 3.7917, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.377182885067313e-05, |
| "loss": 3.7931, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.376345928196635e-05, |
| "loss": 3.7824, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3755073334455835e-05, |
| "loss": 3.7991, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8841354846954346, |
| "eval_runtime": 307.8344, |
| "eval_samples_per_second": 1239.598, |
| "eval_steps_per_second": 38.738, |
| "step": 381600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3746687386945315e-05, |
| "loss": 3.7829, |
| "step": 381952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.373830143943479e-05, |
| "loss": 3.7762, |
| "step": 382464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3729931870728004e-05, |
| "loss": 3.7952, |
| "step": 382976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3721545923217484e-05, |
| "loss": 3.783, |
| "step": 383488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.371315997570696e-05, |
| "loss": 3.8023, |
| "step": 384000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.370477402819644e-05, |
| "loss": 3.7837, |
| "step": 384512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.369640445948965e-05, |
| "loss": 3.775, |
| "step": 385024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.368801851197913e-05, |
| "loss": 3.7778, |
| "step": 385536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3679632564468606e-05, |
| "loss": 3.7811, |
| "step": 386048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3671246616958086e-05, |
| "loss": 3.7898, |
| "step": 386560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.36628770482513e-05, |
| "loss": 3.7795, |
| "step": 387072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.365449110074079e-05, |
| "loss": 3.784, |
| "step": 387584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3646121532034e-05, |
| "loss": 3.7765, |
| "step": 388096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.363773558452348e-05, |
| "loss": 3.7761, |
| "step": 388608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362934963701296e-05, |
| "loss": 3.7833, |
| "step": 389120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.362096368950243e-05, |
| "loss": 3.7713, |
| "step": 389632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.361257774199191e-05, |
| "loss": 3.7721, |
| "step": 390144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.360419179448139e-05, |
| "loss": 3.7804, |
| "step": 390656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.359580584697087e-05, |
| "loss": 3.776, |
| "step": 391168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.358741989946035e-05, |
| "loss": 3.7874, |
| "step": 391680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357905033075356e-05, |
| "loss": 3.7795, |
| "step": 392192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.357066438324304e-05, |
| "loss": 3.7812, |
| "step": 392704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3562278435732526e-05, |
| "loss": 3.7828, |
| "step": 393216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3553892488222006e-05, |
| "loss": 3.7864, |
| "step": 393728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3545522919515215e-05, |
| "loss": 3.7681, |
| "step": 394240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3537136972004695e-05, |
| "loss": 3.7869, |
| "step": 394752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3528751024494175e-05, |
| "loss": 3.7707, |
| "step": 395264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3520365076983655e-05, |
| "loss": 3.7725, |
| "step": 395776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3511995508276864e-05, |
| "loss": 3.7677, |
| "step": 396288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.3503609560766344e-05, |
| "loss": 3.7708, |
| "step": 396800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3495223613255824e-05, |
| "loss": 3.7805, |
| "step": 397312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3486837665745304e-05, |
| "loss": 3.7761, |
| "step": 397824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347846809703851e-05, |
| "loss": 3.7852, |
| "step": 398336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.347009852833173e-05, |
| "loss": 3.7848, |
| "step": 398848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.346171258082121e-05, |
| "loss": 3.772, |
| "step": 399360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.345332663331069e-05, |
| "loss": 3.7774, |
| "step": 399872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.344494068580017e-05, |
| "loss": 3.7738, |
| "step": 400384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.343655473828965e-05, |
| "loss": 3.7741, |
| "step": 400896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.342816879077913e-05, |
| "loss": 3.7688, |
| "step": 401408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.341978284326861e-05, |
| "loss": 3.7706, |
| "step": 401920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.341139689575809e-05, |
| "loss": 3.7747, |
| "step": 402432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.34030273270513e-05, |
| "loss": 3.7825, |
| "step": 402944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.339464137954078e-05, |
| "loss": 3.7696, |
| "step": 403456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.338625543203026e-05, |
| "loss": 3.7702, |
| "step": 403968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.337786948451974e-05, |
| "loss": 3.7795, |
| "step": 404480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.336949991581295e-05, |
| "loss": 3.7757, |
| "step": 404992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3361113968302434e-05, |
| "loss": 3.7726, |
| "step": 405504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3352728020791914e-05, |
| "loss": 3.7666, |
| "step": 406016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3344342073281393e-05, |
| "loss": 3.7462, |
| "step": 406528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.33359725045746e-05, |
| "loss": 3.7787, |
| "step": 407040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.332758655706408e-05, |
| "loss": 3.7704, |
| "step": 407552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331920060955356e-05, |
| "loss": 3.7695, |
| "step": 408064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.331081466204304e-05, |
| "loss": 3.7581, |
| "step": 408576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.330244509333625e-05, |
| "loss": 3.7522, |
| "step": 409088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.329405914582573e-05, |
| "loss": 3.752, |
| "step": 409600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.328567319831521e-05, |
| "loss": 3.7683, |
| "step": 410112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.327728725080469e-05, |
| "loss": 3.7596, |
| "step": 410624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.32689176820979e-05, |
| "loss": 3.7657, |
| "step": 411136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.326053173458739e-05, |
| "loss": 3.7698, |
| "step": 411648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.325214578707687e-05, |
| "loss": 3.7578, |
| "step": 412160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.324375983956635e-05, |
| "loss": 3.7621, |
| "step": 412672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3235390270859556e-05, |
| "loss": 3.7666, |
| "step": 413184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3227004323349036e-05, |
| "loss": 3.7477, |
| "step": 413696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3218618375838516e-05, |
| "loss": 3.7572, |
| "step": 414208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3210248807131725e-05, |
| "loss": 3.7626, |
| "step": 414720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3201862859621205e-05, |
| "loss": 3.7685, |
| "step": 415232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3193476912110685e-05, |
| "loss": 3.7481, |
| "step": 415744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3185090964600165e-05, |
| "loss": 3.7572, |
| "step": 416256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3176721395893374e-05, |
| "loss": 3.7508, |
| "step": 416768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3168335448382854e-05, |
| "loss": 3.7695, |
| "step": 417280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.315994950087234e-05, |
| "loss": 3.7683, |
| "step": 417792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.315156355336182e-05, |
| "loss": 3.7612, |
| "step": 418304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.314319398465503e-05, |
| "loss": 3.7776, |
| "step": 418816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.313480803714451e-05, |
| "loss": 3.7706, |
| "step": 419328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.312642208963399e-05, |
| "loss": 3.7855, |
| "step": 419840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.311803614212347e-05, |
| "loss": 3.7596, |
| "step": 420352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310966657341668e-05, |
| "loss": 3.764, |
| "step": 420864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.310128062590616e-05, |
| "loss": 3.7617, |
| "step": 421376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.309289467839564e-05, |
| "loss": 3.7543, |
| "step": 421888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.308450873088512e-05, |
| "loss": 3.7699, |
| "step": 422400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.307613916217833e-05, |
| "loss": 3.762, |
| "step": 422912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.306775321466781e-05, |
| "loss": 3.7677, |
| "step": 423424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.305936726715729e-05, |
| "loss": 3.7623, |
| "step": 423936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3050981319646775e-05, |
| "loss": 3.749, |
| "step": 424448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3042611750939984e-05, |
| "loss": 3.7582, |
| "step": 424960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3034225803429464e-05, |
| "loss": 3.7521, |
| "step": 425472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3025839855918944e-05, |
| "loss": 3.7682, |
| "step": 425984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.3017453908408424e-05, |
| "loss": 3.762, |
| "step": 426496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.300908433970163e-05, |
| "loss": 3.7461, |
| "step": 427008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.300069839219111e-05, |
| "loss": 3.7684, |
| "step": 427520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.299231244468059e-05, |
| "loss": 3.7571, |
| "step": 428032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.298392649717007e-05, |
| "loss": 3.7498, |
| "step": 428544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.297555692846328e-05, |
| "loss": 3.756, |
| "step": 429056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.296717098095276e-05, |
| "loss": 3.7676, |
| "step": 429568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295878503344224e-05, |
| "loss": 3.7628, |
| "step": 430080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.295039908593173e-05, |
| "loss": 3.7651, |
| "step": 430592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.294202951722494e-05, |
| "loss": 3.7452, |
| "step": 431104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.293364356971442e-05, |
| "loss": 3.7625, |
| "step": 431616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.29252576222039e-05, |
| "loss": 3.7533, |
| "step": 432128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.291687167469338e-05, |
| "loss": 3.7653, |
| "step": 432640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2908502105986586e-05, |
| "loss": 3.7657, |
| "step": 433152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2900132537279795e-05, |
| "loss": 3.755, |
| "step": 433664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2891746589769275e-05, |
| "loss": 3.7579, |
| "step": 434176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2883360642258755e-05, |
| "loss": 3.766, |
| "step": 434688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2874974694748235e-05, |
| "loss": 3.7419, |
| "step": 435200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2866588747237715e-05, |
| "loss": 3.7498, |
| "step": 435712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2858202799727195e-05, |
| "loss": 3.7461, |
| "step": 436224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284981685221668e-05, |
| "loss": 3.7579, |
| "step": 436736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.284143090470616e-05, |
| "loss": 3.7439, |
| "step": 437248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.283306133599937e-05, |
| "loss": 3.7513, |
| "step": 437760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.282469176729258e-05, |
| "loss": 3.7483, |
| "step": 438272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.281630581978206e-05, |
| "loss": 3.7487, |
| "step": 438784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.280791987227154e-05, |
| "loss": 3.7539, |
| "step": 439296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.279953392476102e-05, |
| "loss": 3.7471, |
| "step": 439808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.27911479772505e-05, |
| "loss": 3.7611, |
| "step": 440320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.278277840854371e-05, |
| "loss": 3.7595, |
| "step": 440832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.277439246103319e-05, |
| "loss": 3.7562, |
| "step": 441344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.276600651352267e-05, |
| "loss": 3.7497, |
| "step": 441856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.275762056601215e-05, |
| "loss": 3.7585, |
| "step": 442368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2749234618501636e-05, |
| "loss": 3.7573, |
| "step": 442880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2740865049794845e-05, |
| "loss": 3.7435, |
| "step": 443392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2732479102284325e-05, |
| "loss": 3.7655, |
| "step": 443904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2724093154773805e-05, |
| "loss": 3.7413, |
| "step": 444416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2715707207263285e-05, |
| "loss": 3.7591, |
| "step": 444928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.270732125975276e-05, |
| "loss": 3.7395, |
| "step": 445440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269893531224224e-05, |
| "loss": 3.7549, |
| "step": 445952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.269054936473172e-05, |
| "loss": 3.7411, |
| "step": 446464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.268217979602493e-05, |
| "loss": 3.7451, |
| "step": 446976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.267379384851441e-05, |
| "loss": 3.7415, |
| "step": 447488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2665407901003887e-05, |
| "loss": 3.7611, |
| "step": 448000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.265702195349337e-05, |
| "loss": 3.7477, |
| "step": 448512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264863600598285e-05, |
| "loss": 3.7556, |
| "step": 449024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.264025005847233e-05, |
| "loss": 3.7431, |
| "step": 449536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.263188048976554e-05, |
| "loss": 3.7591, |
| "step": 450048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.262349454225502e-05, |
| "loss": 3.7577, |
| "step": 450560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.26151085947445e-05, |
| "loss": 3.7545, |
| "step": 451072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.260672264723398e-05, |
| "loss": 3.752, |
| "step": 451584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.259833669972346e-05, |
| "loss": 3.7501, |
| "step": 452096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258995075221294e-05, |
| "loss": 3.7521, |
| "step": 452608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.258156480470242e-05, |
| "loss": 3.7606, |
| "step": 453120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.25731788571919e-05, |
| "loss": 3.7426, |
| "step": 453632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.256480928848511e-05, |
| "loss": 3.7528, |
| "step": 454144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.255642334097459e-05, |
| "loss": 3.746, |
| "step": 454656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.254803739346407e-05, |
| "loss": 3.7497, |
| "step": 455168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253965144595356e-05, |
| "loss": 3.7501, |
| "step": 455680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.253128187724677e-05, |
| "loss": 3.7455, |
| "step": 456192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.252289592973625e-05, |
| "loss": 3.7596, |
| "step": 456704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.251450998222573e-05, |
| "loss": 3.7354, |
| "step": 457216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.2506140413518936e-05, |
| "loss": 3.7593, |
| "step": 457728 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8684678077697754, |
| "eval_runtime": 313.2617, |
| "eval_samples_per_second": 1218.122, |
| "eval_steps_per_second": 38.067, |
| "step": 457920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2497754466008416e-05, |
| "loss": 3.7403, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2489368518497896e-05, |
| "loss": 3.7321, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2480982570987376e-05, |
| "loss": 3.7491, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2472596623476856e-05, |
| "loss": 3.7433, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2464210675966336e-05, |
| "loss": 3.7567, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2455824728455816e-05, |
| "loss": 3.7415, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2447455159749025e-05, |
| "loss": 3.7352, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243906921223851e-05, |
| "loss": 3.7349, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243068326472799e-05, |
| "loss": 3.741, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.242229731721747e-05, |
| "loss": 3.749, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2413911369706945e-05, |
| "loss": 3.7378, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2405525422196424e-05, |
| "loss": 3.7403, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.239715585348964e-05, |
| "loss": 3.7409, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2388769905979114e-05, |
| "loss": 3.7342, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2380383958468593e-05, |
| "loss": 3.7401, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2371998010958073e-05, |
| "loss": 3.7285, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2363612063447553e-05, |
| "loss": 3.7326, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235522611593703e-05, |
| "loss": 3.7373, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234684016842651e-05, |
| "loss": 3.7343, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233845422091599e-05, |
| "loss": 3.7456, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233008465220921e-05, |
| "loss": 3.7451, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232169870469869e-05, |
| "loss": 3.7361, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.231331275718817e-05, |
| "loss": 3.7415, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230492680967765e-05, |
| "loss": 3.7468, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.229657361977459e-05, |
| "loss": 3.7237, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.228818767226407e-05, |
| "loss": 3.7492, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227980172475355e-05, |
| "loss": 3.7306, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227141577724303e-05, |
| "loss": 3.7292, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.226302982973251e-05, |
| "loss": 3.7261, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.225464388222199e-05, |
| "loss": 3.7334, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.224625793471147e-05, |
| "loss": 3.7359, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.223788836600468e-05, |
| "loss": 3.7391, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222950241849416e-05, |
| "loss": 3.7421, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222111647098364e-05, |
| "loss": 3.7442, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221273052347312e-05, |
| "loss": 3.7297, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.22043445759626e-05, |
| "loss": 3.7401, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219597500725581e-05, |
| "loss": 3.7375, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.218758905974529e-05, |
| "loss": 3.7294, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217920311223477e-05, |
| "loss": 3.7279, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217081716472425e-05, |
| "loss": 3.7336, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.216243121721373e-05, |
| "loss": 3.7307, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215404526970321e-05, |
| "loss": 3.7423, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214565932219269e-05, |
| "loss": 3.7304, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.213727337468217e-05, |
| "loss": 3.73, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2128920184779116e-05, |
| "loss": 3.7424, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2120534237268596e-05, |
| "loss": 3.7305, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2112148289758076e-05, |
| "loss": 3.7359, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2103762342247556e-05, |
| "loss": 3.7283, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2095376394737036e-05, |
| "loss": 3.7004, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2087006826030245e-05, |
| "loss": 3.7427, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2078620878519725e-05, |
| "loss": 3.7298, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2070234931009205e-05, |
| "loss": 3.7304, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2061848983498685e-05, |
| "loss": 3.7194, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2053463035988165e-05, |
| "loss": 3.7116, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2045077088477645e-05, |
| "loss": 3.7159, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2036691140967125e-05, |
| "loss": 3.7263, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2028321572260334e-05, |
| "loss": 3.7226, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.201993562474982e-05, |
| "loss": 3.7232, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20115496772393e-05, |
| "loss": 3.7336, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.200316372972878e-05, |
| "loss": 3.718, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1994777782218254e-05, |
| "loss": 3.7181, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1986391834707734e-05, |
| "loss": 3.7309, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1978005887197214e-05, |
| "loss": 3.7045, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196963631849042e-05, |
| "loss": 3.7207, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19612503709799e-05, |
| "loss": 3.7235, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195286442346938e-05, |
| "loss": 3.7324, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194447847595886e-05, |
| "loss": 3.706, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193609252844834e-05, |
| "loss": 3.7219, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.192770658093782e-05, |
| "loss": 3.7078, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191933701223104e-05, |
| "loss": 3.7325, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191095106472052e-05, |
| "loss": 3.7307, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.190256511721e-05, |
| "loss": 3.7256, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189417916969948e-05, |
| "loss": 3.734, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188579322218896e-05, |
| "loss": 3.7325, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.187742365348217e-05, |
| "loss": 3.7472, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186903770597165e-05, |
| "loss": 3.7224, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.186065175846113e-05, |
| "loss": 3.7275, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.185226581095061e-05, |
| "loss": 3.7256, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.184387986344009e-05, |
| "loss": 3.7142, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1835510294733297e-05, |
| "loss": 3.7276, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1827124347222776e-05, |
| "loss": 3.7259, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1818738399712256e-05, |
| "loss": 3.7275, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181035245220174e-05, |
| "loss": 3.7312, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180198288349495e-05, |
| "loss": 3.7118, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.179359693598443e-05, |
| "loss": 3.7144, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178521098847391e-05, |
| "loss": 3.7163, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177682504096339e-05, |
| "loss": 3.7332, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176843909345287e-05, |
| "loss": 3.7198, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176006952474608e-05, |
| "loss": 3.7138, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175168357723556e-05, |
| "loss": 3.7188, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174329762972504e-05, |
| "loss": 3.7273, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173491168221452e-05, |
| "loss": 3.7147, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1726525734704e-05, |
| "loss": 3.7198, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171813978719348e-05, |
| "loss": 3.7328, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170975383968296e-05, |
| "loss": 3.7236, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170136789217244e-05, |
| "loss": 3.7267, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1693014702269386e-05, |
| "loss": 3.7098, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1684628754758866e-05, |
| "loss": 3.7247, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1676242807248346e-05, |
| "loss": 3.7165, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1667856859737826e-05, |
| "loss": 3.7264, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1659487291031035e-05, |
| "loss": 3.7273, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1651101343520515e-05, |
| "loss": 3.7245, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1642715396009995e-05, |
| "loss": 3.7216, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1634329448499475e-05, |
| "loss": 3.7254, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1625943500988955e-05, |
| "loss": 3.7052, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1617557553478435e-05, |
| "loss": 3.7135, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1609187984771644e-05, |
| "loss": 3.7053, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160080203726113e-05, |
| "loss": 3.725, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.159241608975061e-05, |
| "loss": 3.708, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1584030142240084e-05, |
| "loss": 3.7121, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15756605735333e-05, |
| "loss": 3.7075, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.156727462602278e-05, |
| "loss": 3.714, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155888867851226e-05, |
| "loss": 3.7179, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155050273100173e-05, |
| "loss": 3.7095, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.154211678349121e-05, |
| "loss": 3.7224, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.153374721478443e-05, |
| "loss": 3.7287, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152536126727391e-05, |
| "loss": 3.7142, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.151697531976338e-05, |
| "loss": 3.7214, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150858937225287e-05, |
| "loss": 3.7165, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150020342474235e-05, |
| "loss": 3.7195, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149183385603556e-05, |
| "loss": 3.7128, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.148344790852504e-05, |
| "loss": 3.7227, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147506196101452e-05, |
| "loss": 3.7061, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1466676013504e-05, |
| "loss": 3.7219, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.145829006599348e-05, |
| "loss": 3.7063, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144990411848296e-05, |
| "loss": 3.7132, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144151817097244e-05, |
| "loss": 3.7111, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.143313222346192e-05, |
| "loss": 3.7077, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1424762654755126e-05, |
| "loss": 3.7062, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1416376707244606e-05, |
| "loss": 3.721, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1407990759734086e-05, |
| "loss": 3.7136, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1399604812223566e-05, |
| "loss": 3.7195, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139121886471305e-05, |
| "loss": 3.7045, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138284929600626e-05, |
| "loss": 3.7253, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137446334849574e-05, |
| "loss": 3.7201, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136607740098522e-05, |
| "loss": 3.7186, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13576914534747e-05, |
| "loss": 3.7162, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134930550596418e-05, |
| "loss": 3.7133, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.134091955845366e-05, |
| "loss": 3.716, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.133253361094314e-05, |
| "loss": 3.728, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.132414766343262e-05, |
| "loss": 3.7061, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.131577809472583e-05, |
| "loss": 3.7144, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.130740852601904e-05, |
| "loss": 3.7146, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.129902257850852e-05, |
| "loss": 3.7162, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290636630998006e-05, |
| "loss": 3.7073, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1282250683487486e-05, |
| "loss": 3.7097, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1273864735976966e-05, |
| "loss": 3.7245, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1265495167270175e-05, |
| "loss": 3.7034, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1257109219759655e-05, |
| "loss": 3.723, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.858818292617798, |
| "eval_runtime": 305.3519, |
| "eval_samples_per_second": 1249.676, |
| "eval_steps_per_second": 39.053, |
| "step": 534240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1248723272249135e-05, |
| "loss": 3.7058, |
| "step": 534528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1240337324738615e-05, |
| "loss": 3.6955, |
| "step": 535040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1231951377228095e-05, |
| "loss": 3.715, |
| "step": 535552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1223581808521304e-05, |
| "loss": 3.7045, |
| "step": 536064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1215195861010784e-05, |
| "loss": 3.7199, |
| "step": 536576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1206809913500264e-05, |
| "loss": 3.7102, |
| "step": 537088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1198423965989744e-05, |
| "loss": 3.7028, |
| "step": 537600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1190038018479224e-05, |
| "loss": 3.7005, |
| "step": 538112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1181652070968704e-05, |
| "loss": 3.7064, |
| "step": 538624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.117328250226192e-05, |
| "loss": 3.713, |
| "step": 539136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.116489655475139e-05, |
| "loss": 3.7023, |
| "step": 539648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.115651060724087e-05, |
| "loss": 3.7057, |
| "step": 540160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.114812465973035e-05, |
| "loss": 3.7048, |
| "step": 540672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.113975509102357e-05, |
| "loss": 3.699, |
| "step": 541184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.113136914351304e-05, |
| "loss": 3.7058, |
| "step": 541696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.112298319600252e-05, |
| "loss": 3.6949, |
| "step": 542208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1114597248492e-05, |
| "loss": 3.6974, |
| "step": 542720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.110621130098148e-05, |
| "loss": 3.7025, |
| "step": 543232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.10978417322747e-05, |
| "loss": 3.7023, |
| "step": 543744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108945578476418e-05, |
| "loss": 3.7072, |
| "step": 544256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.108106983725366e-05, |
| "loss": 3.7125, |
| "step": 544768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.107268388974314e-05, |
| "loss": 3.7043, |
| "step": 545280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.106429794223262e-05, |
| "loss": 3.7058, |
| "step": 545792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.10559119947221e-05, |
| "loss": 3.7077, |
| "step": 546304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.104754242601531e-05, |
| "loss": 3.6964, |
| "step": 546816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103915647850479e-05, |
| "loss": 3.7125, |
| "step": 547328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.103077053099427e-05, |
| "loss": 3.6957, |
| "step": 547840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1022384583483747e-05, |
| "loss": 3.7002, |
| "step": 548352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1013998635973227e-05, |
| "loss": 3.6956, |
| "step": 548864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.1005612688462706e-05, |
| "loss": 3.6952, |
| "step": 549376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0997243119755916e-05, |
| "loss": 3.7003, |
| "step": 549888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0988857172245396e-05, |
| "loss": 3.7078, |
| "step": 550400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0980471224734875e-05, |
| "loss": 3.7049, |
| "step": 550912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.097208527722436e-05, |
| "loss": 3.7087, |
| "step": 551424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.096371570851757e-05, |
| "loss": 3.6954, |
| "step": 551936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.095532976100705e-05, |
| "loss": 3.7075, |
| "step": 552448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.094694381349653e-05, |
| "loss": 3.7045, |
| "step": 552960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.093855786598601e-05, |
| "loss": 3.6933, |
| "step": 553472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.093018829727922e-05, |
| "loss": 3.6985, |
| "step": 553984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.09218023497687e-05, |
| "loss": 3.6991, |
| "step": 554496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.091341640225818e-05, |
| "loss": 3.696, |
| "step": 555008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.090503045474766e-05, |
| "loss": 3.7075, |
| "step": 555520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.089664450723714e-05, |
| "loss": 3.698, |
| "step": 556032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.088827493853035e-05, |
| "loss": 3.6956, |
| "step": 556544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.087988899101983e-05, |
| "loss": 3.7074, |
| "step": 557056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0871503043509316e-05, |
| "loss": 3.697, |
| "step": 557568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0863117095998796e-05, |
| "loss": 3.7005, |
| "step": 558080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0854731148488276e-05, |
| "loss": 3.6975, |
| "step": 558592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0846345200977756e-05, |
| "loss": 3.6661, |
| "step": 559104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0837975632270965e-05, |
| "loss": 3.7091, |
| "step": 559616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0829589684760445e-05, |
| "loss": 3.694, |
| "step": 560128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0821203737249925e-05, |
| "loss": 3.7012, |
| "step": 560640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0812817789739405e-05, |
| "loss": 3.6831, |
| "step": 561152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.080443184222888e-05, |
| "loss": 3.6799, |
| "step": 561664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.079604589471836e-05, |
| "loss": 3.681, |
| "step": 562176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.078765994720784e-05, |
| "loss": 3.6904, |
| "step": 562688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0779290378501054e-05, |
| "loss": 3.6933, |
| "step": 563200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0770904430990534e-05, |
| "loss": 3.6875, |
| "step": 563712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0762518483480014e-05, |
| "loss": 3.706, |
| "step": 564224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0754132535969494e-05, |
| "loss": 3.6847, |
| "step": 564736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.07457629672627e-05, |
| "loss": 3.6871, |
| "step": 565248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.073737701975218e-05, |
| "loss": 3.6989, |
| "step": 565760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072899107224166e-05, |
| "loss": 3.6738, |
| "step": 566272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.072060512473114e-05, |
| "loss": 3.6854, |
| "step": 566784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.071223555602435e-05, |
| "loss": 3.6917, |
| "step": 567296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.070384960851383e-05, |
| "loss": 3.7, |
| "step": 567808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.069546366100331e-05, |
| "loss": 3.6752, |
| "step": 568320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.068707771349279e-05, |
| "loss": 3.6869, |
| "step": 568832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067869176598227e-05, |
| "loss": 3.6768, |
| "step": 569344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.067032219727549e-05, |
| "loss": 3.7017, |
| "step": 569856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.066193624976497e-05, |
| "loss": 3.6969, |
| "step": 570368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.065355030225445e-05, |
| "loss": 3.6941, |
| "step": 570880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.064516435474393e-05, |
| "loss": 3.6991, |
| "step": 571392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.063677840723341e-05, |
| "loss": 3.7004, |
| "step": 571904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0628408838526616e-05, |
| "loss": 3.7141, |
| "step": 572416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0620022891016096e-05, |
| "loss": 3.6894, |
| "step": 572928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0611636943505576e-05, |
| "loss": 3.6957, |
| "step": 573440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0603250995995056e-05, |
| "loss": 3.6945, |
| "step": 573952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0594881427288265e-05, |
| "loss": 3.6811, |
| "step": 574464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0586495479777745e-05, |
| "loss": 3.6942, |
| "step": 574976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0578109532267225e-05, |
| "loss": 3.6974, |
| "step": 575488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.0569723584756705e-05, |
| "loss": 3.6965, |
| "step": 576000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.056133763724619e-05, |
| "loss": 3.698, |
| "step": 576512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.05529680685394e-05, |
| "loss": 3.6838, |
| "step": 577024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.054458212102888e-05, |
| "loss": 3.6798, |
| "step": 577536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.053619617351836e-05, |
| "loss": 3.6852, |
| "step": 578048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.052781022600784e-05, |
| "loss": 3.7004, |
| "step": 578560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051942427849732e-05, |
| "loss": 3.6902, |
| "step": 579072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.051105470979053e-05, |
| "loss": 3.6825, |
| "step": 579584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.050266876228001e-05, |
| "loss": 3.6874, |
| "step": 580096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.049428281476949e-05, |
| "loss": 3.6949, |
| "step": 580608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.048589686725897e-05, |
| "loss": 3.6854, |
| "step": 581120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.047752729855218e-05, |
| "loss": 3.6855, |
| "step": 581632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.046914135104166e-05, |
| "loss": 3.7009, |
| "step": 582144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.046075540353114e-05, |
| "loss": 3.6928, |
| "step": 582656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0452369456020625e-05, |
| "loss": 3.693, |
| "step": 583168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0443983508510105e-05, |
| "loss": 3.6824, |
| "step": 583680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0435597560999585e-05, |
| "loss": 3.6875, |
| "step": 584192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0427227992292794e-05, |
| "loss": 3.683, |
| "step": 584704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0418842044782274e-05, |
| "loss": 3.6986, |
| "step": 585216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0410456097271754e-05, |
| "loss": 3.6941, |
| "step": 585728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0402070149761234e-05, |
| "loss": 3.6952, |
| "step": 586240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.039370058105444e-05, |
| "loss": 3.6916, |
| "step": 586752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.038531463354392e-05, |
| "loss": 3.6916, |
| "step": 587264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.03769286860334e-05, |
| "loss": 3.6818, |
| "step": 587776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036854273852288e-05, |
| "loss": 3.679, |
| "step": 588288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.036017316981609e-05, |
| "loss": 3.6778, |
| "step": 588800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.035178722230558e-05, |
| "loss": 3.6939, |
| "step": 589312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.034340127479506e-05, |
| "loss": 3.6773, |
| "step": 589824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.033501532728454e-05, |
| "loss": 3.6778, |
| "step": 590336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.032662937977401e-05, |
| "loss": 3.6746, |
| "step": 590848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.031825981106723e-05, |
| "loss": 3.6876, |
| "step": 591360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030987386355671e-05, |
| "loss": 3.6854, |
| "step": 591872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.030148791604618e-05, |
| "loss": 3.6755, |
| "step": 592384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.029310196853566e-05, |
| "loss": 3.6947, |
| "step": 592896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.028473239982888e-05, |
| "loss": 3.6927, |
| "step": 593408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.027634645231836e-05, |
| "loss": 3.6834, |
| "step": 593920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.026796050480783e-05, |
| "loss": 3.6927, |
| "step": 594432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.025957455729732e-05, |
| "loss": 3.6874, |
| "step": 594944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.02511886097868e-05, |
| "loss": 3.6851, |
| "step": 595456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.024281904108001e-05, |
| "loss": 3.685, |
| "step": 595968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0234433093569486e-05, |
| "loss": 3.6888, |
| "step": 596480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0226047146058966e-05, |
| "loss": 3.676, |
| "step": 596992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0217661198548446e-05, |
| "loss": 3.6897, |
| "step": 597504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0209291629841655e-05, |
| "loss": 3.6798, |
| "step": 598016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0200905682331135e-05, |
| "loss": 3.6834, |
| "step": 598528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0192519734820615e-05, |
| "loss": 3.677, |
| "step": 599040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0184133787310095e-05, |
| "loss": 3.6757, |
| "step": 599552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0175764218603304e-05, |
| "loss": 3.6764, |
| "step": 600064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0167378271092784e-05, |
| "loss": 3.6901, |
| "step": 600576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015899232358227e-05, |
| "loss": 3.6823, |
| "step": 601088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.015060637607175e-05, |
| "loss": 3.6919, |
| "step": 601600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.014223680736496e-05, |
| "loss": 3.6719, |
| "step": 602112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.013385085985444e-05, |
| "loss": 3.6982, |
| "step": 602624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.012546491234392e-05, |
| "loss": 3.6857, |
| "step": 603136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.01170789648334e-05, |
| "loss": 3.6885, |
| "step": 603648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010869301732288e-05, |
| "loss": 3.6875, |
| "step": 604160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.010032344861609e-05, |
| "loss": 3.6815, |
| "step": 604672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.009193750110557e-05, |
| "loss": 3.6847, |
| "step": 605184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.008355155359505e-05, |
| "loss": 3.6966, |
| "step": 605696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.007516560608453e-05, |
| "loss": 3.6762, |
| "step": 606208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.006679603737774e-05, |
| "loss": 3.6856, |
| "step": 606720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0058410089867224e-05, |
| "loss": 3.6864, |
| "step": 607232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0050024142356704e-05, |
| "loss": 3.6807, |
| "step": 607744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.0041638194846184e-05, |
| "loss": 3.6783, |
| "step": 608256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.003326862613939e-05, |
| "loss": 3.6811, |
| "step": 608768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.002488267862887e-05, |
| "loss": 3.6929, |
| "step": 609280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.001649673111835e-05, |
| "loss": 3.6761, |
| "step": 609792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.000811078360783e-05, |
| "loss": 3.6924, |
| "step": 610304 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.851468563079834, |
| "eval_runtime": 305.33, |
| "eval_samples_per_second": 1249.766, |
| "eval_steps_per_second": 39.056, |
| "step": 610560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999972483609731e-05, |
| "loss": 3.6725, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999133888858679e-05, |
| "loss": 3.6706, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998295294107627e-05, |
| "loss": 3.6791, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.997456699356575e-05, |
| "loss": 3.6789, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.996619742485896e-05, |
| "loss": 3.6921, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995781147734844e-05, |
| "loss": 3.6783, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994942552983792e-05, |
| "loss": 3.673, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994103958232741e-05, |
| "loss": 3.6734, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.993265363481689e-05, |
| "loss": 3.6722, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.99242840661101e-05, |
| "loss": 3.6866, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.991589811859958e-05, |
| "loss": 3.6727, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.990751217108906e-05, |
| "loss": 3.6726, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989912622357854e-05, |
| "loss": 3.6829, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.989075665487175e-05, |
| "loss": 3.6652, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9882370707361227e-05, |
| "loss": 3.6712, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9873984759850707e-05, |
| "loss": 3.6694, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9865598812340187e-05, |
| "loss": 3.6669, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9857229243633396e-05, |
| "loss": 3.6729, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9848843296122876e-05, |
| "loss": 3.6714, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9840457348612356e-05, |
| "loss": 3.6738, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.983207140110184e-05, |
| "loss": 3.6867, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982370183239505e-05, |
| "loss": 3.6752, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.981531588488453e-05, |
| "loss": 3.6785, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980692993737401e-05, |
| "loss": 3.6731, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979854398986349e-05, |
| "loss": 3.6696, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9790158042352964e-05, |
| "loss": 3.681, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978178847364618e-05, |
| "loss": 3.6691, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.977340252613566e-05, |
| "loss": 3.673, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.976501657862514e-05, |
| "loss": 3.6623, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.975663063111461e-05, |
| "loss": 3.6688, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.974826106240783e-05, |
| "loss": 3.668, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973987511489731e-05, |
| "loss": 3.6769, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.973148916738679e-05, |
| "loss": 3.6799, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.972310321987627e-05, |
| "loss": 3.6829, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.971471727236575e-05, |
| "loss": 3.6691, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9706347703658965e-05, |
| "loss": 3.6723, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.969796175614844e-05, |
| "loss": 3.678, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.968957580863792e-05, |
| "loss": 3.6676, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.96811898611274e-05, |
| "loss": 3.6665, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9672820292420614e-05, |
| "loss": 3.6679, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.966443434491009e-05, |
| "loss": 3.6692, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.965604839739957e-05, |
| "loss": 3.6705, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.964766244988905e-05, |
| "loss": 3.6764, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963929288118226e-05, |
| "loss": 3.6696, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963090693367174e-05, |
| "loss": 3.6774, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.962252098616122e-05, |
| "loss": 3.6738, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.96141350386507e-05, |
| "loss": 3.6687, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960576546994391e-05, |
| "loss": 3.6707, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959737952243339e-05, |
| "loss": 3.636, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958899357492287e-05, |
| "loss": 3.6817, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958060762741235e-05, |
| "loss": 3.6636, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.957223805870556e-05, |
| "loss": 3.6724, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956385211119504e-05, |
| "loss": 3.6576, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.955546616368452e-05, |
| "loss": 3.6524, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9547080216174e-05, |
| "loss": 3.6488, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9538710647467217e-05, |
| "loss": 3.6609, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9530324699956696e-05, |
| "loss": 3.666, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9521938752446176e-05, |
| "loss": 3.6605, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9513552804935656e-05, |
| "loss": 3.6739, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9505166857425136e-05, |
| "loss": 3.6615, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9496797288718345e-05, |
| "loss": 3.6577, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9488411341207825e-05, |
| "loss": 3.6688, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9480025393697305e-05, |
| "loss": 3.6481, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9471639446186785e-05, |
| "loss": 3.6561, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9463269877479994e-05, |
| "loss": 3.6654, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9454883929969474e-05, |
| "loss": 3.6677, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9446497982458954e-05, |
| "loss": 3.6477, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.943812841375217e-05, |
| "loss": 3.6569, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.942974246624165e-05, |
| "loss": 3.6494, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.942135651873113e-05, |
| "loss": 3.6721, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.941297057122061e-05, |
| "loss": 3.6701, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.940460100251382e-05, |
| "loss": 3.6646, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.93962150550033e-05, |
| "loss": 3.6732, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.938782910749278e-05, |
| "loss": 3.6711, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937944315998226e-05, |
| "loss": 3.6862, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.937107359127547e-05, |
| "loss": 3.6639, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.936268764376495e-05, |
| "loss": 3.6669, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.935430169625443e-05, |
| "loss": 3.6614, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.934591574874391e-05, |
| "loss": 3.6601, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9337546180037124e-05, |
| "loss": 3.663, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9329160232526604e-05, |
| "loss": 3.6701, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9320774285016084e-05, |
| "loss": 3.6681, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9312388337505564e-05, |
| "loss": 3.6734, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.930401876879877e-05, |
| "loss": 3.6533, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.929563282128825e-05, |
| "loss": 3.6515, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.928724687377773e-05, |
| "loss": 3.6554, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927886092626721e-05, |
| "loss": 3.6753, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927047497875669e-05, |
| "loss": 3.6655, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.92621054100499e-05, |
| "loss": 3.6529, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.925371946253938e-05, |
| "loss": 3.6592, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.924533351502886e-05, |
| "loss": 3.6656, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.923694756751835e-05, |
| "loss": 3.6587, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.922857799881156e-05, |
| "loss": 3.6558, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.922019205130104e-05, |
| "loss": 3.6733, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.921180610379052e-05, |
| "loss": 3.6656, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.920342015628e-05, |
| "loss": 3.6655, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9195050587573206e-05, |
| "loss": 3.6557, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9186664640062686e-05, |
| "loss": 3.6591, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9178278692552166e-05, |
| "loss": 3.6579, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9169892745041646e-05, |
| "loss": 3.6702, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9161506797531126e-05, |
| "loss": 3.666, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9153137228824335e-05, |
| "loss": 3.6662, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9144751281313815e-05, |
| "loss": 3.6667, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.91363653338033e-05, |
| "loss": 3.6596, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912797938629278e-05, |
| "loss": 3.6588, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911960981758599e-05, |
| "loss": 3.6499, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911122387007547e-05, |
| "loss": 3.6465, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910283792256495e-05, |
| "loss": 3.6688, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909445197505443e-05, |
| "loss": 3.654, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.908608240634764e-05, |
| "loss": 3.648, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907769645883712e-05, |
| "loss": 3.6486, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90693105113266e-05, |
| "loss": 3.6527, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906092456381608e-05, |
| "loss": 3.6627, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.905255499510929e-05, |
| "loss": 3.6519, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.904416904759877e-05, |
| "loss": 3.6637, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9035783100088256e-05, |
| "loss": 3.6661, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9027397152577736e-05, |
| "loss": 3.6613, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9019011205067216e-05, |
| "loss": 3.6629, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9010641636360425e-05, |
| "loss": 3.6584, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9002255688849905e-05, |
| "loss": 3.6589, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8993869741339385e-05, |
| "loss": 3.6606, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8985483793828865e-05, |
| "loss": 3.6618, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8977114225122074e-05, |
| "loss": 3.6544, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8968728277611554e-05, |
| "loss": 3.6617, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8960342330101034e-05, |
| "loss": 3.6556, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8951956382590514e-05, |
| "loss": 3.6539, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894358681388372e-05, |
| "loss": 3.6507, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.89352008663732e-05, |
| "loss": 3.6493, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.892681491886269e-05, |
| "loss": 3.6518, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891842897135217e-05, |
| "loss": 3.6614, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891004302384164e-05, |
| "loss": 3.6555, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890167345513486e-05, |
| "loss": 3.6693, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.889328750762434e-05, |
| "loss": 3.6434, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.888490156011381e-05, |
| "loss": 3.669, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.887651561260329e-05, |
| "loss": 3.6588, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.886814604389651e-05, |
| "loss": 3.6618, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885976009638599e-05, |
| "loss": 3.6607, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885137414887546e-05, |
| "loss": 3.6539, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.884298820136494e-05, |
| "loss": 3.6596, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8834618632658156e-05, |
| "loss": 3.6686, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.882623268514764e-05, |
| "loss": 3.6499, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8817846737637116e-05, |
| "loss": 3.6603, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8809460790126596e-05, |
| "loss": 3.6564, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880109122141981e-05, |
| "loss": 3.6545, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8792705273909285e-05, |
| "loss": 3.6548, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8784319326398765e-05, |
| "loss": 3.6536, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8775933378888245e-05, |
| "loss": 3.6638, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.876756381018146e-05, |
| "loss": 3.6526, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8759177862670934e-05, |
| "loss": 3.6614, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8477835655212402, |
| "eval_runtime": 309.3615, |
| "eval_samples_per_second": 1233.479, |
| "eval_steps_per_second": 38.547, |
| "step": 686880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8750791915160414e-05, |
| "loss": 3.6471, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8742405967649894e-05, |
| "loss": 3.6441, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.873402002013938e-05, |
| "loss": 3.6531, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.872565045143259e-05, |
| "loss": 3.6523, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.871726450392207e-05, |
| "loss": 3.6682, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870887855641155e-05, |
| "loss": 3.6521, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.870049260890103e-05, |
| "loss": 3.6487, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.869212304019424e-05, |
| "loss": 3.6483, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.868373709268372e-05, |
| "loss": 3.644, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86753511451732e-05, |
| "loss": 3.6606, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.866696519766268e-05, |
| "loss": 3.6459, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865859562895589e-05, |
| "loss": 3.6498, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865020968144537e-05, |
| "loss": 3.6567, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864182373393485e-05, |
| "loss": 3.6401, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8633437786424334e-05, |
| "loss": 3.6457, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8625068217717544e-05, |
| "loss": 3.6431, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8616682270207024e-05, |
| "loss": 3.6431, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8608296322696503e-05, |
| "loss": 3.6424, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8599910375185983e-05, |
| "loss": 3.6485, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859154080647919e-05, |
| "loss": 3.6488, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.858315485896867e-05, |
| "loss": 3.6609, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.857476891145815e-05, |
| "loss": 3.6484, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.856638296394763e-05, |
| "loss": 3.6533, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.855799701643711e-05, |
| "loss": 3.6465, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854962744773032e-05, |
| "loss": 3.6484, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.85412415002198e-05, |
| "loss": 3.653, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.853285555270929e-05, |
| "loss": 3.6416, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.852446960519877e-05, |
| "loss": 3.6508, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.851608365768825e-05, |
| "loss": 3.6344, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.850769771017773e-05, |
| "loss": 3.6427, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849932814147094e-05, |
| "loss": 3.6448, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849094219396042e-05, |
| "loss": 3.6495, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84825562464499e-05, |
| "loss": 3.655, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8474186677743106e-05, |
| "loss": 3.6559, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8465800730232586e-05, |
| "loss": 3.6451, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8457414782722066e-05, |
| "loss": 3.6401, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8449028835211546e-05, |
| "loss": 3.6565, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8440642887701026e-05, |
| "loss": 3.6435, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.843227331899424e-05, |
| "loss": 3.6415, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.842388737148372e-05, |
| "loss": 3.6428, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84155014239732e-05, |
| "loss": 3.6458, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.840711547646268e-05, |
| "loss": 3.6461, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839874590775589e-05, |
| "loss": 3.6488, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.839035996024537e-05, |
| "loss": 3.648, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.838197401273485e-05, |
| "loss": 3.6465, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.837358806522433e-05, |
| "loss": 3.649, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.836521849651754e-05, |
| "loss": 3.642, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.835683254900702e-05, |
| "loss": 3.6461, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.83484466014965e-05, |
| "loss": 3.6117, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.834006065398598e-05, |
| "loss": 3.6531, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8331691085279195e-05, |
| "loss": 3.6377, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8323305137768675e-05, |
| "loss": 3.6527, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8314919190258155e-05, |
| "loss": 3.6325, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8306533242747635e-05, |
| "loss": 3.6266, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8298163674040844e-05, |
| "loss": 3.6233, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8289777726530324e-05, |
| "loss": 3.6343, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8281391779019804e-05, |
| "loss": 3.6484, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8273005831509284e-05, |
| "loss": 3.6304, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8264636262802493e-05, |
| "loss": 3.6514, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.825625031529197e-05, |
| "loss": 3.6392, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.824786436778145e-05, |
| "loss": 3.6251, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823947842027093e-05, |
| "loss": 3.6448, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823109247276041e-05, |
| "loss": 3.6283, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822272290405363e-05, |
| "loss": 3.6273, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.821433695654311e-05, |
| "loss": 3.6422, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.820595100903259e-05, |
| "loss": 3.6447, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.819756506152207e-05, |
| "loss": 3.6273, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818919549281528e-05, |
| "loss": 3.6317, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818080954530476e-05, |
| "loss": 3.6256, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.817242359779424e-05, |
| "loss": 3.6445, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.816403765028371e-05, |
| "loss": 3.6454, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.815566808157693e-05, |
| "loss": 3.6397, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.814728213406641e-05, |
| "loss": 3.6456, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813889618655589e-05, |
| "loss": 3.6487, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813051023904537e-05, |
| "loss": 3.66, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.812214067033858e-05, |
| "loss": 3.6407, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.811375472282806e-05, |
| "loss": 3.6452, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.810536877531754e-05, |
| "loss": 3.6354, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8096982827807016e-05, |
| "loss": 3.6371, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.808861325910023e-05, |
| "loss": 3.6377, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.808022731158971e-05, |
| "loss": 3.6479, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8071841364079185e-05, |
| "loss": 3.641, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8063455416568665e-05, |
| "loss": 3.6511, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.805508584786188e-05, |
| "loss": 3.6253, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804669990035136e-05, |
| "loss": 3.6279, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8038313952840834e-05, |
| "loss": 3.6347, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.802992800533032e-05, |
| "loss": 3.6448, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8021558436623536e-05, |
| "loss": 3.6415, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8013172489113016e-05, |
| "loss": 3.6312, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800478654160249e-05, |
| "loss": 3.6349, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799640059409197e-05, |
| "loss": 3.641, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7988031025385185e-05, |
| "loss": 3.6337, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797964507787466e-05, |
| "loss": 3.6306, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797125913036414e-05, |
| "loss": 3.6484, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.796287318285362e-05, |
| "loss": 3.6424, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7954503614146834e-05, |
| "loss": 3.6428, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.794611766663631e-05, |
| "loss": 3.6326, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.793773171912579e-05, |
| "loss": 3.6349, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7929345771615274e-05, |
| "loss": 3.6319, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.792097620290849e-05, |
| "loss": 3.6464, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.791259025539796e-05, |
| "loss": 3.6446, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.790420430788744e-05, |
| "loss": 3.6377, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.789581836037692e-05, |
| "loss": 3.6413, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.788744879167013e-05, |
| "loss": 3.6363, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.787906284415961e-05, |
| "loss": 3.638, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.787067689664909e-05, |
| "loss": 3.6231, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.786229094913857e-05, |
| "loss": 3.626, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.785392138043178e-05, |
| "loss": 3.6392, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.784553543292126e-05, |
| "loss": 3.6345, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.783714948541074e-05, |
| "loss": 3.6223, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782876353790023e-05, |
| "loss": 3.6268, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782039396919344e-05, |
| "loss": 3.6277, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.781200802168292e-05, |
| "loss": 3.6363, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.78036220741724e-05, |
| "loss": 3.6266, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.779523612666188e-05, |
| "loss": 3.6423, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7786866557955086e-05, |
| "loss": 3.6415, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7778480610444566e-05, |
| "loss": 3.638, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7770094662934046e-05, |
| "loss": 3.634, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7761708715423526e-05, |
| "loss": 3.6384, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7753339146716735e-05, |
| "loss": 3.636, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7744953199206215e-05, |
| "loss": 3.637, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7736567251695695e-05, |
| "loss": 3.6365, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.772818130418518e-05, |
| "loss": 3.6281, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771981173547839e-05, |
| "loss": 3.6372, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.771142578796787e-05, |
| "loss": 3.6345, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.770303984045735e-05, |
| "loss": 3.627, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.769465389294683e-05, |
| "loss": 3.6309, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.768628432424004e-05, |
| "loss": 3.6276, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.767789837672952e-05, |
| "loss": 3.6223, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7669512429219e-05, |
| "loss": 3.6399, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766112648170848e-05, |
| "loss": 3.6309, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765275691300169e-05, |
| "loss": 3.6457, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.764437096549117e-05, |
| "loss": 3.6187, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.763598501798065e-05, |
| "loss": 3.6416, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7627599070470135e-05, |
| "loss": 3.6359, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7619229501763344e-05, |
| "loss": 3.6393, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7610843554252824e-05, |
| "loss": 3.6359, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7602457606742304e-05, |
| "loss": 3.6287, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7594071659231784e-05, |
| "loss": 3.6411, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.758570209052499e-05, |
| "loss": 3.6424, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.757731614301447e-05, |
| "loss": 3.6292, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756893019550395e-05, |
| "loss": 3.634, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.756054424799343e-05, |
| "loss": 3.6337, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755217467928664e-05, |
| "loss": 3.631, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.754378873177612e-05, |
| "loss": 3.631, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75354027842656e-05, |
| "loss": 3.6271, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.752701683675509e-05, |
| "loss": 3.6396, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75186472680483e-05, |
| "loss": 3.6327, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751026132053778e-05, |
| "loss": 3.6384, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8437793254852295, |
| "eval_runtime": 309.2716, |
| "eval_samples_per_second": 1233.838, |
| "eval_steps_per_second": 38.558, |
| "step": 763200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.750187537302726e-05, |
| "loss": 3.6201, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.749348942551674e-05, |
| "loss": 3.6218, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.748510347800622e-05, |
| "loss": 3.6266, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.74767175304957e-05, |
| "loss": 3.6288, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.746834796178891e-05, |
| "loss": 3.6438, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.745996201427839e-05, |
| "loss": 3.6304, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.745157606676787e-05, |
| "loss": 3.6243, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.744319011925735e-05, |
| "loss": 3.6261, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.743480417174683e-05, |
| "loss": 3.621, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.742643460304004e-05, |
| "loss": 3.6333, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.741804865552952e-05, |
| "loss": 3.625, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7409662708019e-05, |
| "loss": 3.6304, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.740127676050848e-05, |
| "loss": 3.6327, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.739290719180169e-05, |
| "loss": 3.6161, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.738452124429117e-05, |
| "loss": 3.6233, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.737613529678065e-05, |
| "loss": 3.6152, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.736774934927013e-05, |
| "loss": 3.6259, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735937978056334e-05, |
| "loss": 3.6193, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735099383305282e-05, |
| "loss": 3.6243, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.73426078855423e-05, |
| "loss": 3.6228, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.733422193803178e-05, |
| "loss": 3.6363, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7325852369324996e-05, |
| "loss": 3.6242, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7317466421814476e-05, |
| "loss": 3.6317, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7309080474303956e-05, |
| "loss": 3.6262, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7300694526793436e-05, |
| "loss": 3.6253, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7292324958086645e-05, |
| "loss": 3.6317, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7283939010576125e-05, |
| "loss": 3.6161, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7275553063065605e-05, |
| "loss": 3.6251, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7267167115555085e-05, |
| "loss": 3.615, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7258797546848294e-05, |
| "loss": 3.6171, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7250411599337774e-05, |
| "loss": 3.6249, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7242025651827254e-05, |
| "loss": 3.6278, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7233639704316734e-05, |
| "loss": 3.6255, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.722527013560994e-05, |
| "loss": 3.6376, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.721688418809943e-05, |
| "loss": 3.6185, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720849824058891e-05, |
| "loss": 3.6199, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720011229307839e-05, |
| "loss": 3.6345, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.71917427243716e-05, |
| "loss": 3.6219, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.718335677686108e-05, |
| "loss": 3.6199, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.717497082935056e-05, |
| "loss": 3.6223, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.716658488184004e-05, |
| "loss": 3.6165, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.715821531313325e-05, |
| "loss": 3.6241, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714982936562273e-05, |
| "loss": 3.6217, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714144341811221e-05, |
| "loss": 3.6268, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.713305747060168e-05, |
| "loss": 3.6212, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.71246879018949e-05, |
| "loss": 3.6288, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7116301954384384e-05, |
| "loss": 3.6195, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7107916006873863e-05, |
| "loss": 3.627, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709953005936334e-05, |
| "loss": 3.5925, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.709116049065655e-05, |
| "loss": 3.6305, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.708277454314603e-05, |
| "loss": 3.6122, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.707438859563551e-05, |
| "loss": 3.628, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7066002648124986e-05, |
| "loss": 3.6103, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.70576330794182e-05, |
| "loss": 3.6056, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.704924713190768e-05, |
| "loss": 3.6021, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7040861184397155e-05, |
| "loss": 3.6115, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7032475236886635e-05, |
| "loss": 3.6252, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.702410566817985e-05, |
| "loss": 3.6075, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.701571972066934e-05, |
| "loss": 3.6288, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.700733377315881e-05, |
| "loss": 3.6198, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699894782564829e-05, |
| "loss": 3.6017, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6990578256941506e-05, |
| "loss": 3.6224, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6982192309430986e-05, |
| "loss": 3.6046, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.697380636192046e-05, |
| "loss": 3.6071, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.696542041440994e-05, |
| "loss": 3.6194, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6957050845703155e-05, |
| "loss": 3.619, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694866489819263e-05, |
| "loss": 3.6008, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694027895068211e-05, |
| "loss": 3.6139, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.693189300317159e-05, |
| "loss": 3.6051, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6923523434464804e-05, |
| "loss": 3.6163, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6915137486954284e-05, |
| "loss": 3.6249, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6906751539443764e-05, |
| "loss": 3.6172, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6898365591933244e-05, |
| "loss": 3.6255, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.688999602322646e-05, |
| "loss": 3.6224, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.688161007571593e-05, |
| "loss": 3.6377, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.687322412820541e-05, |
| "loss": 3.6202, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.686483818069489e-05, |
| "loss": 3.6259, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.68564686119881e-05, |
| "loss": 3.6094, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.684808266447758e-05, |
| "loss": 3.6148, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683969671696706e-05, |
| "loss": 3.6164, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683131076945654e-05, |
| "loss": 3.6221, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.682294120074976e-05, |
| "loss": 3.6222, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.681455525323924e-05, |
| "loss": 3.6283, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.680616930572872e-05, |
| "loss": 3.6041, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.67977833582182e-05, |
| "loss": 3.6091, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678941378951141e-05, |
| "loss": 3.6078, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678102784200089e-05, |
| "loss": 3.6249, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.677264189449037e-05, |
| "loss": 3.6178, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.676425594697985e-05, |
| "loss": 3.611, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6755886378273056e-05, |
| "loss": 3.6144, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6747500430762536e-05, |
| "loss": 3.6179, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6739114483252016e-05, |
| "loss": 3.6108, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6730728535741496e-05, |
| "loss": 3.6084, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.672235896703471e-05, |
| "loss": 3.6249, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.671397301952419e-05, |
| "loss": 3.6246, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.670558707201367e-05, |
| "loss": 3.6209, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.669720112450315e-05, |
| "loss": 3.6083, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.668883155579636e-05, |
| "loss": 3.6128, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.668044560828584e-05, |
| "loss": 3.6144, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.667205966077532e-05, |
| "loss": 3.6211, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.66636737132648e-05, |
| "loss": 3.6248, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.665530414455801e-05, |
| "loss": 3.6137, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.664691819704749e-05, |
| "loss": 3.6205, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663853224953697e-05, |
| "loss": 3.6163, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663014630202645e-05, |
| "loss": 3.615, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6621776733319665e-05, |
| "loss": 3.6045, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6613390785809145e-05, |
| "loss": 3.604, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6605004838298625e-05, |
| "loss": 3.6151, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6596618890788105e-05, |
| "loss": 3.6133, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6588249322081314e-05, |
| "loss": 3.6025, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6579863374570794e-05, |
| "loss": 3.6072, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6571477427060274e-05, |
| "loss": 3.6016, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6563091479549754e-05, |
| "loss": 3.6173, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655472191084296e-05, |
| "loss": 3.6077, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.654633596333244e-05, |
| "loss": 3.6142, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.653795001582192e-05, |
| "loss": 3.6213, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65295640683114e-05, |
| "loss": 3.6164, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652119449960462e-05, |
| "loss": 3.6121, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.65128085520941e-05, |
| "loss": 3.6214, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.650442260458358e-05, |
| "loss": 3.6143, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.649603665707306e-05, |
| "loss": 3.6173, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.648766708836627e-05, |
| "loss": 3.6113, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.647928114085575e-05, |
| "loss": 3.61, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.647089519334523e-05, |
| "loss": 3.6162, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.646250924583471e-05, |
| "loss": 3.6112, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.645413967712792e-05, |
| "loss": 3.6072, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.64457537296174e-05, |
| "loss": 3.6075, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.643736778210688e-05, |
| "loss": 3.6011, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6428981834596357e-05, |
| "loss": 3.604, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.642061226588957e-05, |
| "loss": 3.6172, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.641222631837905e-05, |
| "loss": 3.6099, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.640384037086853e-05, |
| "loss": 3.6219, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.639545442335801e-05, |
| "loss": 3.6017, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.638708485465122e-05, |
| "loss": 3.6225, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.63786989071407e-05, |
| "loss": 3.6148, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.637031295963018e-05, |
| "loss": 3.6152, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636192701211966e-05, |
| "loss": 3.6134, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.635355744341287e-05, |
| "loss": 3.6073, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.634517149590235e-05, |
| "loss": 3.622, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.633678554839183e-05, |
| "loss": 3.6227, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632839960088131e-05, |
| "loss": 3.6053, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6320030032174526e-05, |
| "loss": 3.6135, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6311644084664006e-05, |
| "loss": 3.6151, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6303258137153486e-05, |
| "loss": 3.6097, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6294872189642966e-05, |
| "loss": 3.608, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6286502620936175e-05, |
| "loss": 3.6094, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6278116673425655e-05, |
| "loss": 3.6158, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6269730725915135e-05, |
| "loss": 3.6141, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6261344778404615e-05, |
| "loss": 3.615, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8439347743988037, |
| "eval_runtime": 303.6481, |
| "eval_samples_per_second": 1256.688, |
| "eval_steps_per_second": 39.272, |
| "step": 839520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6252958830894095e-05, |
| "loss": 3.5984, |
| "step": 839680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6244572883383575e-05, |
| "loss": 3.5993, |
| "step": 840192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6236186935873055e-05, |
| "loss": 3.6091, |
| "step": 840704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.622780098836253e-05, |
| "loss": 3.6092, |
| "step": 841216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6219431419655744e-05, |
| "loss": 3.6197, |
| "step": 841728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.621104547214523e-05, |
| "loss": 3.6099, |
| "step": 842240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.620265952463471e-05, |
| "loss": 3.6056, |
| "step": 842752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6194273577124184e-05, |
| "loss": 3.605, |
| "step": 843264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.61859040084174e-05, |
| "loss": 3.5984, |
| "step": 843776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.617751806090688e-05, |
| "loss": 3.6089, |
| "step": 844288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.616913211339636e-05, |
| "loss": 3.6112, |
| "step": 844800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.616074616588583e-05, |
| "loss": 3.6056, |
| "step": 845312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.615239297598278e-05, |
| "loss": 3.6093, |
| "step": 845824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.614400702847226e-05, |
| "loss": 3.5952, |
| "step": 846336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.613562108096174e-05, |
| "loss": 3.6044, |
| "step": 846848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.612723513345122e-05, |
| "loss": 3.5927, |
| "step": 847360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.61188491859407e-05, |
| "loss": 3.6063, |
| "step": 847872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6110463238430184e-05, |
| "loss": 3.5996, |
| "step": 848384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.610207729091966e-05, |
| "loss": 3.6013, |
| "step": 848896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.609369134340914e-05, |
| "loss": 3.6007, |
| "step": 849408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.608532177470235e-05, |
| "loss": 3.6205, |
| "step": 849920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.607693582719183e-05, |
| "loss": 3.6001, |
| "step": 850432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6068549879681306e-05, |
| "loss": 3.6126, |
| "step": 850944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6060163932170786e-05, |
| "loss": 3.6033, |
| "step": 851456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6051794363464e-05, |
| "loss": 3.6063, |
| "step": 851968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6043408415953475e-05, |
| "loss": 3.6126, |
| "step": 852480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6035022468442955e-05, |
| "loss": 3.5948, |
| "step": 852992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.6026636520932435e-05, |
| "loss": 3.6043, |
| "step": 853504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.601826695222565e-05, |
| "loss": 3.5975, |
| "step": 854016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.600988100471513e-05, |
| "loss": 3.5958, |
| "step": 854528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.600149505720461e-05, |
| "loss": 3.608, |
| "step": 855040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.599310910969409e-05, |
| "loss": 3.6076, |
| "step": 855552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.598473954098731e-05, |
| "loss": 3.5993, |
| "step": 856064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.597635359347678e-05, |
| "loss": 3.6212, |
| "step": 856576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.596796764596626e-05, |
| "loss": 3.602, |
| "step": 857088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.595958169845574e-05, |
| "loss": 3.5972, |
| "step": 857600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.595121212974895e-05, |
| "loss": 3.6113, |
| "step": 858112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.594282618223843e-05, |
| "loss": 3.5961, |
| "step": 858624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.593444023472791e-05, |
| "loss": 3.5984, |
| "step": 859136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.592605428721739e-05, |
| "loss": 3.605, |
| "step": 859648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5917668339706876e-05, |
| "loss": 3.5967, |
| "step": 860160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5909298771000085e-05, |
| "loss": 3.6017, |
| "step": 860672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5900912823489565e-05, |
| "loss": 3.6086, |
| "step": 861184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5892526875979045e-05, |
| "loss": 3.601, |
| "step": 861696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5884140928468525e-05, |
| "loss": 3.6051, |
| "step": 862208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5875771359761734e-05, |
| "loss": 3.6037, |
| "step": 862720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5867385412251214e-05, |
| "loss": 3.5986, |
| "step": 863232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5858999464740694e-05, |
| "loss": 3.6072, |
| "step": 863744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5850613517230174e-05, |
| "loss": 3.5743, |
| "step": 864256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.584224394852338e-05, |
| "loss": 3.606, |
| "step": 864768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.583385800101286e-05, |
| "loss": 3.5948, |
| "step": 865280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.582547205350234e-05, |
| "loss": 3.6075, |
| "step": 865792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.581708610599183e-05, |
| "loss": 3.5924, |
| "step": 866304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.580871653728504e-05, |
| "loss": 3.5827, |
| "step": 866816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.580033058977452e-05, |
| "loss": 3.5837, |
| "step": 867328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5791944642264e-05, |
| "loss": 3.5901, |
| "step": 867840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.578357507355721e-05, |
| "loss": 3.6044, |
| "step": 868352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.577518912604669e-05, |
| "loss": 3.5845, |
| "step": 868864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.576680317853617e-05, |
| "loss": 3.6118, |
| "step": 869376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.575841723102565e-05, |
| "loss": 3.6021, |
| "step": 869888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.575003128351513e-05, |
| "loss": 3.5794, |
| "step": 870400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5741661714808336e-05, |
| "loss": 3.6041, |
| "step": 870912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5733275767297816e-05, |
| "loss": 3.5869, |
| "step": 871424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5724889819787296e-05, |
| "loss": 3.5869, |
| "step": 871936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.571652025108051e-05, |
| "loss": 3.5942, |
| "step": 872448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.570813430356999e-05, |
| "loss": 3.6025, |
| "step": 872960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569974835605947e-05, |
| "loss": 3.5805, |
| "step": 873472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.569136240854895e-05, |
| "loss": 3.5975, |
| "step": 873984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.568297646103843e-05, |
| "loss": 3.5817, |
| "step": 874496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.567460689233164e-05, |
| "loss": 3.5971, |
| "step": 875008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.566622094482112e-05, |
| "loss": 3.6021, |
| "step": 875520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.56578349973106e-05, |
| "loss": 3.6009, |
| "step": 876032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.564944904980008e-05, |
| "loss": 3.6029, |
| "step": 876544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.564107948109329e-05, |
| "loss": 3.604, |
| "step": 877056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.563269353358277e-05, |
| "loss": 3.6187, |
| "step": 877568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.562430758607225e-05, |
| "loss": 3.5999, |
| "step": 878080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.561592163856174e-05, |
| "loss": 3.6072, |
| "step": 878592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5607552069854946e-05, |
| "loss": 3.5883, |
| "step": 879104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5599166122344426e-05, |
| "loss": 3.5949, |
| "step": 879616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5590780174833906e-05, |
| "loss": 3.5954, |
| "step": 880128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5582394227323386e-05, |
| "loss": 3.6014, |
| "step": 880640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5574024658616595e-05, |
| "loss": 3.6035, |
| "step": 881152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5565638711106075e-05, |
| "loss": 3.6078, |
| "step": 881664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5557252763595555e-05, |
| "loss": 3.5881, |
| "step": 882176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5548866816085035e-05, |
| "loss": 3.5886, |
| "step": 882688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5540497247378244e-05, |
| "loss": 3.5854, |
| "step": 883200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5532111299867724e-05, |
| "loss": 3.6102, |
| "step": 883712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5523725352357204e-05, |
| "loss": 3.597, |
| "step": 884224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.5515339404846684e-05, |
| "loss": 3.588, |
| "step": 884736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.55069698361399e-05, |
| "loss": 3.5998, |
| "step": 885248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.549858388862938e-05, |
| "loss": 3.5945, |
| "step": 885760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.549019794111886e-05, |
| "loss": 3.5912, |
| "step": 886272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.548181199360834e-05, |
| "loss": 3.5898, |
| "step": 886784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.547344242490155e-05, |
| "loss": 3.6037, |
| "step": 887296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.546505647739103e-05, |
| "loss": 3.6029, |
| "step": 887808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.545667052988051e-05, |
| "loss": 3.602, |
| "step": 888320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.544828458236999e-05, |
| "loss": 3.5891, |
| "step": 888832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.54399150136632e-05, |
| "loss": 3.5935, |
| "step": 889344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.543152906615268e-05, |
| "loss": 3.5944, |
| "step": 889856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.542314311864216e-05, |
| "loss": 3.601, |
| "step": 890368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.541475717113164e-05, |
| "loss": 3.601, |
| "step": 890880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.540638760242485e-05, |
| "loss": 3.5989, |
| "step": 891392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.539800165491433e-05, |
| "loss": 3.5985, |
| "step": 891904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.538961570740381e-05, |
| "loss": 3.5988, |
| "step": 892416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.538122975989329e-05, |
| "loss": 3.5953, |
| "step": 892928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.53728601911865e-05, |
| "loss": 3.5834, |
| "step": 893440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.536447424367598e-05, |
| "loss": 3.5884, |
| "step": 893952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.535608829616546e-05, |
| "loss": 3.5931, |
| "step": 894464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.534770234865494e-05, |
| "loss": 3.594, |
| "step": 894976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.533933277994815e-05, |
| "loss": 3.5832, |
| "step": 895488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.533094683243763e-05, |
| "loss": 3.5846, |
| "step": 896000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.532256088492711e-05, |
| "loss": 3.5861, |
| "step": 896512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.531417493741659e-05, |
| "loss": 3.5912, |
| "step": 897024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.530580536870981e-05, |
| "loss": 3.5935, |
| "step": 897536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.529741942119929e-05, |
| "loss": 3.5933, |
| "step": 898048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.528903347368877e-05, |
| "loss": 3.601, |
| "step": 898560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.528064752617825e-05, |
| "loss": 3.6001, |
| "step": 899072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5272277957471456e-05, |
| "loss": 3.5925, |
| "step": 899584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5263892009960936e-05, |
| "loss": 3.6019, |
| "step": 900096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5255506062450416e-05, |
| "loss": 3.5946, |
| "step": 900608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5247120114939896e-05, |
| "loss": 3.5977, |
| "step": 901120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5238750546233105e-05, |
| "loss": 3.5925, |
| "step": 901632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5230364598722585e-05, |
| "loss": 3.5925, |
| "step": 902144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5221978651212065e-05, |
| "loss": 3.5928, |
| "step": 902656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5213592703701545e-05, |
| "loss": 3.594, |
| "step": 903168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.520522313499476e-05, |
| "loss": 3.5882, |
| "step": 903680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.519683718748424e-05, |
| "loss": 3.5901, |
| "step": 904192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.518845123997372e-05, |
| "loss": 3.5755, |
| "step": 904704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.518008167126693e-05, |
| "loss": 3.5888, |
| "step": 905216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.517169572375641e-05, |
| "loss": 3.5955, |
| "step": 905728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.516330977624589e-05, |
| "loss": 3.5912, |
| "step": 906240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.515492382873537e-05, |
| "loss": 3.6036, |
| "step": 906752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.514655426002858e-05, |
| "loss": 3.5769, |
| "step": 907264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.513816831251806e-05, |
| "loss": 3.6058, |
| "step": 907776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.512978236500754e-05, |
| "loss": 3.5969, |
| "step": 908288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.512139641749702e-05, |
| "loss": 3.5991, |
| "step": 908800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5113026848790234e-05, |
| "loss": 3.5862, |
| "step": 909312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5104640901279714e-05, |
| "loss": 3.5903, |
| "step": 909824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5096254953769194e-05, |
| "loss": 3.6044, |
| "step": 910336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.5087869006258674e-05, |
| "loss": 3.6012, |
| "step": 910848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.507949943755188e-05, |
| "loss": 3.588, |
| "step": 911360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.507111349004136e-05, |
| "loss": 3.5907, |
| "step": 911872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.506272754253084e-05, |
| "loss": 3.5946, |
| "step": 912384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.505434159502032e-05, |
| "loss": 3.5934, |
| "step": 912896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.504597202631353e-05, |
| "loss": 3.5874, |
| "step": 913408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.503758607880301e-05, |
| "loss": 3.5953, |
| "step": 913920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.502920013129249e-05, |
| "loss": 3.5924, |
| "step": 914432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.502081418378197e-05, |
| "loss": 3.5914, |
| "step": 914944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.501244461507519e-05, |
| "loss": 3.5978, |
| "step": 915456 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8436367511749268, |
| "eval_runtime": 315.6041, |
| "eval_samples_per_second": 1209.081, |
| "eval_steps_per_second": 37.785, |
| "step": 915840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.500405866756467e-05, |
| "loss": 3.5904, |
| "step": 915968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.499567272005415e-05, |
| "loss": 3.5745, |
| "step": 916480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.498728677254363e-05, |
| "loss": 3.5906, |
| "step": 916992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.497891720383684e-05, |
| "loss": 3.5942, |
| "step": 917504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.497053125632632e-05, |
| "loss": 3.5947, |
| "step": 918016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.49621453088158e-05, |
| "loss": 3.5965, |
| "step": 918528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.495375936130528e-05, |
| "loss": 3.5894, |
| "step": 919040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4945389792598486e-05, |
| "loss": 3.5859, |
| "step": 919552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4937003845087966e-05, |
| "loss": 3.5763, |
| "step": 920064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4928617897577446e-05, |
| "loss": 3.5909, |
| "step": 920576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4920231950066926e-05, |
| "loss": 3.5903, |
| "step": 921088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.491186238136014e-05, |
| "loss": 3.5883, |
| "step": 921600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.490349281265335e-05, |
| "loss": 3.5943, |
| "step": 922112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.489510686514283e-05, |
| "loss": 3.5727, |
| "step": 922624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.488672091763231e-05, |
| "loss": 3.584, |
| "step": 923136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.487833497012179e-05, |
| "loss": 3.5757, |
| "step": 923648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.486994902261127e-05, |
| "loss": 3.5868, |
| "step": 924160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4861563075100744e-05, |
| "loss": 3.5816, |
| "step": 924672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4853177127590224e-05, |
| "loss": 3.5828, |
| "step": 925184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4844791180079704e-05, |
| "loss": 3.5807, |
| "step": 925696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.483642161137292e-05, |
| "loss": 3.6044, |
| "step": 926208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.48280356638624e-05, |
| "loss": 3.5824, |
| "step": 926720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.481964971635188e-05, |
| "loss": 3.5914, |
| "step": 927232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.481126376884136e-05, |
| "loss": 3.5848, |
| "step": 927744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.4802894200134575e-05, |
| "loss": 3.5924, |
| "step": 928256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.479450825262405e-05, |
| "loss": 3.5918, |
| "step": 928768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.478612230511353e-05, |
| "loss": 3.5757, |
| "step": 929280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.477773635760301e-05, |
| "loss": 3.5857, |
| "step": 929792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.476936678889622e-05, |
| "loss": 3.576, |
| "step": 930304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.47609808413857e-05, |
| "loss": 3.5762, |
| "step": 930816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.475259489387518e-05, |
| "loss": 3.591, |
| "step": 931328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.474420894636466e-05, |
| "loss": 3.5875, |
| "step": 931840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4735839377657866e-05, |
| "loss": 3.5816, |
| "step": 932352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.472745343014735e-05, |
| "loss": 3.5999, |
| "step": 932864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.471906748263683e-05, |
| "loss": 3.5833, |
| "step": 933376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.471068153512631e-05, |
| "loss": 3.5808, |
| "step": 933888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.470231196641952e-05, |
| "loss": 3.5933, |
| "step": 934400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4693926018909e-05, |
| "loss": 3.5762, |
| "step": 934912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.468554007139848e-05, |
| "loss": 3.5812, |
| "step": 935424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.467715412388796e-05, |
| "loss": 3.5822, |
| "step": 935936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.466878455518117e-05, |
| "loss": 3.5776, |
| "step": 936448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.466039860767065e-05, |
| "loss": 3.587, |
| "step": 936960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.465201266016013e-05, |
| "loss": 3.5927, |
| "step": 937472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.464362671264961e-05, |
| "loss": 3.5795, |
| "step": 937984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.463525714394282e-05, |
| "loss": 3.5859, |
| "step": 938496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.462687119643231e-05, |
| "loss": 3.586, |
| "step": 939008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.461848524892179e-05, |
| "loss": 3.5801, |
| "step": 939520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.461009930141127e-05, |
| "loss": 3.5878, |
| "step": 940032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4601729732704476e-05, |
| "loss": 3.5577, |
| "step": 940544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4593343785193956e-05, |
| "loss": 3.5844, |
| "step": 941056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4584957837683436e-05, |
| "loss": 3.5823, |
| "step": 941568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4576571890172916e-05, |
| "loss": 3.5876, |
| "step": 942080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4568202321466125e-05, |
| "loss": 3.5738, |
| "step": 942592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4559816373955605e-05, |
| "loss": 3.5629, |
| "step": 943104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4551430426445085e-05, |
| "loss": 3.5671, |
| "step": 943616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4543044478934565e-05, |
| "loss": 3.5693, |
| "step": 944128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4534674910227774e-05, |
| "loss": 3.5896, |
| "step": 944640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.452628896271726e-05, |
| "loss": 3.5658, |
| "step": 945152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.451790301520674e-05, |
| "loss": 3.5937, |
| "step": 945664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.450951706769622e-05, |
| "loss": 3.5824, |
| "step": 946176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.450114749898943e-05, |
| "loss": 3.5661, |
| "step": 946688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.449276155147891e-05, |
| "loss": 3.5848, |
| "step": 947200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.448437560396839e-05, |
| "loss": 3.5646, |
| "step": 947712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.447598965645787e-05, |
| "loss": 3.5714, |
| "step": 948224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.446762008775108e-05, |
| "loss": 3.5717, |
| "step": 948736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.445923414024056e-05, |
| "loss": 3.5831, |
| "step": 949248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.445084819273004e-05, |
| "loss": 3.5663, |
| "step": 949760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.444246224521952e-05, |
| "loss": 3.5787, |
| "step": 950272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.443409267651273e-05, |
| "loss": 3.561, |
| "step": 950784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.442570672900221e-05, |
| "loss": 3.5797, |
| "step": 951296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4417320781491694e-05, |
| "loss": 3.5834, |
| "step": 951808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4408934833981174e-05, |
| "loss": 3.5835, |
| "step": 952320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.440056526527438e-05, |
| "loss": 3.5843, |
| "step": 952832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.439217931776386e-05, |
| "loss": 3.5867, |
| "step": 953344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.438379337025334e-05, |
| "loss": 3.5988, |
| "step": 953856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.437540742274282e-05, |
| "loss": 3.5844, |
| "step": 954368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.436703785403603e-05, |
| "loss": 3.5915, |
| "step": 954880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.435865190652551e-05, |
| "loss": 3.5693, |
| "step": 955392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.435026595901499e-05, |
| "loss": 3.5758, |
| "step": 955904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.434188001150447e-05, |
| "loss": 3.5788, |
| "step": 956416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.433351044279768e-05, |
| "loss": 3.5824, |
| "step": 956928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.432512449528716e-05, |
| "loss": 3.5857, |
| "step": 957440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.431673854777665e-05, |
| "loss": 3.5886, |
| "step": 957952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.430835260026613e-05, |
| "loss": 3.5762, |
| "step": 958464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.429998303155934e-05, |
| "loss": 3.568, |
| "step": 958976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.429159708404882e-05, |
| "loss": 3.5687, |
| "step": 959488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.42832111365383e-05, |
| "loss": 3.5893, |
| "step": 960000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4274825189027777e-05, |
| "loss": 3.5775, |
| "step": 960512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4266455620320986e-05, |
| "loss": 3.5707, |
| "step": 961024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.4258069672810466e-05, |
| "loss": 3.5793, |
| "step": 961536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4249683725299946e-05, |
| "loss": 3.5773, |
| "step": 962048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4241297777789426e-05, |
| "loss": 3.5709, |
| "step": 962560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4232928209082635e-05, |
| "loss": 3.5713, |
| "step": 963072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4224542261572115e-05, |
| "loss": 3.585, |
| "step": 963584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.42161563140616e-05, |
| "loss": 3.5879, |
| "step": 964096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.420777036655108e-05, |
| "loss": 3.5841, |
| "step": 964608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.419940079784429e-05, |
| "loss": 3.5686, |
| "step": 965120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.419101485033377e-05, |
| "loss": 3.5771, |
| "step": 965632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.418262890282325e-05, |
| "loss": 3.5798, |
| "step": 966144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.417424295531273e-05, |
| "loss": 3.5796, |
| "step": 966656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.416587338660594e-05, |
| "loss": 3.5855, |
| "step": 967168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.415748743909542e-05, |
| "loss": 3.579, |
| "step": 967680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.41491014915849e-05, |
| "loss": 3.5804, |
| "step": 968192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.414071554407438e-05, |
| "loss": 3.5786, |
| "step": 968704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.413234597536759e-05, |
| "loss": 3.5788, |
| "step": 969216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.412396002785707e-05, |
| "loss": 3.5663, |
| "step": 969728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4115574080346555e-05, |
| "loss": 3.5725, |
| "step": 970240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4107188132836035e-05, |
| "loss": 3.5769, |
| "step": 970752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4098818564129244e-05, |
| "loss": 3.5736, |
| "step": 971264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4090432616618724e-05, |
| "loss": 3.5621, |
| "step": 971776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4082046669108204e-05, |
| "loss": 3.5696, |
| "step": 972288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.4073660721597684e-05, |
| "loss": 3.563, |
| "step": 972800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.406529115289089e-05, |
| "loss": 3.5803, |
| "step": 973312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.405690520538037e-05, |
| "loss": 3.5741, |
| "step": 973824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.404851925786985e-05, |
| "loss": 3.5762, |
| "step": 974336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.404013331035933e-05, |
| "loss": 3.5783, |
| "step": 974848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.403176374165254e-05, |
| "loss": 3.5838, |
| "step": 975360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.402337779414202e-05, |
| "loss": 3.5765, |
| "step": 975872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.401499184663151e-05, |
| "loss": 3.5776, |
| "step": 976384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.400660589912099e-05, |
| "loss": 3.5801, |
| "step": 976896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.39982363304142e-05, |
| "loss": 3.5772, |
| "step": 977408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398985038290368e-05, |
| "loss": 3.5762, |
| "step": 977920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.398146443539316e-05, |
| "loss": 3.5756, |
| "step": 978432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.397307848788264e-05, |
| "loss": 3.5749, |
| "step": 978944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.396470891917585e-05, |
| "loss": 3.5727, |
| "step": 979456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.395632297166533e-05, |
| "loss": 3.5711, |
| "step": 979968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3947937024154807e-05, |
| "loss": 3.5739, |
| "step": 980480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3939551076644287e-05, |
| "loss": 3.5586, |
| "step": 980992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3931181507937496e-05, |
| "loss": 3.5724, |
| "step": 981504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3922795560426976e-05, |
| "loss": 3.5748, |
| "step": 982016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.391440961291646e-05, |
| "loss": 3.5757, |
| "step": 982528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.390602366540594e-05, |
| "loss": 3.5834, |
| "step": 983040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.389765409669915e-05, |
| "loss": 3.5657, |
| "step": 983552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388926814918863e-05, |
| "loss": 3.5828, |
| "step": 984064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.388088220167811e-05, |
| "loss": 3.5801, |
| "step": 984576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.387251263297132e-05, |
| "loss": 3.5771, |
| "step": 985088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.38641266854608e-05, |
| "loss": 3.5736, |
| "step": 985600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.385574073795028e-05, |
| "loss": 3.5713, |
| "step": 986112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.384735479043976e-05, |
| "loss": 3.5881, |
| "step": 986624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383898522173297e-05, |
| "loss": 3.5817, |
| "step": 987136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.383059927422245e-05, |
| "loss": 3.5739, |
| "step": 987648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.382221332671193e-05, |
| "loss": 3.5713, |
| "step": 988160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3813827379201416e-05, |
| "loss": 3.5758, |
| "step": 988672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3805457810494625e-05, |
| "loss": 3.5742, |
| "step": 989184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3797071862984105e-05, |
| "loss": 3.573, |
| "step": 989696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3788685915473585e-05, |
| "loss": 3.5754, |
| "step": 990208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3780299967963065e-05, |
| "loss": 3.5745, |
| "step": 990720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3771930399256274e-05, |
| "loss": 3.5775, |
| "step": 991232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.3763544451745754e-05, |
| "loss": 3.5793, |
| "step": 991744 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.843928575515747, |
| "eval_runtime": 319.0482, |
| "eval_samples_per_second": 1196.029, |
| "eval_steps_per_second": 37.377, |
| "step": 992160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3755158504235234e-05, |
| "loss": 3.5768, |
| "step": 992256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3746772556724714e-05, |
| "loss": 3.558, |
| "step": 992768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.373838660921419e-05, |
| "loss": 3.5696, |
| "step": 993280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.373000066170367e-05, |
| "loss": 3.5809, |
| "step": 993792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.372163109299688e-05, |
| "loss": 3.5761, |
| "step": 994304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.371324514548636e-05, |
| "loss": 3.5802, |
| "step": 994816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.370485919797584e-05, |
| "loss": 3.5671, |
| "step": 995328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.369647325046532e-05, |
| "loss": 3.5692, |
| "step": 995840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.368810368175854e-05, |
| "loss": 3.5585, |
| "step": 996352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367971773424802e-05, |
| "loss": 3.5769, |
| "step": 996864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.367133178673749e-05, |
| "loss": 3.5699, |
| "step": 997376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.366294583922697e-05, |
| "loss": 3.573, |
| "step": 997888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.365459264932392e-05, |
| "loss": 3.5765, |
| "step": 998400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.36462067018134e-05, |
| "loss": 3.5534, |
| "step": 998912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.363782075430288e-05, |
| "loss": 3.5666, |
| "step": 999424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.362943480679236e-05, |
| "loss": 3.5549, |
| "step": 999936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3621048859281837e-05, |
| "loss": 3.5715, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3612662911771317e-05, |
| "loss": 3.5609, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3604276964260797e-05, |
| "loss": 3.5694, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3595891016750276e-05, |
| "loss": 3.5601, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3587521448043486e-05, |
| "loss": 3.5844, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3579135500532966e-05, |
| "loss": 3.5677, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3570749553022445e-05, |
| "loss": 3.5726, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3562363605511925e-05, |
| "loss": 3.5698, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3553994036805135e-05, |
| "loss": 3.5776, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3545608089294614e-05, |
| "loss": 3.5712, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3537222141784094e-05, |
| "loss": 3.5618, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.3528836194273574e-05, |
| "loss": 3.5724, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.352046662556679e-05, |
| "loss": 3.5576, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.351208067805627e-05, |
| "loss": 3.5594, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.350369473054575e-05, |
| "loss": 3.5695, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.349530878303523e-05, |
| "loss": 3.5669, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.348693921432844e-05, |
| "loss": 3.5685, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.347855326681792e-05, |
| "loss": 3.5808, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.34701673193074e-05, |
| "loss": 3.5665, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.346178137179688e-05, |
| "loss": 3.5688, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.345341180309009e-05, |
| "loss": 3.5706, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.344502585557957e-05, |
| "loss": 3.5591, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.343663990806905e-05, |
| "loss": 3.5687, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.342825396055853e-05, |
| "loss": 3.5624, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3419884391851744e-05, |
| "loss": 3.5593, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3411498444341224e-05, |
| "loss": 3.5697, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3403112496830704e-05, |
| "loss": 3.5792, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3394726549320184e-05, |
| "loss": 3.5612, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.338635698061339e-05, |
| "loss": 3.5699, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.337797103310287e-05, |
| "loss": 3.5724, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.336958508559235e-05, |
| "loss": 3.5634, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.336119913808183e-05, |
| "loss": 3.572, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.335282956937504e-05, |
| "loss": 3.5388, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.334444362186452e-05, |
| "loss": 3.5681, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3336057674354e-05, |
| "loss": 3.5616, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.332767172684348e-05, |
| "loss": 3.5721, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.33193021581367e-05, |
| "loss": 3.557, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.331091621062618e-05, |
| "loss": 3.549, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.330253026311566e-05, |
| "loss": 3.5453, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.329414431560514e-05, |
| "loss": 3.5491, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3285774746898347e-05, |
| "loss": 3.5732, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3277388799387827e-05, |
| "loss": 3.5521, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3269002851877306e-05, |
| "loss": 3.5738, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3260616904366786e-05, |
| "loss": 3.5618, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3252247335659996e-05, |
| "loss": 3.5533, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3243861388149475e-05, |
| "loss": 3.5663, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3235475440638955e-05, |
| "loss": 3.5471, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3227089493128435e-05, |
| "loss": 3.5536, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.321871992442165e-05, |
| "loss": 3.5547, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.321033397691113e-05, |
| "loss": 3.567, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.320194802940061e-05, |
| "loss": 3.5513, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.319356208189009e-05, |
| "loss": 3.5544, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.31851925131833e-05, |
| "loss": 3.5481, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.317680656567278e-05, |
| "loss": 3.561, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.316842061816226e-05, |
| "loss": 3.5678, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.316003467065174e-05, |
| "loss": 3.5655, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.315166510194495e-05, |
| "loss": 3.5664, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.314327915443443e-05, |
| "loss": 3.5692, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.313489320692391e-05, |
| "loss": 3.5802, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.312650725941339e-05, |
| "loss": 3.565, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3118137690706605e-05, |
| "loss": 3.575, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3109751743196085e-05, |
| "loss": 3.5571, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3101365795685565e-05, |
| "loss": 3.5597, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3092996226978774e-05, |
| "loss": 3.5614, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3084610279468254e-05, |
| "loss": 3.5609, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3076224331957734e-05, |
| "loss": 3.5698, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.3067838384447214e-05, |
| "loss": 3.5715, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.305946881574042e-05, |
| "loss": 3.5662, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.30510828682299e-05, |
| "loss": 3.5463, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.304269692071938e-05, |
| "loss": 3.5522, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.303431097320886e-05, |
| "loss": 3.5703, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.302594140450208e-05, |
| "loss": 3.5629, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.301755545699156e-05, |
| "loss": 3.5564, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.300916950948104e-05, |
| "loss": 3.5618, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.300078356197052e-05, |
| "loss": 3.5617, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.299241399326373e-05, |
| "loss": 3.5552, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.298402804575321e-05, |
| "loss": 3.5596, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.297564209824269e-05, |
| "loss": 3.5683, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.296725615073217e-05, |
| "loss": 3.5674, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2958886582025377e-05, |
| "loss": 3.5673, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2950500634514857e-05, |
| "loss": 3.5534, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2942114687004336e-05, |
| "loss": 3.5579, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2933728739493816e-05, |
| "loss": 3.5647, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.292535917078703e-05, |
| "loss": 3.565, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.291697322327651e-05, |
| "loss": 3.5687, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.290858727576599e-05, |
| "loss": 3.563, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.290020132825547e-05, |
| "loss": 3.5635, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.289183175954868e-05, |
| "loss": 3.5615, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.288344581203816e-05, |
| "loss": 3.5616, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.287505986452764e-05, |
| "loss": 3.5502, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.286667391701712e-05, |
| "loss": 3.5517, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.285830434831033e-05, |
| "loss": 3.5603, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.284991840079981e-05, |
| "loss": 3.5607, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.284153245328929e-05, |
| "loss": 3.5478, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.283314650577877e-05, |
| "loss": 3.5515, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2824776937071986e-05, |
| "loss": 3.5482, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2816390989561466e-05, |
| "loss": 3.5587, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2808005042050946e-05, |
| "loss": 3.559, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2799619094540426e-05, |
| "loss": 3.5581, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2791249525833635e-05, |
| "loss": 3.5627, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2782863578323115e-05, |
| "loss": 3.5693, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2774477630812595e-05, |
| "loss": 3.5578, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2766091683302075e-05, |
| "loss": 3.5628, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2757722114595284e-05, |
| "loss": 3.5677, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2749336167084764e-05, |
| "loss": 3.5599, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2740950219574244e-05, |
| "loss": 3.556, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2732564272063724e-05, |
| "loss": 3.563, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.272419470335694e-05, |
| "loss": 3.5593, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.271580875584642e-05, |
| "loss": 3.5543, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.27074228083359e-05, |
| "loss": 3.5505, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.269903686082538e-05, |
| "loss": 3.5609, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.269066729211859e-05, |
| "loss": 3.5476, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.26822977234118e-05, |
| "loss": 3.5566, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.267391177590128e-05, |
| "loss": 3.5505, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.266552582839076e-05, |
| "loss": 3.5607, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.265713988088024e-05, |
| "loss": 3.5643, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.264875393336972e-05, |
| "loss": 3.5507, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.26403679858592e-05, |
| "loss": 3.565, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.263198203834868e-05, |
| "loss": 3.5641, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.262359609083816e-05, |
| "loss": 3.5643, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.261522652213137e-05, |
| "loss": 3.5584, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.260684057462085e-05, |
| "loss": 3.5527, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.259845462711033e-05, |
| "loss": 3.5733, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.259006867959981e-05, |
| "loss": 3.5581, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.258169911089302e-05, |
| "loss": 3.5626, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.25733131633825e-05, |
| "loss": 3.5554, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.256492721587198e-05, |
| "loss": 3.5595, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2556541268361455e-05, |
| "loss": 3.5552, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.254817169965467e-05, |
| "loss": 3.5553, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.253978575214415e-05, |
| "loss": 3.562, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.253139980463363e-05, |
| "loss": 3.5564, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.252301385712311e-05, |
| "loss": 3.5626, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.251464428841633e-05, |
| "loss": 3.5562, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8437607288360596, |
| "eval_runtime": 305.0264, |
| "eval_samples_per_second": 1251.01, |
| "eval_steps_per_second": 39.095, |
| "step": 1068480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.250625834090581e-05, |
| "loss": 3.5578, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.249787239339529e-05, |
| "loss": 3.5464, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.248948644588476e-05, |
| "loss": 3.5505, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2481116877177976e-05, |
| "loss": 3.5644, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2472730929667456e-05, |
| "loss": 3.5601, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.246434498215693e-05, |
| "loss": 3.564, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2455975413450145e-05, |
| "loss": 3.5495, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2447589465939625e-05, |
| "loss": 3.5533, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2439203518429105e-05, |
| "loss": 3.5446, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.243081757091858e-05, |
| "loss": 3.5578, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2422448002211794e-05, |
| "loss": 3.5558, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.241406205470128e-05, |
| "loss": 3.5561, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.240569248599449e-05, |
| "loss": 3.5617, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.239730653848397e-05, |
| "loss": 3.5401, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.238892059097345e-05, |
| "loss": 3.5463, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.238053464346293e-05, |
| "loss": 3.5445, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.23721486959524e-05, |
| "loss": 3.5538, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.236376274844188e-05, |
| "loss": 3.545, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.235537680093136e-05, |
| "loss": 3.5543, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.234699085342084e-05, |
| "loss": 3.544, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.233862128471405e-05, |
| "loss": 3.5673, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.233023533720353e-05, |
| "loss": 3.5528, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.232184938969302e-05, |
| "loss": 3.5528, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.23134634421825e-05, |
| "loss": 3.5544, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.230509387347571e-05, |
| "loss": 3.5648, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.229670792596519e-05, |
| "loss": 3.5542, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.228832197845467e-05, |
| "loss": 3.5468, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.227993603094415e-05, |
| "loss": 3.5545, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2271566462237356e-05, |
| "loss": 3.5403, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.2263180514726836e-05, |
| "loss": 3.5423, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2254794567216316e-05, |
| "loss": 3.5526, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2246408619705796e-05, |
| "loss": 3.5511, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2238039050999005e-05, |
| "loss": 3.5537, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2229653103488485e-05, |
| "loss": 3.5616, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.222126715597797e-05, |
| "loss": 3.5511, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.221288120846745e-05, |
| "loss": 3.5537, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.220451163976066e-05, |
| "loss": 3.5556, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.219612569225014e-05, |
| "loss": 3.5423, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.218773974473962e-05, |
| "loss": 3.5557, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.21793537972291e-05, |
| "loss": 3.545, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.217098422852231e-05, |
| "loss": 3.5456, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.216259828101179e-05, |
| "loss": 3.5523, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.215421233350127e-05, |
| "loss": 3.5611, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.214582638599075e-05, |
| "loss": 3.5434, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.213745681728396e-05, |
| "loss": 3.5593, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.212907086977344e-05, |
| "loss": 3.5499, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2120684922262926e-05, |
| "loss": 3.5493, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2112298974752406e-05, |
| "loss": 3.5526, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2103929406045615e-05, |
| "loss": 3.523, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2095543458535095e-05, |
| "loss": 3.5484, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2087157511024575e-05, |
| "loss": 3.5545, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2078771563514055e-05, |
| "loss": 3.5518, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2070401994807264e-05, |
| "loss": 3.5398, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2062016047296744e-05, |
| "loss": 3.5347, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2053630099786224e-05, |
| "loss": 3.5314, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.2045244152275704e-05, |
| "loss": 3.5348, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.203687458356891e-05, |
| "loss": 3.5595, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.202848863605839e-05, |
| "loss": 3.5338, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.202010268854788e-05, |
| "loss": 3.5574, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.201171674103736e-05, |
| "loss": 3.5471, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.200334717233057e-05, |
| "loss": 3.5347, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.199496122482005e-05, |
| "loss": 3.5502, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.198657527730953e-05, |
| "loss": 3.5349, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.197818932979901e-05, |
| "loss": 3.5398, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.196981976109222e-05, |
| "loss": 3.5346, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.19614338135817e-05, |
| "loss": 3.5497, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.195304786607118e-05, |
| "loss": 3.5393, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.194466191856066e-05, |
| "loss": 3.5359, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1936292349853866e-05, |
| "loss": 3.5347, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1927906402343346e-05, |
| "loss": 3.5437, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.191952045483283e-05, |
| "loss": 3.5516, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.191115088612604e-05, |
| "loss": 3.5514, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.190276493861552e-05, |
| "loss": 3.5514, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1894378991105e-05, |
| "loss": 3.5545, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.188599304359448e-05, |
| "loss": 3.5635, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.187762347488769e-05, |
| "loss": 3.5477, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.186923752737717e-05, |
| "loss": 3.5582, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.186085157986665e-05, |
| "loss": 3.5425, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.185246563235613e-05, |
| "loss": 3.5449, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.184409606364934e-05, |
| "loss": 3.5452, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.183571011613882e-05, |
| "loss": 3.5426, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.18273241686283e-05, |
| "loss": 3.5559, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.181893822111778e-05, |
| "loss": 3.5586, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1810568652410996e-05, |
| "loss": 3.5508, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1802182704900476e-05, |
| "loss": 3.5273, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1793796757389956e-05, |
| "loss": 3.5386, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1785410809879436e-05, |
| "loss": 3.5504, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1777041241172645e-05, |
| "loss": 3.5489, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1768655293662125e-05, |
| "loss": 3.5395, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.1760269346151605e-05, |
| "loss": 3.5488, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1751883398641085e-05, |
| "loss": 3.5454, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1743513829934294e-05, |
| "loss": 3.5377, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1735127882423774e-05, |
| "loss": 3.5412, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1726741934913254e-05, |
| "loss": 3.5542, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1718355987402734e-05, |
| "loss": 3.5479, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.170998641869595e-05, |
| "loss": 3.5559, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.170160047118543e-05, |
| "loss": 3.5373, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.169321452367491e-05, |
| "loss": 3.5418, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.168482857616439e-05, |
| "loss": 3.5515, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.16764590074576e-05, |
| "loss": 3.5496, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.166807305994708e-05, |
| "loss": 3.5499, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.165968711243656e-05, |
| "loss": 3.5509, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.165130116492604e-05, |
| "loss": 3.5476, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.164293159621925e-05, |
| "loss": 3.5467, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.163454564870873e-05, |
| "loss": 3.5498, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.162615970119821e-05, |
| "loss": 3.5314, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.161777375368769e-05, |
| "loss": 3.5403, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.16094041849809e-05, |
| "loss": 3.5425, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.160101823747038e-05, |
| "loss": 3.5421, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.159263228995986e-05, |
| "loss": 3.5354, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.158424634244934e-05, |
| "loss": 3.5362, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.157587677374255e-05, |
| "loss": 3.5309, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.156749082623203e-05, |
| "loss": 3.5464, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.155910487872151e-05, |
| "loss": 3.54, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.155071893121099e-05, |
| "loss": 3.5436, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.15423493625042e-05, |
| "loss": 3.5465, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.153396341499368e-05, |
| "loss": 3.552, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.152557746748316e-05, |
| "loss": 3.5438, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.151719151997264e-05, |
| "loss": 3.5457, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.150882195126586e-05, |
| "loss": 3.5523, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.150043600375534e-05, |
| "loss": 3.5435, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.149205005624482e-05, |
| "loss": 3.5383, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.14836641087343e-05, |
| "loss": 3.5452, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1475294540027506e-05, |
| "loss": 3.5446, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1466908592516986e-05, |
| "loss": 3.5424, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1458522645006466e-05, |
| "loss": 3.5337, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1450136697495946e-05, |
| "loss": 3.5475, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1441767128789155e-05, |
| "loss": 3.5321, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1433397560082364e-05, |
| "loss": 3.5412, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.142501161257185e-05, |
| "loss": 3.5301, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.141662566506133e-05, |
| "loss": 3.5526, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.140823971755081e-05, |
| "loss": 3.5494, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.139985377004029e-05, |
| "loss": 3.5324, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.139146782252977e-05, |
| "loss": 3.5493, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.138308187501925e-05, |
| "loss": 3.5475, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1374695927508724e-05, |
| "loss": 3.5484, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.136632635880194e-05, |
| "loss": 3.5414, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.135794041129142e-05, |
| "loss": 3.5418, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.13495544637809e-05, |
| "loss": 3.5576, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.134116851627037e-05, |
| "loss": 3.5413, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.133279894756359e-05, |
| "loss": 3.5486, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.132441300005307e-05, |
| "loss": 3.5394, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.131602705254255e-05, |
| "loss": 3.545, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.130764110503203e-05, |
| "loss": 3.5389, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1299271536325244e-05, |
| "loss": 3.538, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.1290885588814724e-05, |
| "loss": 3.5476, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.12824996413042e-05, |
| "loss": 3.5421, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.127411369379368e-05, |
| "loss": 3.55, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.126574412508689e-05, |
| "loss": 3.5417, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.844676971435547, |
| "eval_runtime": 306.2543, |
| "eval_samples_per_second": 1245.994, |
| "eval_steps_per_second": 38.938, |
| "step": 1144800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.125735817757637e-05, |
| "loss": 3.4929, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1248972230065846e-05, |
| "loss": 3.5352, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1240586282555326e-05, |
| "loss": 3.5341, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1232200335044806e-05, |
| "loss": 3.5471, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.122383076633802e-05, |
| "loss": 3.5438, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.12154448188275e-05, |
| "loss": 3.5524, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.120705887131698e-05, |
| "loss": 3.5318, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.119867292380646e-05, |
| "loss": 3.5368, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.119030335509967e-05, |
| "loss": 3.5295, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.118191740758915e-05, |
| "loss": 3.5437, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.117353146007863e-05, |
| "loss": 3.5355, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.116514551256811e-05, |
| "loss": 3.5457, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1156792322665056e-05, |
| "loss": 3.5447, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1148406375154536e-05, |
| "loss": 3.5293, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1140020427644016e-05, |
| "loss": 3.5294, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1131634480133496e-05, |
| "loss": 3.5313, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1123248532622976e-05, |
| "loss": 3.5348, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1114862585112456e-05, |
| "loss": 3.5333, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1106476637601936e-05, |
| "loss": 3.5393, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1098090690091416e-05, |
| "loss": 3.5292, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1089721121384625e-05, |
| "loss": 3.5495, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1081335173874105e-05, |
| "loss": 3.5357, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1072949226363585e-05, |
| "loss": 3.5378, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1064563278853065e-05, |
| "loss": 3.5405, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1056193710146274e-05, |
| "loss": 3.5488, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1047807762635754e-05, |
| "loss": 3.5395, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1039421815125234e-05, |
| "loss": 3.5333, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1031035867614713e-05, |
| "loss": 3.5383, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.102266629890793e-05, |
| "loss": 3.5249, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.101428035139741e-05, |
| "loss": 3.5313, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.100589440388689e-05, |
| "loss": 3.5332, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.099750845637637e-05, |
| "loss": 3.5428, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098913888766958e-05, |
| "loss": 3.5344, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098075294015906e-05, |
| "loss": 3.5427, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097236699264854e-05, |
| "loss": 3.5411, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.096398104513802e-05, |
| "loss": 3.5381, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.095561147643123e-05, |
| "loss": 3.5409, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.094722552892071e-05, |
| "loss": 3.5272, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.093883958141019e-05, |
| "loss": 3.5409, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.093045363389967e-05, |
| "loss": 3.5288, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.092208406519288e-05, |
| "loss": 3.5319, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.091369811768236e-05, |
| "loss": 3.5376, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.090531217017184e-05, |
| "loss": 3.5431, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.089692622266132e-05, |
| "loss": 3.5321, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.088855665395453e-05, |
| "loss": 3.5386, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.088017070644401e-05, |
| "loss": 3.536, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.087178475893349e-05, |
| "loss": 3.5335, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.086339881142297e-05, |
| "loss": 3.54, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.085502924271618e-05, |
| "loss": 3.5105, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.084664329520566e-05, |
| "loss": 3.5301, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.083825734769514e-05, |
| "loss": 3.5423, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082987140018462e-05, |
| "loss": 3.5382, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082150183147784e-05, |
| "loss": 3.5248, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.081311588396732e-05, |
| "loss": 3.5148, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0804729936456797e-05, |
| "loss": 3.5224, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0796343988946277e-05, |
| "loss": 3.5136, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0787974420239486e-05, |
| "loss": 3.5471, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0779588472728966e-05, |
| "loss": 3.5185, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0771202525218446e-05, |
| "loss": 3.5418, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0762816577707926e-05, |
| "loss": 3.5318, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0754447009001135e-05, |
| "loss": 3.5222, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0746061061490615e-05, |
| "loss": 3.5344, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0737675113980095e-05, |
| "loss": 3.5235, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0729289166469574e-05, |
| "loss": 3.5232, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.072091959776279e-05, |
| "loss": 3.5181, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.071253365025227e-05, |
| "loss": 3.5391, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.070414770274175e-05, |
| "loss": 3.5255, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.069576175523123e-05, |
| "loss": 3.5194, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.068739218652444e-05, |
| "loss": 3.5172, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.067900623901392e-05, |
| "loss": 3.5278, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06706202915034e-05, |
| "loss": 3.5378, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.066223434399288e-05, |
| "loss": 3.5374, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.065386477528609e-05, |
| "loss": 3.5352, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.064547882777557e-05, |
| "loss": 3.537, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.063709288026505e-05, |
| "loss": 3.5457, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.062870693275453e-05, |
| "loss": 3.5393, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0620337364047744e-05, |
| "loss": 3.5408, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0611951416537224e-05, |
| "loss": 3.5268, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0603565469026704e-05, |
| "loss": 3.5313, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0595179521516184e-05, |
| "loss": 3.527, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.058680995280939e-05, |
| "loss": 3.5323, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057842400529887e-05, |
| "loss": 3.5387, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057003805778835e-05, |
| "loss": 3.5443, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.056165211027783e-05, |
| "loss": 3.5329, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.055328254157104e-05, |
| "loss": 3.5166, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.054489659406052e-05, |
| "loss": 3.5212, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.053651064655e-05, |
| "loss": 3.5336, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.052814107784321e-05, |
| "loss": 3.5401, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.05197551303327e-05, |
| "loss": 3.5207, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0511369182822174e-05, |
| "loss": 3.5348, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0502983235311654e-05, |
| "loss": 3.5346, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0494613666604867e-05, |
| "loss": 3.5217, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0486227719094347e-05, |
| "loss": 3.5263, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0477841771583827e-05, |
| "loss": 3.5385, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0469455824073307e-05, |
| "loss": 3.5324, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0461086255366516e-05, |
| "loss": 3.5419, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0452700307855996e-05, |
| "loss": 3.5283, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0444314360345476e-05, |
| "loss": 3.5255, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.043592841283496e-05, |
| "loss": 3.5349, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0427558844128168e-05, |
| "loss": 3.5374, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0419172896617648e-05, |
| "loss": 3.5329, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0410786949107128e-05, |
| "loss": 3.5353, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0402401001596608e-05, |
| "loss": 3.5332, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.039403143288982e-05, |
| "loss": 3.534, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.03856454853793e-05, |
| "loss": 3.5334, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.037725953786878e-05, |
| "loss": 3.5153, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.036887359035826e-05, |
| "loss": 3.5246, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.036050402165147e-05, |
| "loss": 3.5336, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.035211807414095e-05, |
| "loss": 3.5281, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.034373212663043e-05, |
| "loss": 3.5196, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0335346179119913e-05, |
| "loss": 3.5221, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.032697661041312e-05, |
| "loss": 3.517, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.03185906629026e-05, |
| "loss": 3.5315, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.031020471539208e-05, |
| "loss": 3.524, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.030181876788156e-05, |
| "loss": 3.5335, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0293449199174774e-05, |
| "loss": 3.529, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0285063251664254e-05, |
| "loss": 3.5387, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0276677304153734e-05, |
| "loss": 3.5309, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0268291356643214e-05, |
| "loss": 3.5323, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0259921787936423e-05, |
| "loss": 3.5345, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0251535840425903e-05, |
| "loss": 3.53, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0243149892915383e-05, |
| "loss": 3.5263, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0234763945404866e-05, |
| "loss": 3.5303, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0226394376698075e-05, |
| "loss": 3.5296, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0218008429187555e-05, |
| "loss": 3.5286, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0209622481677035e-05, |
| "loss": 3.5223, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0201236534166515e-05, |
| "loss": 3.5259, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0192866965459728e-05, |
| "loss": 3.5205, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0184481017949208e-05, |
| "loss": 3.524, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0176095070438688e-05, |
| "loss": 3.5159, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0167709122928168e-05, |
| "loss": 3.5403, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0159339554221377e-05, |
| "loss": 3.5337, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0150953606710857e-05, |
| "loss": 3.5229, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0142567659200337e-05, |
| "loss": 3.5281, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.013418171168982e-05, |
| "loss": 3.5308, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.012581214298303e-05, |
| "loss": 3.5371, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.011742619547251e-05, |
| "loss": 3.5313, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.010904024796199e-05, |
| "loss": 3.5242, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0100654300451465e-05, |
| "loss": 3.5417, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.009228473174468e-05, |
| "loss": 3.5214, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.008389878423416e-05, |
| "loss": 3.5354, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.007551283672364e-05, |
| "loss": 3.5272, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0067126889213114e-05, |
| "loss": 3.5329, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.005875732050633e-05, |
| "loss": 3.5268, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.005037137299581e-05, |
| "loss": 3.5196, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.004198542548529e-05, |
| "loss": 3.5353, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0033599477974767e-05, |
| "loss": 3.5298, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0025229909267983e-05, |
| "loss": 3.5311, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0016843961757463e-05, |
| "loss": 3.5259, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.0008458014246936e-05, |
| "loss": 3.5316, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8448235988616943, |
| "eval_runtime": 304.4779, |
| "eval_samples_per_second": 1253.263, |
| "eval_steps_per_second": 39.165, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.000007206673642e-05, |
| "loss": 3.518, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9991702498029635e-05, |
| "loss": 3.518, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9983316550519115e-05, |
| "loss": 3.5344, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9974930603008588e-05, |
| "loss": 3.53, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9966544655498068e-05, |
| "loss": 3.538, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9958175086791284e-05, |
| "loss": 3.5167, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9949789139280764e-05, |
| "loss": 3.5244, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.994140319177024e-05, |
| "loss": 3.5126, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.993301724425972e-05, |
| "loss": 3.5314, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9924647675552936e-05, |
| "loss": 3.5224, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.991626172804241e-05, |
| "loss": 3.5308, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.990787578053189e-05, |
| "loss": 3.5256, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9899506211825105e-05, |
| "loss": 3.5205, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.989112026431459e-05, |
| "loss": 3.5151, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9882734316804062e-05, |
| "loss": 3.5176, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9874348369293542e-05, |
| "loss": 3.514, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9865962421783022e-05, |
| "loss": 3.5199, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9857576474272502e-05, |
| "loss": 3.5226, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.984919052676198e-05, |
| "loss": 3.5119, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9840820958055194e-05, |
| "loss": 3.5407, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9832435010544674e-05, |
| "loss": 3.5222, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9824049063034154e-05, |
| "loss": 3.526, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9815663115523634e-05, |
| "loss": 3.5246, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9807293546816843e-05, |
| "loss": 3.5334, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9798907599306326e-05, |
| "loss": 3.5207, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9790521651795806e-05, |
| "loss": 3.5229, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9782135704285286e-05, |
| "loss": 3.5201, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9773766135578495e-05, |
| "loss": 3.5181, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9765380188067975e-05, |
| "loss": 3.51, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9756994240557455e-05, |
| "loss": 3.519, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9748608293046935e-05, |
| "loss": 3.5284, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9740238724340148e-05, |
| "loss": 3.523, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9731852776829628e-05, |
| "loss": 3.5285, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9723466829319108e-05, |
| "loss": 3.5293, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9715080881808588e-05, |
| "loss": 3.5235, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9706711313101797e-05, |
| "loss": 3.5267, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.969832536559128e-05, |
| "loss": 3.5164, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.968993941808076e-05, |
| "loss": 3.5243, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.968155347057024e-05, |
| "loss": 3.5157, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.967318390186345e-05, |
| "loss": 3.5214, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.966479795435293e-05, |
| "loss": 3.5196, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.965641200684241e-05, |
| "loss": 3.5301, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.964802605933189e-05, |
| "loss": 3.5147, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.96396564906251e-05, |
| "loss": 3.525, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.963127054311458e-05, |
| "loss": 3.5257, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.962288459560406e-05, |
| "loss": 3.524, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.961449864809354e-05, |
| "loss": 3.5234, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.960612907938675e-05, |
| "loss": 3.4975, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9597743131876234e-05, |
| "loss": 3.5171, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9589357184365714e-05, |
| "loss": 3.5235, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9580971236855194e-05, |
| "loss": 3.5268, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9572601668148403e-05, |
| "loss": 3.5125, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9564215720637883e-05, |
| "loss": 3.4995, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9555829773127363e-05, |
| "loss": 3.5098, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9547443825616843e-05, |
| "loss": 3.4998, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9539074256910055e-05, |
| "loss": 3.5318, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9530688309399535e-05, |
| "loss": 3.5078, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9522302361889015e-05, |
| "loss": 3.525, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9513916414378495e-05, |
| "loss": 3.5201, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9505546845671704e-05, |
| "loss": 3.5052, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9497160898161184e-05, |
| "loss": 3.5192, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9488774950650667e-05, |
| "loss": 3.5079, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9480389003140147e-05, |
| "loss": 3.5065, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9472019434433356e-05, |
| "loss": 3.5041, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9463633486922836e-05, |
| "loss": 3.5275, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9455247539412316e-05, |
| "loss": 3.5142, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9446861591901796e-05, |
| "loss": 3.504, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.943849202319501e-05, |
| "loss": 3.5061, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.943010607568449e-05, |
| "loss": 3.5131, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.942172012817397e-05, |
| "loss": 3.5215, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.941333418066345e-05, |
| "loss": 3.5238, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9404964611956658e-05, |
| "loss": 3.5212, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9396578664446138e-05, |
| "loss": 3.5278, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.938819271693562e-05, |
| "loss": 3.5285, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.93798067694251e-05, |
| "loss": 3.5245, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.937143720071831e-05, |
| "loss": 3.5243, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.936305125320779e-05, |
| "loss": 3.5147, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.935466530569727e-05, |
| "loss": 3.5192, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9346295736990483e-05, |
| "loss": 3.5175, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9337909789479963e-05, |
| "loss": 3.5182, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9329523841969442e-05, |
| "loss": 3.5217, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9321137894458922e-05, |
| "loss": 3.5256, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.931276832575213e-05, |
| "loss": 3.5211, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.930438237824161e-05, |
| "loss": 3.5028, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.929599643073109e-05, |
| "loss": 3.5064, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9287610483220575e-05, |
| "loss": 3.5205, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9279240914513784e-05, |
| "loss": 3.5255, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9270854967003264e-05, |
| "loss": 3.5109, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.9262469019492744e-05, |
| "loss": 3.5184, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9254083071982224e-05, |
| "loss": 3.5224, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9245713503275436e-05, |
| "loss": 3.5076, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9237327555764916e-05, |
| "loss": 3.5114, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9228941608254396e-05, |
| "loss": 3.5187, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9220555660743876e-05, |
| "loss": 3.5206, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9212186092037085e-05, |
| "loss": 3.5244, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9203800144526565e-05, |
| "loss": 3.5205, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9195414197016045e-05, |
| "loss": 3.5058, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.918702824950553e-05, |
| "loss": 3.5241, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9178658680798738e-05, |
| "loss": 3.5228, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9170272733288217e-05, |
| "loss": 3.5176, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9161886785777697e-05, |
| "loss": 3.5232, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9153500838267177e-05, |
| "loss": 3.5164, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.914513126956039e-05, |
| "loss": 3.52, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.913674532204987e-05, |
| "loss": 3.5221, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.912835937453935e-05, |
| "loss": 3.5026, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.911997342702883e-05, |
| "loss": 3.5073, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.911160385832204e-05, |
| "loss": 3.5192, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.910321791081152e-05, |
| "loss": 3.5169, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9094831963301e-05, |
| "loss": 3.5024, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9086446015790482e-05, |
| "loss": 3.5083, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.907807644708369e-05, |
| "loss": 3.5083, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.906969049957317e-05, |
| "loss": 3.5137, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.906130455206265e-05, |
| "loss": 3.5103, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.905291860455213e-05, |
| "loss": 3.5155, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9044549035845344e-05, |
| "loss": 3.5136, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9036163088334824e-05, |
| "loss": 3.5242, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9027777140824303e-05, |
| "loss": 3.5169, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9019391193313783e-05, |
| "loss": 3.523, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9011021624606993e-05, |
| "loss": 3.521, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9002635677096472e-05, |
| "loss": 3.5191, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8994249729585952e-05, |
| "loss": 3.5064, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8985863782075436e-05, |
| "loss": 3.5174, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8977494213368645e-05, |
| "loss": 3.5126, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8969108265858125e-05, |
| "loss": 3.5208, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8960722318347605e-05, |
| "loss": 3.5041, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8952336370837085e-05, |
| "loss": 3.5171, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8943966802130297e-05, |
| "loss": 3.5035, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8935580854619777e-05, |
| "loss": 3.5079, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8927194907109257e-05, |
| "loss": 3.5021, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.891880895959873e-05, |
| "loss": 3.5279, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8910439390891946e-05, |
| "loss": 3.5244, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8902053443381426e-05, |
| "loss": 3.5068, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8893667495870906e-05, |
| "loss": 3.5116, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8885281548360383e-05, |
| "loss": 3.5178, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.88769119796536e-05, |
| "loss": 3.5224, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.886852603214308e-05, |
| "loss": 3.5199, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.886014008463256e-05, |
| "loss": 3.5095, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8851754137122035e-05, |
| "loss": 3.5263, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.884338456841525e-05, |
| "loss": 3.5103, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.883499862090473e-05, |
| "loss": 3.5236, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8826612673394204e-05, |
| "loss": 3.5155, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8818226725883684e-05, |
| "loss": 3.5142, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.88098571571769e-05, |
| "loss": 3.5157, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.880147120966638e-05, |
| "loss": 3.507, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8793085262155856e-05, |
| "loss": 3.5194, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8784699314645336e-05, |
| "loss": 3.5186, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8776329745938552e-05, |
| "loss": 3.5182, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8767943798428032e-05, |
| "loss": 3.5137, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.8759557850917505e-05, |
| "loss": 3.5118, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8461811542510986, |
| "eval_runtime": 304.6748, |
| "eval_samples_per_second": 1252.453, |
| "eval_steps_per_second": 39.14, |
| "step": 1297440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.875117190340699e-05, |
| "loss": 3.5043, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.874278595589647e-05, |
| "loss": 3.5073, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.873440000838595e-05, |
| "loss": 3.5211, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8726030439679158e-05, |
| "loss": 3.5105, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8717644492168638e-05, |
| "loss": 3.5276, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8709258544658118e-05, |
| "loss": 3.5034, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8700872597147598e-05, |
| "loss": 3.5134, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.869250302844081e-05, |
| "loss": 3.5013, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.868411708093029e-05, |
| "loss": 3.5176, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.867573113341977e-05, |
| "loss": 3.5119, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.866734518590925e-05, |
| "loss": 3.5135, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.865897561720246e-05, |
| "loss": 3.5121, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8650606048495675e-05, |
| "loss": 3.5059, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.864222010098515e-05, |
| "loss": 3.5027, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.863383415347463e-05, |
| "loss": 3.5039, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.862544820596411e-05, |
| "loss": 3.4998, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.861706225845359e-05, |
| "loss": 3.5083, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.860867631094307e-05, |
| "loss": 3.5077, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.860029036343255e-05, |
| "loss": 3.5026, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8591920794725764e-05, |
| "loss": 3.5272, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8583534847215244e-05, |
| "loss": 3.5056, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8575148899704724e-05, |
| "loss": 3.5123, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8566762952194204e-05, |
| "loss": 3.5142, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8558393383487413e-05, |
| "loss": 3.5239, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8550007435976893e-05, |
| "loss": 3.5011, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8541621488466376e-05, |
| "loss": 3.511, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8533235540955856e-05, |
| "loss": 3.5068, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8524865972249065e-05, |
| "loss": 3.5117, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.8516480024738545e-05, |
| "loss": 3.4944, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8508094077228025e-05, |
| "loss": 3.504, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8499708129717505e-05, |
| "loss": 3.5137, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8491338561010717e-05, |
| "loss": 3.511, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8482952613500197e-05, |
| "loss": 3.5097, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8474566665989677e-05, |
| "loss": 3.5172, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8466180718479157e-05, |
| "loss": 3.5106, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8457811149772366e-05, |
| "loss": 3.5093, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8449425202261846e-05, |
| "loss": 3.5041, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.844103925475133e-05, |
| "loss": 3.5152, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.843265330724081e-05, |
| "loss": 3.4997, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.842428373853402e-05, |
| "loss": 3.5073, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.84158977910235e-05, |
| "loss": 3.5087, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.840751184351298e-05, |
| "loss": 3.5164, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.839912589600246e-05, |
| "loss": 3.5015, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.839075632729567e-05, |
| "loss": 3.5092, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.838237037978515e-05, |
| "loss": 3.5172, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.837398443227463e-05, |
| "loss": 3.5086, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.836559848476411e-05, |
| "loss": 3.5114, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.835722891605732e-05, |
| "loss": 3.4843, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.83488429685468e-05, |
| "loss": 3.4969, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8340457021036283e-05, |
| "loss": 3.5135, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8332071073525763e-05, |
| "loss": 3.51, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8323701504818972e-05, |
| "loss": 3.5015, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8315315557308452e-05, |
| "loss": 3.4871, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8306929609797932e-05, |
| "loss": 3.4947, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8298543662287412e-05, |
| "loss": 3.4864, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8290174093580625e-05, |
| "loss": 3.513, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8281788146070105e-05, |
| "loss": 3.4976, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8273402198559585e-05, |
| "loss": 3.5103, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8265016251049065e-05, |
| "loss": 3.5099, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8256646682342274e-05, |
| "loss": 3.494, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8248260734831754e-05, |
| "loss": 3.5058, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8239874787321237e-05, |
| "loss": 3.4929, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8231488839810717e-05, |
| "loss": 3.4953, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8223119271103926e-05, |
| "loss": 3.4944, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8214733323593406e-05, |
| "loss": 3.5082, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8206347376082886e-05, |
| "loss": 3.5018, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8197961428572366e-05, |
| "loss": 3.4906, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.818959185986558e-05, |
| "loss": 3.4923, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.818120591235506e-05, |
| "loss": 3.4986, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8172819964844538e-05, |
| "loss": 3.5083, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8164434017334018e-05, |
| "loss": 3.5099, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8156064448627227e-05, |
| "loss": 3.5073, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8147678501116707e-05, |
| "loss": 3.5181, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.813929255360619e-05, |
| "loss": 3.5118, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.813090660609567e-05, |
| "loss": 3.5159, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.812253703738888e-05, |
| "loss": 3.51, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.811415108987836e-05, |
| "loss": 3.5013, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.810576514236784e-05, |
| "loss": 3.5071, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.809737919485732e-05, |
| "loss": 3.5043, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8089009626150532e-05, |
| "loss": 3.5056, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8080623678640012e-05, |
| "loss": 3.5104, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8072237731129492e-05, |
| "loss": 3.5139, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8063851783618972e-05, |
| "loss": 3.5043, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.805548221491218e-05, |
| "loss": 3.4963, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.804709626740166e-05, |
| "loss": 3.4929, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8038710319891144e-05, |
| "loss": 3.5056, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8030324372380624e-05, |
| "loss": 3.5115, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8021954803673833e-05, |
| "loss": 3.4987, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.8013568856163313e-05, |
| "loss": 3.5044, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8005182908652793e-05, |
| "loss": 3.5082, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7996813339946006e-05, |
| "loss": 3.4983, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7988427392435486e-05, |
| "loss": 3.4989, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7980041444924966e-05, |
| "loss": 3.5013, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7971655497414446e-05, |
| "loss": 3.5102, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7963269549903926e-05, |
| "loss": 3.5093, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7954883602393406e-05, |
| "loss": 3.5107, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7946497654882882e-05, |
| "loss": 3.493, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7938111707372362e-05, |
| "loss": 3.512, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7929742138665578e-05, |
| "loss": 3.5023, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7921356191155058e-05, |
| "loss": 3.5095, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.791297024364453e-05, |
| "loss": 3.5051, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.790458429613401e-05, |
| "loss": 3.5062, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7896214727427227e-05, |
| "loss": 3.507, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7887828779916703e-05, |
| "loss": 3.5109, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7879442832406183e-05, |
| "loss": 3.4886, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7871056884895663e-05, |
| "loss": 3.4962, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.786268731618888e-05, |
| "loss": 3.5024, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7854301368678352e-05, |
| "loss": 3.5026, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7845915421167836e-05, |
| "loss": 3.4905, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7837529473657316e-05, |
| "loss": 3.4961, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.782915990495053e-05, |
| "loss": 3.4947, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7820773957440005e-05, |
| "loss": 3.4997, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7812388009929485e-05, |
| "loss": 3.4979, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7804002062418965e-05, |
| "loss": 3.5009, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7795632493712177e-05, |
| "loss": 3.5049, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7787246546201657e-05, |
| "loss": 3.51, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7778860598691137e-05, |
| "loss": 3.5031, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7770474651180617e-05, |
| "loss": 3.5053, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7762105082473826e-05, |
| "loss": 3.5113, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7753719134963306e-05, |
| "loss": 3.507, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.774533318745279e-05, |
| "loss": 3.494, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.773694723994227e-05, |
| "loss": 3.5063, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.772857767123548e-05, |
| "loss": 3.4986, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.772019172372496e-05, |
| "loss": 3.5078, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.771180577621444e-05, |
| "loss": 3.4877, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.770341982870392e-05, |
| "loss": 3.5063, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.769505025999713e-05, |
| "loss": 3.4899, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.768666431248661e-05, |
| "loss": 3.4908, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.767827836497609e-05, |
| "loss": 3.4908, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.76699087962693e-05, |
| "loss": 3.5141, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.766152284875878e-05, |
| "loss": 3.5093, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.765313690124826e-05, |
| "loss": 3.4966, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.764475095373774e-05, |
| "loss": 3.4961, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7636381385030952e-05, |
| "loss": 3.5067, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7627995437520432e-05, |
| "loss": 3.5073, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7619609490009912e-05, |
| "loss": 3.5052, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7611223542499392e-05, |
| "loss": 3.4993, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.76028539737926e-05, |
| "loss": 3.508, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7594468026282085e-05, |
| "loss": 3.4998, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7586082078771564e-05, |
| "loss": 3.5135, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7577696131261044e-05, |
| "loss": 3.5001, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7569326562554254e-05, |
| "loss": 3.5019, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7560940615043733e-05, |
| "loss": 3.5, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7552554667533213e-05, |
| "loss": 3.498, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7544168720022693e-05, |
| "loss": 3.5067, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7535799151315906e-05, |
| "loss": 3.503, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7527413203805386e-05, |
| "loss": 3.5075, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7519027256294866e-05, |
| "loss": 3.4974, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7510641308784346e-05, |
| "loss": 3.4988, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8479113578796387, |
| "eval_runtime": 303.3384, |
| "eval_samples_per_second": 1257.971, |
| "eval_steps_per_second": 39.313, |
| "step": 1373760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7502271740077555e-05, |
| "loss": 3.4957, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7493885792567038e-05, |
| "loss": 3.4949, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7485499845056518e-05, |
| "loss": 3.5066, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7477113897545998e-05, |
| "loss": 3.4955, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7468744328839207e-05, |
| "loss": 3.517, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7460358381328687e-05, |
| "loss": 3.4886, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7451972433818167e-05, |
| "loss": 3.5005, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7443586486307647e-05, |
| "loss": 3.4865, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.743521691760086e-05, |
| "loss": 3.5032, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.742683097009034e-05, |
| "loss": 3.4968, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.741844502257982e-05, |
| "loss": 3.4998, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.74100590750693e-05, |
| "loss": 3.4981, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.740168950636251e-05, |
| "loss": 3.4914, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7393303558851992e-05, |
| "loss": 3.4936, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7384917611341472e-05, |
| "loss": 3.4934, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7376531663830952e-05, |
| "loss": 3.4885, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7368145716320432e-05, |
| "loss": 3.4911, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.735975976880991e-05, |
| "loss": 3.4956, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.735137382129939e-05, |
| "loss": 3.4906, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.73430042525926e-05, |
| "loss": 3.5129, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7334618305082084e-05, |
| "loss": 3.4928, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7326232357571564e-05, |
| "loss": 3.4975, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7317846410061044e-05, |
| "loss": 3.5007, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7309476841354253e-05, |
| "loss": 3.508, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7301090893843733e-05, |
| "loss": 3.4887, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7292704946333213e-05, |
| "loss": 3.5005, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7284318998822693e-05, |
| "loss": 3.4933, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7275949430115905e-05, |
| "loss": 3.4984, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.7267563482605385e-05, |
| "loss": 3.4797, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7259177535094865e-05, |
| "loss": 3.4914, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7250791587584345e-05, |
| "loss": 3.5004, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7242422018877554e-05, |
| "loss": 3.5012, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7234036071367038e-05, |
| "loss": 3.5001, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7225650123856518e-05, |
| "loss": 3.5051, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7217264176345998e-05, |
| "loss": 3.4963, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7208894607639207e-05, |
| "loss": 3.4997, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7200508660128687e-05, |
| "loss": 3.489, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7192122712618167e-05, |
| "loss": 3.504, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7183736765107647e-05, |
| "loss": 3.4833, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.717536719640086e-05, |
| "loss": 3.4949, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.716698124889034e-05, |
| "loss": 3.4946, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.715859530137982e-05, |
| "loss": 3.5028, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.71502093538693e-05, |
| "loss": 3.4946, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7141839785162508e-05, |
| "loss": 3.4923, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.713345383765199e-05, |
| "loss": 3.5033, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.712506789014147e-05, |
| "loss": 3.4989, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.711668194263095e-05, |
| "loss": 3.4961, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.710831237392416e-05, |
| "loss": 3.4812, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.709992642641364e-05, |
| "loss": 3.4813, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.709154047890312e-05, |
| "loss": 3.4954, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.70831545313926e-05, |
| "loss": 3.4967, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7074784962685813e-05, |
| "loss": 3.4898, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7066399015175293e-05, |
| "loss": 3.4763, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7058013067664773e-05, |
| "loss": 3.482, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7049627120154253e-05, |
| "loss": 3.4817, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7041257551447462e-05, |
| "loss": 3.4938, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7032871603936945e-05, |
| "loss": 3.4868, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7024485656426425e-05, |
| "loss": 3.4964, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7016099708915905e-05, |
| "loss": 3.494, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.7007730140209114e-05, |
| "loss": 3.4848, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6999344192698594e-05, |
| "loss": 3.491, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6990958245188074e-05, |
| "loss": 3.4826, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.698257229767755e-05, |
| "loss": 3.4794, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6974202728970766e-05, |
| "loss": 3.4859, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6965816781460246e-05, |
| "loss": 3.4915, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6957430833949726e-05, |
| "loss": 3.4979, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.69490448864392e-05, |
| "loss": 3.4749, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6940675317732415e-05, |
| "loss": 3.4814, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6932289370221895e-05, |
| "loss": 3.48, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.692390342271138e-05, |
| "loss": 3.5029, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6915517475200852e-05, |
| "loss": 3.4916, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6907147906494068e-05, |
| "loss": 3.493, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6898761958983548e-05, |
| "loss": 3.5087, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6890376011473024e-05, |
| "loss": 3.4985, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6881990063962504e-05, |
| "loss": 3.504, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.687362049525572e-05, |
| "loss": 3.4938, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.68652345477452e-05, |
| "loss": 3.4942, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6856848600234673e-05, |
| "loss": 3.4931, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6848462652724153e-05, |
| "loss": 3.4901, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.684009308401737e-05, |
| "loss": 3.4951, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.683170713650685e-05, |
| "loss": 3.4935, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6823321188996326e-05, |
| "loss": 3.5013, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6814935241485806e-05, |
| "loss": 3.4898, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.680656567277902e-05, |
| "loss": 3.4867, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6798179725268498e-05, |
| "loss": 3.4767, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6789793777757978e-05, |
| "loss": 3.4948, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6781407830247458e-05, |
| "loss": 3.4999, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6773038261540674e-05, |
| "loss": 3.4882, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.6764652314030147e-05, |
| "loss": 3.4908, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6756266366519627e-05, |
| "loss": 3.4969, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6747896797812843e-05, |
| "loss": 3.4775, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6739510850302323e-05, |
| "loss": 3.4894, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.67311249027918e-05, |
| "loss": 3.4862, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.672273895528128e-05, |
| "loss": 3.4979, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.671435300777076e-05, |
| "loss": 3.496, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.670596706026024e-05, |
| "loss": 3.4961, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.669758111274972e-05, |
| "loss": 3.4821, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.66891951652392e-05, |
| "loss": 3.5024, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.668082559653241e-05, |
| "loss": 3.4858, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.667243964902189e-05, |
| "loss": 3.5022, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.666405370151137e-05, |
| "loss": 3.4919, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.665566775400085e-05, |
| "loss": 3.4943, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.664729818529406e-05, |
| "loss": 3.4892, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.663891223778354e-05, |
| "loss": 3.502, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6630526290273024e-05, |
| "loss": 3.4761, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6622140342762504e-05, |
| "loss": 3.4838, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6613770774055713e-05, |
| "loss": 3.4911, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6605384826545193e-05, |
| "loss": 3.4905, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6596998879034673e-05, |
| "loss": 3.4782, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6588612931524153e-05, |
| "loss": 3.485, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6580243362817365e-05, |
| "loss": 3.4803, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6571857415306845e-05, |
| "loss": 3.4893, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6563471467796325e-05, |
| "loss": 3.4849, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6555085520285805e-05, |
| "loss": 3.4846, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6546715951579014e-05, |
| "loss": 3.4953, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6538330004068494e-05, |
| "loss": 3.4994, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6529944056557977e-05, |
| "loss": 3.4931, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6521558109047457e-05, |
| "loss": 3.4894, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6513188540340667e-05, |
| "loss": 3.4984, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6504802592830146e-05, |
| "loss": 3.4892, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6496416645319626e-05, |
| "loss": 3.4828, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6488030697809106e-05, |
| "loss": 3.4974, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.647966112910232e-05, |
| "loss": 3.4843, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.64712751815918e-05, |
| "loss": 3.4939, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.646288923408128e-05, |
| "loss": 3.4796, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.645450328657076e-05, |
| "loss": 3.4929, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6446133717863968e-05, |
| "loss": 3.4776, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6437747770353448e-05, |
| "loss": 3.4812, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.642936182284293e-05, |
| "loss": 3.4787, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.642097587533241e-05, |
| "loss": 3.4986, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.641260630662562e-05, |
| "loss": 3.4942, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.64042203591151e-05, |
| "loss": 3.4877, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.639583441160458e-05, |
| "loss": 3.482, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.638744846409406e-05, |
| "loss": 3.4898, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6379078895387273e-05, |
| "loss": 3.4978, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6370692947876753e-05, |
| "loss": 3.4903, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6362307000366232e-05, |
| "loss": 3.4877, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6353921052855712e-05, |
| "loss": 3.4987, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.634555148414892e-05, |
| "loss": 3.4894, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.63371655366384e-05, |
| "loss": 3.4965, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6328779589127885e-05, |
| "loss": 3.4863, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6320393641617365e-05, |
| "loss": 3.4908, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6312024072910574e-05, |
| "loss": 3.4878, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6303638125400054e-05, |
| "loss": 3.4883, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6295252177889534e-05, |
| "loss": 3.4894, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6286866230379014e-05, |
| "loss": 3.4976, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6278496661672226e-05, |
| "loss": 3.4888, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6270110714161706e-05, |
| "loss": 3.4872, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.6261724766651186e-05, |
| "loss": 3.487, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8496146202087402, |
| "eval_runtime": 303.3315, |
| "eval_samples_per_second": 1258.0, |
| "eval_steps_per_second": 39.313, |
| "step": 1450080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6253338819140666e-05, |
| "loss": 3.4807, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6244952871630146e-05, |
| "loss": 3.4829, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6236566924119626e-05, |
| "loss": 3.493, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6228180976609103e-05, |
| "loss": 3.4831, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.621981140790232e-05, |
| "loss": 3.5088, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.62114254603918e-05, |
| "loss": 3.4732, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.620303951288128e-05, |
| "loss": 3.4882, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.619465356537075e-05, |
| "loss": 3.4736, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6186283996663967e-05, |
| "loss": 3.4886, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6177898049153447e-05, |
| "loss": 3.49, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6169512101642927e-05, |
| "loss": 3.4823, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.616114253293614e-05, |
| "loss": 3.49, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.615277296422935e-05, |
| "loss": 3.478, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.614438701671883e-05, |
| "loss": 3.4829, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.613600106920831e-05, |
| "loss": 3.4816, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6127615121697792e-05, |
| "loss": 3.4795, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6119229174187272e-05, |
| "loss": 3.4777, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6110843226676752e-05, |
| "loss": 3.4815, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6102457279166225e-05, |
| "loss": 3.481, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6094071331655705e-05, |
| "loss": 3.4951, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.608570176294892e-05, |
| "loss": 3.4837, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.60773158154384e-05, |
| "loss": 3.4829, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6068929867927878e-05, |
| "loss": 3.4887, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6060560299221093e-05, |
| "loss": 3.4958, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6052174351710573e-05, |
| "loss": 3.4763, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6043788404200047e-05, |
| "loss": 3.4905, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.603540245668953e-05, |
| "loss": 3.4801, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6027032887982742e-05, |
| "loss": 3.4829, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6018646940472226e-05, |
| "loss": 3.4696, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.60102609929617e-05, |
| "loss": 3.4836, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.600187504545118e-05, |
| "loss": 3.4857, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5993505476744395e-05, |
| "loss": 3.489, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5985119529233875e-05, |
| "loss": 3.486, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.597673358172335e-05, |
| "loss": 3.4931, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.596834763421283e-05, |
| "loss": 3.4825, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5959978065506047e-05, |
| "loss": 3.4878, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.595159211799552e-05, |
| "loss": 3.4778, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5943206170485e-05, |
| "loss": 3.4909, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.593482022297448e-05, |
| "loss": 3.4708, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5926450654267696e-05, |
| "loss": 3.4826, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5918064706757173e-05, |
| "loss": 3.4822, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5909678759246653e-05, |
| "loss": 3.4922, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5901292811736133e-05, |
| "loss": 3.4818, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.589292324302935e-05, |
| "loss": 3.4805, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5884537295518825e-05, |
| "loss": 3.489, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5876151348008305e-05, |
| "loss": 3.4854, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5867765400497785e-05, |
| "loss": 3.485, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5859395831790994e-05, |
| "loss": 3.4755, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5851009884280474e-05, |
| "loss": 3.464, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5842623936769954e-05, |
| "loss": 3.4848, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5834237989259434e-05, |
| "loss": 3.4816, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5825868420552646e-05, |
| "loss": 3.4766, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5817482473042126e-05, |
| "loss": 3.4679, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5809096525531606e-05, |
| "loss": 3.468, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5800710578021086e-05, |
| "loss": 3.4662, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5792341009314295e-05, |
| "loss": 3.4826, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.578395506180378e-05, |
| "loss": 3.4754, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.577556911429326e-05, |
| "loss": 3.4794, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.576718316678274e-05, |
| "loss": 3.4869, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5758813598075948e-05, |
| "loss": 3.4711, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5750427650565428e-05, |
| "loss": 3.4757, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5742041703054908e-05, |
| "loss": 3.4757, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5733655755544388e-05, |
| "loss": 3.4638, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.57252861868376e-05, |
| "loss": 3.4777, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.571690023932708e-05, |
| "loss": 3.4801, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.570851429181656e-05, |
| "loss": 3.482, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.570012834430604e-05, |
| "loss": 3.4617, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.569175877559925e-05, |
| "loss": 3.4712, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5683372828088732e-05, |
| "loss": 3.4707, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5674986880578212e-05, |
| "loss": 3.4853, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5666600933067692e-05, |
| "loss": 3.4808, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.56582313643609e-05, |
| "loss": 3.4801, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.564984541685038e-05, |
| "loss": 3.4928, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.564145946933986e-05, |
| "loss": 3.4872, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.563307352182934e-05, |
| "loss": 3.499, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5624703953122554e-05, |
| "loss": 3.4762, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5616318005612034e-05, |
| "loss": 3.4839, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5607932058101514e-05, |
| "loss": 3.4817, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5599546110590994e-05, |
| "loss": 3.4784, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5591176541884203e-05, |
| "loss": 3.4847, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5582790594373686e-05, |
| "loss": 3.4793, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5574404646863166e-05, |
| "loss": 3.488, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5566018699352646e-05, |
| "loss": 3.4794, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5557649130645855e-05, |
| "loss": 3.4687, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5549263183135335e-05, |
| "loss": 3.4689, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5540877235624815e-05, |
| "loss": 3.4815, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5532491288114295e-05, |
| "loss": 3.4873, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5524121719407507e-05, |
| "loss": 3.478, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.5515735771896987e-05, |
| "loss": 3.4753, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5507349824386467e-05, |
| "loss": 3.4844, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5498980255679676e-05, |
| "loss": 3.4693, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5490594308169156e-05, |
| "loss": 3.4741, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.548220836065864e-05, |
| "loss": 3.4774, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.547382241314812e-05, |
| "loss": 3.4861, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.54654364656376e-05, |
| "loss": 3.4872, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.545705051812708e-05, |
| "loss": 3.4811, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.544866457061656e-05, |
| "loss": 3.4652, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.544027862310604e-05, |
| "loss": 3.4913, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.543190905439925e-05, |
| "loss": 3.4726, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5423523106888732e-05, |
| "loss": 3.4904, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5415137159378212e-05, |
| "loss": 3.4833, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5406751211867692e-05, |
| "loss": 3.481, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.53983816431609e-05, |
| "loss": 3.4768, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.538999569565038e-05, |
| "loss": 3.488, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.538160974813986e-05, |
| "loss": 3.467, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.537322380062934e-05, |
| "loss": 3.4734, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5364854231922553e-05, |
| "loss": 3.4753, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5356468284412033e-05, |
| "loss": 3.4806, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5348082336901513e-05, |
| "loss": 3.4661, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5339696389390993e-05, |
| "loss": 3.4748, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5331326820684202e-05, |
| "loss": 3.4717, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5322940873173686e-05, |
| "loss": 3.4715, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5314554925663166e-05, |
| "loss": 3.4764, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5306168978152645e-05, |
| "loss": 3.4693, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5297799409445855e-05, |
| "loss": 3.485, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5289413461935335e-05, |
| "loss": 3.4837, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5281027514424814e-05, |
| "loss": 3.4825, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5272641566914294e-05, |
| "loss": 3.4763, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5264271998207507e-05, |
| "loss": 3.4833, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5255886050696987e-05, |
| "loss": 3.4805, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5247500103186467e-05, |
| "loss": 3.4692, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5239114155675947e-05, |
| "loss": 3.4832, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5230744586969156e-05, |
| "loss": 3.4745, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5222358639458636e-05, |
| "loss": 3.4802, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.521397269194812e-05, |
| "loss": 3.4689, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.52055867444376e-05, |
| "loss": 3.478, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5197217175730808e-05, |
| "loss": 3.4658, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5188831228220288e-05, |
| "loss": 3.4701, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5180445280709768e-05, |
| "loss": 3.4672, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5172059333199248e-05, |
| "loss": 3.4816, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.516368976449246e-05, |
| "loss": 3.4791, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.515530381698194e-05, |
| "loss": 3.4821, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.514691786947142e-05, |
| "loss": 3.4706, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.51385319219609e-05, |
| "loss": 3.4816, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.513016235325411e-05, |
| "loss": 3.4863, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.512177640574359e-05, |
| "loss": 3.479, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5113390458233073e-05, |
| "loss": 3.4751, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5105004510722546e-05, |
| "loss": 3.4866, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5096634942015762e-05, |
| "loss": 3.4786, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5088248994505242e-05, |
| "loss": 3.4869, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5079863046994722e-05, |
| "loss": 3.4701, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5071493478287934e-05, |
| "loss": 3.483, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5063107530777414e-05, |
| "loss": 3.4733, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5054721583266894e-05, |
| "loss": 3.4755, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5046335635756367e-05, |
| "loss": 3.4807, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5037966067049583e-05, |
| "loss": 3.4817, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5029580119539063e-05, |
| "loss": 3.4788, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5021194172028543e-05, |
| "loss": 3.473, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.501280822451802e-05, |
| "loss": 3.479, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8512766361236572, |
| "eval_runtime": 302.4453, |
| "eval_samples_per_second": 1261.686, |
| "eval_steps_per_second": 39.429, |
| "step": 1526400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.5004438655811236e-05, |
| "loss": 3.4741, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4996052708300712e-05, |
| "loss": 3.4689, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4987666760790192e-05, |
| "loss": 3.4796, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4979280813279672e-05, |
| "loss": 3.4728, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4970911244572885e-05, |
| "loss": 3.4934, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4962525297062365e-05, |
| "loss": 3.47, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4954139349551844e-05, |
| "loss": 3.4715, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4945753402041324e-05, |
| "loss": 3.4616, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4937383833334534e-05, |
| "loss": 3.4756, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4928997885824017e-05, |
| "loss": 3.4811, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4920611938313497e-05, |
| "loss": 3.4683, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4912225990802977e-05, |
| "loss": 3.4738, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4903856422096186e-05, |
| "loss": 3.469, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4895470474585666e-05, |
| "loss": 3.4701, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4887084527075146e-05, |
| "loss": 3.4711, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4878698579564626e-05, |
| "loss": 3.4677, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.487031263205411e-05, |
| "loss": 3.4662, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4861926684543586e-05, |
| "loss": 3.4734, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4853540737033066e-05, |
| "loss": 3.4668, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4845154789522546e-05, |
| "loss": 3.4805, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4836785220815758e-05, |
| "loss": 3.4717, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4828399273305235e-05, |
| "loss": 3.4746, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4820013325794718e-05, |
| "loss": 3.4759, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4811627378284198e-05, |
| "loss": 3.4817, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4803257809577407e-05, |
| "loss": 3.4647, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4794871862066887e-05, |
| "loss": 3.4767, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.4786485914556367e-05, |
| "loss": 3.4706, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.477811634584958e-05, |
| "loss": 3.4673, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.476973039833906e-05, |
| "loss": 3.4586, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.476134445082854e-05, |
| "loss": 3.4688, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.475295850331802e-05, |
| "loss": 3.4769, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4744588934611232e-05, |
| "loss": 3.4768, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.473620298710071e-05, |
| "loss": 3.4766, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4727817039590188e-05, |
| "loss": 3.4772, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.471943109207967e-05, |
| "loss": 3.4696, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.471106152337288e-05, |
| "loss": 3.4733, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.470267557586236e-05, |
| "loss": 3.4679, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.469428962835184e-05, |
| "loss": 3.476, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.468590368084132e-05, |
| "loss": 3.4632, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4677534112134533e-05, |
| "loss": 3.4719, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4669148164624013e-05, |
| "loss": 3.4689, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4660762217113493e-05, |
| "loss": 3.4789, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4652376269602973e-05, |
| "loss": 3.4707, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4644006700896182e-05, |
| "loss": 3.4682, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4635620753385662e-05, |
| "loss": 3.4813, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4627234805875142e-05, |
| "loss": 3.4709, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4618848858364625e-05, |
| "loss": 3.4712, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4610479289657834e-05, |
| "loss": 3.4633, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4602093342147314e-05, |
| "loss": 3.4511, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4593707394636794e-05, |
| "loss": 3.4742, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4585321447126274e-05, |
| "loss": 3.4713, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4576951878419487e-05, |
| "loss": 3.4669, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4568565930908967e-05, |
| "loss": 3.4521, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4560179983398447e-05, |
| "loss": 3.4599, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4551794035887927e-05, |
| "loss": 3.4507, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4543424467181136e-05, |
| "loss": 3.473, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4535038519670616e-05, |
| "loss": 3.4642, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4526652572160096e-05, |
| "loss": 3.4663, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.451826662464958e-05, |
| "loss": 3.475, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4509897055942788e-05, |
| "loss": 3.4626, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4501511108432268e-05, |
| "loss": 3.4632, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4493125160921748e-05, |
| "loss": 3.4659, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4484739213411228e-05, |
| "loss": 3.4501, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.447636964470444e-05, |
| "loss": 3.4659, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.446798369719392e-05, |
| "loss": 3.4635, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.44595977496834e-05, |
| "loss": 3.4728, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.445121180217288e-05, |
| "loss": 3.4504, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.444284223346609e-05, |
| "loss": 3.4593, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.443445628595557e-05, |
| "loss": 3.4549, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.442607033844505e-05, |
| "loss": 3.4754, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4417684390934533e-05, |
| "loss": 3.4684, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4409314822227742e-05, |
| "loss": 3.466, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4400928874717222e-05, |
| "loss": 3.4823, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.43925429272067e-05, |
| "loss": 3.4724, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.438415697969618e-05, |
| "loss": 3.4914, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4375787410989394e-05, |
| "loss": 3.4645, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4367401463478874e-05, |
| "loss": 3.4707, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4359015515968354e-05, |
| "loss": 3.4715, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.435062956845783e-05, |
| "loss": 3.4628, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4342259999751043e-05, |
| "loss": 3.4769, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4333874052240523e-05, |
| "loss": 3.4671, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4325488104730003e-05, |
| "loss": 3.4752, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4317102157219483e-05, |
| "loss": 3.4667, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4308732588512695e-05, |
| "loss": 3.4591, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4300346641002175e-05, |
| "loss": 3.465, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4291960693491655e-05, |
| "loss": 3.4598, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4283574745981132e-05, |
| "loss": 3.4766, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4275205177274344e-05, |
| "loss": 3.4683, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.4266819229763828e-05, |
| "loss": 3.4623, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4258433282253304e-05, |
| "loss": 3.4726, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4250063713546517e-05, |
| "loss": 3.4622, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4241677766035997e-05, |
| "loss": 3.4582, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4233291818525477e-05, |
| "loss": 3.4611, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4224905871014957e-05, |
| "loss": 3.4773, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4216519923504437e-05, |
| "loss": 3.4763, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4208133975993917e-05, |
| "loss": 3.4709, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4199748028483397e-05, |
| "loss": 3.4521, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4191362080972876e-05, |
| "loss": 3.4764, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4182992512266086e-05, |
| "loss": 3.4613, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4174606564755566e-05, |
| "loss": 3.4773, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.416622061724505e-05, |
| "loss": 3.4735, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.415783466973453e-05, |
| "loss": 3.47, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4149465101027738e-05, |
| "loss": 3.465, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4141079153517218e-05, |
| "loss": 3.4783, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4132693206006698e-05, |
| "loss": 3.4515, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4124307258496178e-05, |
| "loss": 3.4624, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.411593768978939e-05, |
| "loss": 3.4592, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.410755174227887e-05, |
| "loss": 3.4737, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.409916579476835e-05, |
| "loss": 3.4507, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.409077984725783e-05, |
| "loss": 3.4628, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.408241027855104e-05, |
| "loss": 3.4595, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.407402433104052e-05, |
| "loss": 3.4613, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4065638383530003e-05, |
| "loss": 3.4624, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4057252436019482e-05, |
| "loss": 3.4609, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.404888286731269e-05, |
| "loss": 3.4722, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.404049691980217e-05, |
| "loss": 3.4738, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.403211097229165e-05, |
| "loss": 3.4696, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.402372502478113e-05, |
| "loss": 3.4619, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4015355456074344e-05, |
| "loss": 3.4747, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.4006969508563824e-05, |
| "loss": 3.4658, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3998583561053304e-05, |
| "loss": 3.4577, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3990197613542784e-05, |
| "loss": 3.4732, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3981828044835993e-05, |
| "loss": 3.4604, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3973442097325473e-05, |
| "loss": 3.4714, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3965056149814956e-05, |
| "loss": 3.4568, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3956670202304433e-05, |
| "loss": 3.4676, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3948300633597645e-05, |
| "loss": 3.4585, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3939914686087125e-05, |
| "loss": 3.4544, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3931528738576605e-05, |
| "loss": 3.4617, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3923142791066082e-05, |
| "loss": 3.4681, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3914773222359298e-05, |
| "loss": 3.4654, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3906387274848778e-05, |
| "loss": 3.4707, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3898001327338258e-05, |
| "loss": 3.4566, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3889615379827734e-05, |
| "loss": 3.4731, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3881245811120947e-05, |
| "loss": 3.4741, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3872859863610427e-05, |
| "loss": 3.4684, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3864473916099906e-05, |
| "loss": 3.4648, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3856087968589386e-05, |
| "loss": 3.472, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.38477183998826e-05, |
| "loss": 3.4668, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.383933245237208e-05, |
| "loss": 3.4758, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3830946504861555e-05, |
| "loss": 3.4601, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3822576936154768e-05, |
| "loss": 3.4673, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.381419098864425e-05, |
| "loss": 3.4628, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3805805041133728e-05, |
| "loss": 3.4659, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3797419093623208e-05, |
| "loss": 3.466, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.378904952491642e-05, |
| "loss": 3.4696, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.37806635774059e-05, |
| "loss": 3.4695, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.377227762989538e-05, |
| "loss": 3.4578, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.376389168238486e-05, |
| "loss": 3.4692, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8528854846954346, |
| "eval_runtime": 303.4942, |
| "eval_samples_per_second": 1257.325, |
| "eval_steps_per_second": 39.292, |
| "step": 1602720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.375550573487434e-05, |
| "loss": 3.4584, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.374711978736382e-05, |
| "loss": 3.4562, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.37387338398533e-05, |
| "loss": 3.47, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.373034789234278e-05, |
| "loss": 3.4612, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.372197832363599e-05, |
| "loss": 3.4799, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3713592376125472e-05, |
| "loss": 3.4609, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3705206428614952e-05, |
| "loss": 3.4552, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3696820481104432e-05, |
| "loss": 3.4569, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.368845091239764e-05, |
| "loss": 3.4577, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.368006496488712e-05, |
| "loss": 3.4706, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.36716790173766e-05, |
| "loss": 3.457, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.366329306986608e-05, |
| "loss": 3.4645, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3654939879963026e-05, |
| "loss": 3.4576, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3646553932452503e-05, |
| "loss": 3.4536, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3638167984941983e-05, |
| "loss": 3.4625, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3629782037431463e-05, |
| "loss": 3.4528, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3621396089920943e-05, |
| "loss": 3.4508, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3613010142410426e-05, |
| "loss": 3.4597, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3604624194899906e-05, |
| "loss": 3.4591, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3596238247389383e-05, |
| "loss": 3.4692, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3587868678682595e-05, |
| "loss": 3.4649, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3579482731172075e-05, |
| "loss": 3.462, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3571096783661555e-05, |
| "loss": 3.465, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3562710836151035e-05, |
| "loss": 3.4676, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3554341267444247e-05, |
| "loss": 3.4523, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3545955319933727e-05, |
| "loss": 3.4708, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3537569372423207e-05, |
| "loss": 3.4559, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3529183424912684e-05, |
| "loss": 3.4556, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3520813856205896e-05, |
| "loss": 3.4502, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.3512427908695376e-05, |
| "loss": 3.4573, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3504041961184856e-05, |
| "loss": 3.4653, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.349567239247807e-05, |
| "loss": 3.4631, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.348728644496755e-05, |
| "loss": 3.4681, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.347890049745703e-05, |
| "loss": 3.4683, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3470514549946505e-05, |
| "loss": 3.4559, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.346214498123972e-05, |
| "loss": 3.4605, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.34537590337292e-05, |
| "loss": 3.4588, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.344537308621868e-05, |
| "loss": 3.4604, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3436987138708158e-05, |
| "loss": 3.4528, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.342861757000137e-05, |
| "loss": 3.4566, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.342023162249085e-05, |
| "loss": 3.4603, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.341184567498033e-05, |
| "loss": 3.4692, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.340345972746981e-05, |
| "loss": 3.4579, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3395090158763022e-05, |
| "loss": 3.4589, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3386704211252502e-05, |
| "loss": 3.467, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.337831826374198e-05, |
| "loss": 3.4608, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.336993231623146e-05, |
| "loss": 3.4583, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3361562747524675e-05, |
| "loss": 3.4546, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3353176800014155e-05, |
| "loss": 3.4342, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.334479085250363e-05, |
| "loss": 3.4684, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.333640490499311e-05, |
| "loss": 3.4599, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3328035336286324e-05, |
| "loss": 3.4572, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3319649388775804e-05, |
| "loss": 3.449, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3311263441265284e-05, |
| "loss": 3.4428, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3302877493754764e-05, |
| "loss": 3.4414, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3294507925047976e-05, |
| "loss": 3.4565, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3286121977537453e-05, |
| "loss": 3.454, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3277736030026933e-05, |
| "loss": 3.4567, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3269350082516413e-05, |
| "loss": 3.4627, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.326098051380963e-05, |
| "loss": 3.449, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3252594566299105e-05, |
| "loss": 3.4556, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3244208618788585e-05, |
| "loss": 3.4547, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3235822671278065e-05, |
| "loss": 3.4397, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3227453102571274e-05, |
| "loss": 3.4504, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3219067155060757e-05, |
| "loss": 3.4539, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3210681207550237e-05, |
| "loss": 3.4612, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3202295260039717e-05, |
| "loss": 3.4369, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3193925691332926e-05, |
| "loss": 3.4503, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3185539743822406e-05, |
| "loss": 3.4439, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3177153796311886e-05, |
| "loss": 3.4631, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3168767848801366e-05, |
| "loss": 3.4604, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.316039828009458e-05, |
| "loss": 3.4533, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.315201233258406e-05, |
| "loss": 3.4699, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.314362638507354e-05, |
| "loss": 3.4619, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.313524043756302e-05, |
| "loss": 3.482, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3126870868856228e-05, |
| "loss": 3.4543, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.311848492134571e-05, |
| "loss": 3.459, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.311009897383519e-05, |
| "loss": 3.4556, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.310171302632467e-05, |
| "loss": 3.4512, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.309334345761788e-05, |
| "loss": 3.4666, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.308495751010736e-05, |
| "loss": 3.4571, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.307657156259684e-05, |
| "loss": 3.4613, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.306818561508632e-05, |
| "loss": 3.4585, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3059816046379532e-05, |
| "loss": 3.4482, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3051430098869012e-05, |
| "loss": 3.4542, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3043044151358492e-05, |
| "loss": 3.4478, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3034658203847972e-05, |
| "loss": 3.4659, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.302628863514118e-05, |
| "loss": 3.4597, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.3017902687630665e-05, |
| "loss": 3.4465, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3009516740120145e-05, |
| "loss": 3.464, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3001147171413354e-05, |
| "loss": 3.4515, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2992761223902834e-05, |
| "loss": 3.4484, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2984375276392314e-05, |
| "loss": 3.4509, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2975989328881794e-05, |
| "loss": 3.4662, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2967603381371274e-05, |
| "loss": 3.4618, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2959217433860754e-05, |
| "loss": 3.4639, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2950831486350234e-05, |
| "loss": 3.4431, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2942445538839713e-05, |
| "loss": 3.4633, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2934075970132926e-05, |
| "loss": 3.4513, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2925690022622403e-05, |
| "loss": 3.4633, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2917304075111882e-05, |
| "loss": 3.4632, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2908918127601366e-05, |
| "loss": 3.4531, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2900548558894578e-05, |
| "loss": 3.4571, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2892162611384055e-05, |
| "loss": 3.4634, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2883776663873535e-05, |
| "loss": 3.4431, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2875390716363015e-05, |
| "loss": 3.4515, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2867021147656227e-05, |
| "loss": 3.4483, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2858635200145707e-05, |
| "loss": 3.4587, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2850249252635187e-05, |
| "loss": 3.4424, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2841863305124667e-05, |
| "loss": 3.4555, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2833493736417876e-05, |
| "loss": 3.4455, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2825107788907356e-05, |
| "loss": 3.4509, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2816721841396836e-05, |
| "loss": 3.4535, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.280833589388632e-05, |
| "loss": 3.4482, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.279996632517953e-05, |
| "loss": 3.4625, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.279158037766901e-05, |
| "loss": 3.461, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.278319443015849e-05, |
| "loss": 3.4592, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.277480848264797e-05, |
| "loss": 3.4513, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.276643891394118e-05, |
| "loss": 3.4607, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.275805296643066e-05, |
| "loss": 3.459, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.274966701892014e-05, |
| "loss": 3.444, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.274128107140962e-05, |
| "loss": 3.4651, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.273291150270283e-05, |
| "loss": 3.4467, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.272452555519231e-05, |
| "loss": 3.4622, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.271613960768179e-05, |
| "loss": 3.4436, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2707753660171273e-05, |
| "loss": 3.4588, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2699384091464482e-05, |
| "loss": 3.4459, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2690998143953962e-05, |
| "loss": 3.4478, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2682612196443442e-05, |
| "loss": 3.4448, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2674226248932922e-05, |
| "loss": 3.4624, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2665856680226135e-05, |
| "loss": 3.4526, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2657470732715615e-05, |
| "loss": 3.4604, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2649084785205095e-05, |
| "loss": 3.4436, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2640698837694574e-05, |
| "loss": 3.462, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2632329268987784e-05, |
| "loss": 3.4608, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2623943321477264e-05, |
| "loss": 3.4577, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2615557373966743e-05, |
| "loss": 3.4559, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2607171426456223e-05, |
| "loss": 3.4557, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2598801857749436e-05, |
| "loss": 3.455, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2590415910238916e-05, |
| "loss": 3.4633, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2582029962728396e-05, |
| "loss": 3.4485, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2573644015217876e-05, |
| "loss": 3.4569, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2565274446511085e-05, |
| "loss": 3.4513, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2556888499000568e-05, |
| "loss": 3.4569, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2548502551490048e-05, |
| "loss": 3.4536, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2540116603979528e-05, |
| "loss": 3.4537, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2531747035272737e-05, |
| "loss": 3.4652, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2523361087762217e-05, |
| "loss": 3.4427, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2514975140251697e-05, |
| "loss": 3.461, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8545031547546387, |
| "eval_runtime": 306.1569, |
| "eval_samples_per_second": 1246.39, |
| "eval_steps_per_second": 38.951, |
| "step": 1679040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2506589192741177e-05, |
| "loss": 3.4507, |
| "step": 1679360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.249821962403439e-05, |
| "loss": 3.4434, |
| "step": 1679872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.248983367652387e-05, |
| "loss": 3.4562, |
| "step": 1680384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.248144772901335e-05, |
| "loss": 3.4541, |
| "step": 1680896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2473061781502826e-05, |
| "loss": 3.4658, |
| "step": 1681408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.246469221279604e-05, |
| "loss": 3.4511, |
| "step": 1681920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2456306265285522e-05, |
| "loss": 3.4445, |
| "step": 1682432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2447920317775002e-05, |
| "loss": 3.4429, |
| "step": 1682944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.243953437026448e-05, |
| "loss": 3.4509, |
| "step": 1683456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.243116480155769e-05, |
| "loss": 3.4587, |
| "step": 1683968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.242277885404717e-05, |
| "loss": 3.4468, |
| "step": 1684480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.241439290653665e-05, |
| "loss": 3.4487, |
| "step": 1684992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2406023337829863e-05, |
| "loss": 3.4519, |
| "step": 1685504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2397637390319343e-05, |
| "loss": 3.4418, |
| "step": 1686016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2389251442808823e-05, |
| "loss": 3.4499, |
| "step": 1686528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.23808654952983e-05, |
| "loss": 3.438, |
| "step": 1687040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.237247954778778e-05, |
| "loss": 3.4428, |
| "step": 1687552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.236409360027726e-05, |
| "loss": 3.4467, |
| "step": 1688064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2355707652766743e-05, |
| "loss": 3.4483, |
| "step": 1688576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2347321705256223e-05, |
| "loss": 3.4558, |
| "step": 1689088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2338952136549432e-05, |
| "loss": 3.4598, |
| "step": 1689600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2330566189038912e-05, |
| "loss": 3.4483, |
| "step": 1690112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2322180241528392e-05, |
| "loss": 3.4532, |
| "step": 1690624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2313794294017872e-05, |
| "loss": 3.4574, |
| "step": 1691136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2305424725311084e-05, |
| "loss": 3.4382, |
| "step": 1691648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2297038777800564e-05, |
| "loss": 3.4639, |
| "step": 1692160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2288652830290044e-05, |
| "loss": 3.4439, |
| "step": 1692672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2280266882779524e-05, |
| "loss": 3.442, |
| "step": 1693184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2271897314072733e-05, |
| "loss": 3.439, |
| "step": 1693696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.2263511366562213e-05, |
| "loss": 3.4474, |
| "step": 1694208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2255125419051697e-05, |
| "loss": 3.4513, |
| "step": 1694720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2246739471541177e-05, |
| "loss": 3.4548, |
| "step": 1695232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2238369902834386e-05, |
| "loss": 3.4539, |
| "step": 1695744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2229983955323866e-05, |
| "loss": 3.4598, |
| "step": 1696256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2221598007813346e-05, |
| "loss": 3.4432, |
| "step": 1696768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2213212060302826e-05, |
| "loss": 3.4508, |
| "step": 1697280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2204842491596038e-05, |
| "loss": 3.4519, |
| "step": 1697792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2196456544085518e-05, |
| "loss": 3.4463, |
| "step": 1698304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2188070596574998e-05, |
| "loss": 3.4433, |
| "step": 1698816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2179701027868207e-05, |
| "loss": 3.4473, |
| "step": 1699328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2171315080357687e-05, |
| "loss": 3.4462, |
| "step": 1699840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2162929132847167e-05, |
| "loss": 3.4581, |
| "step": 1700352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2154543185336647e-05, |
| "loss": 3.4471, |
| "step": 1700864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.214617361662986e-05, |
| "loss": 3.4468, |
| "step": 1701376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.213778766911934e-05, |
| "loss": 3.4576, |
| "step": 1701888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.212940172160882e-05, |
| "loss": 3.4468, |
| "step": 1702400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.21210157740983e-05, |
| "loss": 3.4508, |
| "step": 1702912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.211264620539151e-05, |
| "loss": 3.4466, |
| "step": 1703424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2104260257880992e-05, |
| "loss": 3.4153, |
| "step": 1703936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2095874310370472e-05, |
| "loss": 3.461, |
| "step": 1704448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.208748836285995e-05, |
| "loss": 3.4478, |
| "step": 1704960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.207911879415316e-05, |
| "loss": 3.4486, |
| "step": 1705472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.207073284664264e-05, |
| "loss": 3.4377, |
| "step": 1705984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.206234689913212e-05, |
| "loss": 3.4275, |
| "step": 1706496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.20539609516216e-05, |
| "loss": 3.4339, |
| "step": 1707008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2045591382914813e-05, |
| "loss": 3.4432, |
| "step": 1707520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2037205435404293e-05, |
| "loss": 3.4437, |
| "step": 1708032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2028819487893773e-05, |
| "loss": 3.4417, |
| "step": 1708544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.202043354038325e-05, |
| "loss": 3.4537, |
| "step": 1709056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2012063971676462e-05, |
| "loss": 3.4368, |
| "step": 1709568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.2003678024165945e-05, |
| "loss": 3.4387, |
| "step": 1710080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1995292076655425e-05, |
| "loss": 3.4489, |
| "step": 1710592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1986906129144902e-05, |
| "loss": 3.4253, |
| "step": 1711104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1978536560438114e-05, |
| "loss": 3.4411, |
| "step": 1711616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1970150612927594e-05, |
| "loss": 3.4422, |
| "step": 1712128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1961764665417074e-05, |
| "loss": 3.4522, |
| "step": 1712640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1953378717906554e-05, |
| "loss": 3.4236, |
| "step": 1713152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1945009149199767e-05, |
| "loss": 3.4417, |
| "step": 1713664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1936623201689247e-05, |
| "loss": 3.4305, |
| "step": 1714176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1928237254178723e-05, |
| "loss": 3.4523, |
| "step": 1714688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1919851306668203e-05, |
| "loss": 3.4514, |
| "step": 1715200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1911481737961416e-05, |
| "loss": 3.4435, |
| "step": 1715712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.19030957904509e-05, |
| "loss": 3.4531, |
| "step": 1716224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1894709842940376e-05, |
| "loss": 3.4529, |
| "step": 1716736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1886323895429856e-05, |
| "loss": 3.471, |
| "step": 1717248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1877954326723068e-05, |
| "loss": 3.4449, |
| "step": 1717760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1869568379212545e-05, |
| "loss": 3.4494, |
| "step": 1718272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1861182431702028e-05, |
| "loss": 3.4455, |
| "step": 1718784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1852796484191508e-05, |
| "loss": 3.438, |
| "step": 1719296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.184442691548472e-05, |
| "loss": 3.4543, |
| "step": 1719808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1836040967974197e-05, |
| "loss": 3.4481, |
| "step": 1720320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1827655020463677e-05, |
| "loss": 3.4511, |
| "step": 1720832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.1819269072953157e-05, |
| "loss": 3.4538, |
| "step": 1721344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.181089950424637e-05, |
| "loss": 3.4369, |
| "step": 1721856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.180251355673585e-05, |
| "loss": 3.4372, |
| "step": 1722368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.179412760922533e-05, |
| "loss": 3.4376, |
| "step": 1722880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.178574166171481e-05, |
| "loss": 3.4563, |
| "step": 1723392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.177737209300802e-05, |
| "loss": 3.4461, |
| "step": 1723904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.17689861454975e-05, |
| "loss": 3.4396, |
| "step": 1724416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.176060019798698e-05, |
| "loss": 3.4399, |
| "step": 1724928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1752230629280194e-05, |
| "loss": 3.4479, |
| "step": 1725440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.174384468176967e-05, |
| "loss": 3.4357, |
| "step": 1725952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.173545873425915e-05, |
| "loss": 3.4413, |
| "step": 1726464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.172707278674863e-05, |
| "loss": 3.4576, |
| "step": 1726976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.171868683923811e-05, |
| "loss": 3.4505, |
| "step": 1727488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.171030089172759e-05, |
| "loss": 3.4515, |
| "step": 1728000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.170191494421707e-05, |
| "loss": 3.4342, |
| "step": 1728512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1693528996706554e-05, |
| "loss": 3.4522, |
| "step": 1729024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1685159427999763e-05, |
| "loss": 3.4395, |
| "step": 1729536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1676773480489243e-05, |
| "loss": 3.4527, |
| "step": 1730048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1668387532978723e-05, |
| "loss": 3.4494, |
| "step": 1730560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.16600015854682e-05, |
| "loss": 3.4476, |
| "step": 1731072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1651632016761415e-05, |
| "loss": 3.4466, |
| "step": 1731584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1643246069250895e-05, |
| "loss": 3.4494, |
| "step": 1732096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1634860121740375e-05, |
| "loss": 3.4328, |
| "step": 1732608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1626474174229852e-05, |
| "loss": 3.44, |
| "step": 1733120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1618104605523064e-05, |
| "loss": 3.4342, |
| "step": 1733632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1609718658012544e-05, |
| "loss": 3.4527, |
| "step": 1734144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1601332710502024e-05, |
| "loss": 3.4334, |
| "step": 1734656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1592946762991504e-05, |
| "loss": 3.4408, |
| "step": 1735168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1584577194284717e-05, |
| "loss": 3.4343, |
| "step": 1735680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1576191246774197e-05, |
| "loss": 3.4403, |
| "step": 1736192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1567805299263673e-05, |
| "loss": 3.4433, |
| "step": 1736704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1559419351753153e-05, |
| "loss": 3.4352, |
| "step": 1737216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.155104978304637e-05, |
| "loss": 3.4489, |
| "step": 1737728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.154266383553585e-05, |
| "loss": 3.4552, |
| "step": 1738240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1534277888025325e-05, |
| "loss": 3.4434, |
| "step": 1738752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1525891940514805e-05, |
| "loss": 3.4498, |
| "step": 1739264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1517522371808018e-05, |
| "loss": 3.4447, |
| "step": 1739776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1509136424297498e-05, |
| "loss": 3.4484, |
| "step": 1740288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1500750476786978e-05, |
| "loss": 3.4382, |
| "step": 1740800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1492364529276458e-05, |
| "loss": 3.4496, |
| "step": 1741312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.148399496056967e-05, |
| "loss": 3.4367, |
| "step": 1741824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1475609013059147e-05, |
| "loss": 3.4494, |
| "step": 1742336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1467223065548627e-05, |
| "loss": 3.4355, |
| "step": 1742848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1458837118038107e-05, |
| "loss": 3.4422, |
| "step": 1743360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1450467549331323e-05, |
| "loss": 3.44, |
| "step": 1743872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.14420816018208e-05, |
| "loss": 3.433, |
| "step": 1744384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.143369565431028e-05, |
| "loss": 3.4344, |
| "step": 1744896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.142530970679976e-05, |
| "loss": 3.4474, |
| "step": 1745408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.141694013809297e-05, |
| "loss": 3.4449, |
| "step": 1745920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.140855419058245e-05, |
| "loss": 3.4495, |
| "step": 1746432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.140016824307193e-05, |
| "loss": 3.4329, |
| "step": 1746944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.139178229556141e-05, |
| "loss": 3.4529, |
| "step": 1747456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.138341272685462e-05, |
| "loss": 3.4478, |
| "step": 1747968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.13750267793441e-05, |
| "loss": 3.4481, |
| "step": 1748480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.136664083183358e-05, |
| "loss": 3.4454, |
| "step": 1748992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.135825488432306e-05, |
| "loss": 3.4419, |
| "step": 1749504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1349885315616273e-05, |
| "loss": 3.4463, |
| "step": 1750016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1341499368105753e-05, |
| "loss": 3.4572, |
| "step": 1750528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1333113420595233e-05, |
| "loss": 3.436, |
| "step": 1751040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1324727473084713e-05, |
| "loss": 3.4444, |
| "step": 1751552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1316357904377922e-05, |
| "loss": 3.4455, |
| "step": 1752064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1307971956867405e-05, |
| "loss": 3.4465, |
| "step": 1752576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1299586009356885e-05, |
| "loss": 3.4358, |
| "step": 1753088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1291200061846365e-05, |
| "loss": 3.4421, |
| "step": 1753600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1282830493139574e-05, |
| "loss": 3.4545, |
| "step": 1754112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1274444545629054e-05, |
| "loss": 3.4323, |
| "step": 1754624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.1266058598118534e-05, |
| "loss": 3.451, |
| "step": 1755136 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.855397939682007, |
| "eval_runtime": 307.3672, |
| "eval_samples_per_second": 1241.482, |
| "eval_steps_per_second": 38.797, |
| "step": 1755360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1257672650608014e-05, |
| "loss": 3.441, |
| "step": 1755648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1249286703097494e-05, |
| "loss": 3.4315, |
| "step": 1756160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1240900755586977e-05, |
| "loss": 3.4476, |
| "step": 1756672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1232514808076454e-05, |
| "loss": 3.4359, |
| "step": 1757184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1224145239369666e-05, |
| "loss": 3.4552, |
| "step": 1757696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1215759291859146e-05, |
| "loss": 3.4425, |
| "step": 1758208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1207373344348623e-05, |
| "loss": 3.4362, |
| "step": 1758720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1198987396838106e-05, |
| "loss": 3.4359, |
| "step": 1759232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.119061782813132e-05, |
| "loss": 3.44, |
| "step": 1759744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.11822318806208e-05, |
| "loss": 3.4471, |
| "step": 1760256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1173845933110275e-05, |
| "loss": 3.4354, |
| "step": 1760768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1165459985599755e-05, |
| "loss": 3.439, |
| "step": 1761280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1157090416892968e-05, |
| "loss": 3.4409, |
| "step": 1761792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1148704469382448e-05, |
| "loss": 3.431, |
| "step": 1762304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1140318521871928e-05, |
| "loss": 3.4398, |
| "step": 1762816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1131932574361408e-05, |
| "loss": 3.43, |
| "step": 1763328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.112356300565462e-05, |
| "loss": 3.4302, |
| "step": 1763840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1115177058144097e-05, |
| "loss": 3.4372, |
| "step": 1764352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1106791110633577e-05, |
| "loss": 3.4377, |
| "step": 1764864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.109840516312306e-05, |
| "loss": 3.4431, |
| "step": 1765376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.1090035594416272e-05, |
| "loss": 3.4478, |
| "step": 1765888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.108164964690575e-05, |
| "loss": 3.4382, |
| "step": 1766400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.107326369939523e-05, |
| "loss": 3.4399, |
| "step": 1766912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.106487775188471e-05, |
| "loss": 3.442, |
| "step": 1767424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.105650818317792e-05, |
| "loss": 3.432, |
| "step": 1767936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.10481222356674e-05, |
| "loss": 3.4494, |
| "step": 1768448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.103973628815688e-05, |
| "loss": 3.4305, |
| "step": 1768960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.103135034064636e-05, |
| "loss": 3.4359, |
| "step": 1769472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.102298077193957e-05, |
| "loss": 3.4307, |
| "step": 1769984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.101459482442905e-05, |
| "loss": 3.4322, |
| "step": 1770496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.100620887691853e-05, |
| "loss": 3.439, |
| "step": 1771008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0997822929408014e-05, |
| "loss": 3.4455, |
| "step": 1771520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0989453360701223e-05, |
| "loss": 3.4415, |
| "step": 1772032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0981067413190703e-05, |
| "loss": 3.4464, |
| "step": 1772544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0972681465680183e-05, |
| "loss": 3.4331, |
| "step": 1773056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0964311896973395e-05, |
| "loss": 3.4438, |
| "step": 1773568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0955925949462875e-05, |
| "loss": 3.4408, |
| "step": 1774080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0947540001952355e-05, |
| "loss": 3.4324, |
| "step": 1774592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0939154054441835e-05, |
| "loss": 3.4338, |
| "step": 1775104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0930784485735044e-05, |
| "loss": 3.4341, |
| "step": 1775616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0922398538224524e-05, |
| "loss": 3.4339, |
| "step": 1776128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0914012590714004e-05, |
| "loss": 3.4455, |
| "step": 1776640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0905626643203484e-05, |
| "loss": 3.4388, |
| "step": 1777152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0897257074496696e-05, |
| "loss": 3.4359, |
| "step": 1777664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0888871126986176e-05, |
| "loss": 3.4452, |
| "step": 1778176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0880485179475656e-05, |
| "loss": 3.4375, |
| "step": 1778688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0872099231965136e-05, |
| "loss": 3.4361, |
| "step": 1779200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0863729663258345e-05, |
| "loss": 3.4368, |
| "step": 1779712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.085534371574783e-05, |
| "loss": 3.4052, |
| "step": 1780224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.084695776823731e-05, |
| "loss": 3.4492, |
| "step": 1780736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.083857182072679e-05, |
| "loss": 3.4335, |
| "step": 1781248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0830202252019998e-05, |
| "loss": 3.4395, |
| "step": 1781760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0821816304509478e-05, |
| "loss": 3.4247, |
| "step": 1782272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0813430356998958e-05, |
| "loss": 3.4201, |
| "step": 1782784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0805044409488438e-05, |
| "loss": 3.4207, |
| "step": 1783296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.079667484078165e-05, |
| "loss": 3.431, |
| "step": 1783808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.078828889327113e-05, |
| "loss": 3.4341, |
| "step": 1784320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.077990294576061e-05, |
| "loss": 3.4305, |
| "step": 1784832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.077151699825009e-05, |
| "loss": 3.4471, |
| "step": 1785344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.07631474295433e-05, |
| "loss": 3.4284, |
| "step": 1785856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.075476148203278e-05, |
| "loss": 3.4295, |
| "step": 1786368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0746375534522262e-05, |
| "loss": 3.4381, |
| "step": 1786880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0737989587011742e-05, |
| "loss": 3.4146, |
| "step": 1787392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.072962001830495e-05, |
| "loss": 3.4268, |
| "step": 1787904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.072123407079443e-05, |
| "loss": 3.4324, |
| "step": 1788416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.071284812328391e-05, |
| "loss": 3.4409, |
| "step": 1788928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.070446217577339e-05, |
| "loss": 3.4161, |
| "step": 1789440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0696092607066604e-05, |
| "loss": 3.4283, |
| "step": 1789952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0687706659556084e-05, |
| "loss": 3.421, |
| "step": 1790464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0679320712045564e-05, |
| "loss": 3.4427, |
| "step": 1790976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0670934764535044e-05, |
| "loss": 3.4366, |
| "step": 1791488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0662565195828253e-05, |
| "loss": 3.4354, |
| "step": 1792000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0654179248317733e-05, |
| "loss": 3.441, |
| "step": 1792512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0645793300807216e-05, |
| "loss": 3.4435, |
| "step": 1793024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0637407353296696e-05, |
| "loss": 3.4578, |
| "step": 1793536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0629037784589905e-05, |
| "loss": 3.4362, |
| "step": 1794048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0620651837079385e-05, |
| "loss": 3.4368, |
| "step": 1794560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0612265889568865e-05, |
| "loss": 3.4371, |
| "step": 1795072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0603879942058345e-05, |
| "loss": 3.4263, |
| "step": 1795584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0595510373351557e-05, |
| "loss": 3.439, |
| "step": 1796096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0587124425841037e-05, |
| "loss": 3.4403, |
| "step": 1796608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0578738478330517e-05, |
| "loss": 3.4411, |
| "step": 1797120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0570352530819994e-05, |
| "loss": 3.4395, |
| "step": 1797632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0561982962113206e-05, |
| "loss": 3.4283, |
| "step": 1798144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0553597014602686e-05, |
| "loss": 3.4243, |
| "step": 1798656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.054521106709217e-05, |
| "loss": 3.4275, |
| "step": 1799168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.0536825119581646e-05, |
| "loss": 3.4455, |
| "step": 1799680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.052845555087486e-05, |
| "loss": 3.4347, |
| "step": 1800192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.052006960336434e-05, |
| "loss": 3.4276, |
| "step": 1800704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.051168365585382e-05, |
| "loss": 3.4291, |
| "step": 1801216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.05032977083433e-05, |
| "loss": 3.438, |
| "step": 1801728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.049492813963651e-05, |
| "loss": 3.4288, |
| "step": 1802240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.048654219212599e-05, |
| "loss": 3.4285, |
| "step": 1802752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0478156244615468e-05, |
| "loss": 3.4468, |
| "step": 1803264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0469770297104948e-05, |
| "loss": 3.4375, |
| "step": 1803776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.046140072839816e-05, |
| "loss": 3.4393, |
| "step": 1804288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.045301478088764e-05, |
| "loss": 3.4262, |
| "step": 1804800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.044462883337712e-05, |
| "loss": 3.4366, |
| "step": 1805312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.04362428858666e-05, |
| "loss": 3.4269, |
| "step": 1805824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0427873317159812e-05, |
| "loss": 3.4452, |
| "step": 1806336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0419487369649292e-05, |
| "loss": 3.4353, |
| "step": 1806848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.041110142213877e-05, |
| "loss": 3.4394, |
| "step": 1807360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.040271547462825e-05, |
| "loss": 3.4367, |
| "step": 1807872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0394345905921465e-05, |
| "loss": 3.4373, |
| "step": 1808384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.038595995841094e-05, |
| "loss": 3.4265, |
| "step": 1808896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.037757401090042e-05, |
| "loss": 3.4263, |
| "step": 1809408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.03691880633899e-05, |
| "loss": 3.4251, |
| "step": 1809920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0360818494683114e-05, |
| "loss": 3.4399, |
| "step": 1810432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0352432547172594e-05, |
| "loss": 3.4221, |
| "step": 1810944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0344046599662074e-05, |
| "loss": 3.4277, |
| "step": 1811456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0335660652151554e-05, |
| "loss": 3.4214, |
| "step": 1811968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0327291083444766e-05, |
| "loss": 3.4327, |
| "step": 1812480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0318905135934243e-05, |
| "loss": 3.4315, |
| "step": 1812992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0310519188423723e-05, |
| "loss": 3.4228, |
| "step": 1813504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0302133240913203e-05, |
| "loss": 3.4424, |
| "step": 1814016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0293763672206415e-05, |
| "loss": 3.4398, |
| "step": 1814528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0285377724695895e-05, |
| "loss": 3.4331, |
| "step": 1815040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0276991777185375e-05, |
| "loss": 3.4405, |
| "step": 1815552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0268605829674855e-05, |
| "loss": 3.435, |
| "step": 1816064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0260236260968064e-05, |
| "loss": 3.4339, |
| "step": 1816576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0251850313457547e-05, |
| "loss": 3.4302, |
| "step": 1817088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0243464365947027e-05, |
| "loss": 3.4365, |
| "step": 1817600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0235078418436507e-05, |
| "loss": 3.4284, |
| "step": 1818112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0226708849729716e-05, |
| "loss": 3.4396, |
| "step": 1818624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0218322902219196e-05, |
| "loss": 3.4266, |
| "step": 1819136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0209936954708676e-05, |
| "loss": 3.4315, |
| "step": 1819648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0201551007198156e-05, |
| "loss": 3.428, |
| "step": 1820160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.019318143849137e-05, |
| "loss": 3.4226, |
| "step": 1820672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.018479549098085e-05, |
| "loss": 3.4248, |
| "step": 1821184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.017640954347033e-05, |
| "loss": 3.4363, |
| "step": 1821696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.016802359595981e-05, |
| "loss": 3.4312, |
| "step": 1822208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0159654027253018e-05, |
| "loss": 3.4413, |
| "step": 1822720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.01512680797425e-05, |
| "loss": 3.4203, |
| "step": 1823232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.014288213223198e-05, |
| "loss": 3.446, |
| "step": 1823744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.013449618472146e-05, |
| "loss": 3.433, |
| "step": 1824256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.012612661601467e-05, |
| "loss": 3.4367, |
| "step": 1824768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.011774066850415e-05, |
| "loss": 3.4356, |
| "step": 1825280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.010935472099363e-05, |
| "loss": 3.4304, |
| "step": 1825792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0100985152286842e-05, |
| "loss": 3.4329, |
| "step": 1826304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0092599204776322e-05, |
| "loss": 3.4451, |
| "step": 1826816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0084213257265802e-05, |
| "loss": 3.4271, |
| "step": 1827328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0075827309755282e-05, |
| "loss": 3.4353, |
| "step": 1827840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.006745774104849e-05, |
| "loss": 3.4346, |
| "step": 1828352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.005907179353797e-05, |
| "loss": 3.4309, |
| "step": 1828864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0050685846027455e-05, |
| "loss": 3.4291, |
| "step": 1829376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0042299898516935e-05, |
| "loss": 3.4318, |
| "step": 1829888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0033930329810144e-05, |
| "loss": 3.4423, |
| "step": 1830400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0025544382299624e-05, |
| "loss": 3.4255, |
| "step": 1830912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0017158434789104e-05, |
| "loss": 3.4373, |
| "step": 1831424 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.858924388885498, |
| "eval_runtime": 310.3949, |
| "eval_samples_per_second": 1229.373, |
| "eval_steps_per_second": 38.419, |
| "step": 1831680 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 1.273860426623017e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |