{
  "best_metric": 3.8686816692352295,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/existential-there-quantifier/transformer/4/checkpoints/checkpoint-686880",
  "epoch": 1.0250006060157382,
  "eval_steps": 10,
  "global_step": 686880,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.9521, "step": 1},
    {"epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 6.8262, "step": 512},
    {"epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 6.1939, "step": 1024},
    {"epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 5.9899, "step": 1536},
    {"epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 5.8315, "step": 2048},
    {"epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 5.734, "step": 2560},
    {"epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 5.6212, "step": 3072},
    {"epoch": 0.0, "learning_rate": 4.994129836742636e-05, "loss": 5.5598, "step": 3584},
    {"epoch": 0.0, "learning_rate": 4.993291241991584e-05, "loss": 5.4784, "step": 4096},
    {"epoch": 0.0, "learning_rate": 4.992452647240532e-05, "loss": 5.4148, "step": 4608},
    {"epoch": 0.0, "learning_rate": 4.99161405248948e-05, "loss": 5.3678, "step": 5120},
    {"epoch": 0.0, "learning_rate": 4.990775457738428e-05, "loss": 5.3295, "step": 5632},
    {"epoch": 0.0, "learning_rate": 4.989936862987376e-05, "loss": 5.2839, "step": 6144},
    {"epoch": 0.0, "learning_rate": 4.989099906116697e-05, "loss": 5.2291, "step": 6656},
    {"epoch": 0.0, "learning_rate": 4.988261311365645e-05, "loss": 5.1923, "step": 7168},
    {"epoch": 0.0, "learning_rate": 4.987422716614593e-05, "loss": 5.1557, "step": 7680},
    {"epoch": 0.0, "learning_rate": 4.986584121863541e-05, "loss": 5.1189, "step": 8192},
    {"epoch": 0.0, "learning_rate": 4.985745527112489e-05, "loss": 5.0961, "step": 8704},
    {"epoch": 0.0, "learning_rate": 4.984906932361437e-05, "loss": 5.0691, "step": 9216},
    {"epoch": 0.0, "learning_rate": 4.984068337610385e-05, "loss": 5.0371, "step": 9728},
    {"epoch": 0.0, "learning_rate": 4.983229742859333e-05, "loss": 5.0251, "step": 10240},
    {"epoch": 0.0, "learning_rate": 4.982391148108281e-05, "loss": 4.9935, "step": 10752},
    {"epoch": 0.0, "learning_rate": 4.9815541912376026e-05, "loss": 4.9745, "step": 11264},
    {"epoch": 0.0, "learning_rate": 4.9807155964865506e-05, "loss": 4.9472, "step": 11776},
    {"epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 4.9372, "step": 12288},
    {"epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 4.9066, "step": 12800},
    {"epoch": 0.0, "learning_rate": 4.9782014501137675e-05, "loss": 4.8872, "step": 13312},
    {"epoch": 0.0, "learning_rate": 4.9773628553627155e-05, "loss": 4.8746, "step": 13824},
    {"epoch": 0.0, "learning_rate": 4.9765242606116635e-05, "loss": 4.8532, "step": 14336},
    {"epoch": 0.0, "learning_rate": 4.9756856658606115e-05, "loss": 4.8294, "step": 14848},
    {"epoch": 0.01, "learning_rate": 4.9748487089899324e-05, "loss": 4.824, "step": 15360},
    {"epoch": 0.01, "learning_rate": 4.9740117521192533e-05, "loss": 4.8135, "step": 15872},
    {"epoch": 0.01, "learning_rate": 4.9731731573682013e-05, "loss": 4.7994, "step": 16384},
    {"epoch": 0.01, "learning_rate": 4.97233456261715e-05, "loss": 4.7811, "step": 16896},
    {"epoch": 0.01, "learning_rate": 4.971495967866098e-05, "loss": 4.777, "step": 17408},
    {"epoch": 0.01, "learning_rate": 4.970657373115046e-05, "loss": 4.754, "step": 17920},
    {"epoch": 0.01, "learning_rate": 4.969820416244367e-05, "loss": 4.7444, "step": 18432},
    {"epoch": 0.01, "learning_rate": 4.968981821493315e-05, "loss": 4.7168, "step": 18944},
    {"epoch": 0.01, "learning_rate": 4.968143226742263e-05, "loss": 4.7181, "step": 19456},
    {"epoch": 0.01, "learning_rate": 4.967304631991211e-05, "loss": 4.7048, "step": 19968},
    {"epoch": 0.01, "learning_rate": 4.966466037240159e-05, "loss": 4.7026, "step": 20480},
    {"epoch": 0.01, "learning_rate": 4.965627442489107e-05, "loss": 4.6893, "step": 20992},
    {"epoch": 0.01, "learning_rate": 4.964788847738054e-05, "loss": 4.6854, "step": 21504},
    {"epoch": 0.01, "learning_rate": 4.963950252987002e-05, "loss": 4.6668, "step": 22016},
    {"epoch": 0.01, "learning_rate": 4.963113296116324e-05, "loss": 4.6605, "step": 22528},
    {"epoch": 0.01, "learning_rate": 4.962274701365272e-05, "loss": 4.6566, "step": 23040},
    {"epoch": 0.01, "learning_rate": 4.96143610661422e-05, "loss": 4.6392, "step": 23552},
    {"epoch": 0.01, "learning_rate": 4.960597511863168e-05, "loss": 4.6453, "step": 24064},
    {"epoch": 0.01, "learning_rate": 4.959758917112116e-05, "loss": 4.6222, "step": 24576},
    {"epoch": 0.01, "learning_rate": 4.95892359812181e-05, "loss": 4.6061, "step": 25088},
    {"epoch": 0.01, "learning_rate": 4.958085003370758e-05, "loss": 4.6182, "step": 25600},
    {"epoch": 0.01, "learning_rate": 4.957246408619706e-05, "loss": 4.5989, "step": 26112},
    {"epoch": 0.01, "learning_rate": 4.956407813868654e-05, "loss": 4.5903, "step": 26624},
    {"epoch": 0.01, "learning_rate": 4.9555692191176016e-05, "loss": 4.5709, "step": 27136},
    {"epoch": 0.01, "learning_rate": 4.9547306243665496e-05, "loss": 4.5828, "step": 27648},
    {"epoch": 0.01, "learning_rate": 4.9538920296154976e-05, "loss": 4.5577, "step": 28160},
    {"epoch": 0.01, "learning_rate": 4.9530534348644456e-05, "loss": 4.5893, "step": 28672},
    {"epoch": 0.01, "learning_rate": 4.952216477993767e-05, "loss": 4.5521, "step": 29184},
    {"epoch": 0.01, "learning_rate": 4.951377883242715e-05, "loss": 4.5646, "step": 29696},
    {"epoch": 0.01, "learning_rate": 4.950539288491663e-05, "loss": 4.5548, "step": 30208},
    {"epoch": 0.01, "learning_rate": 4.949700693740611e-05, "loss": 4.53, "step": 30720},
    {"epoch": 0.01, "learning_rate": 4.948863736869932e-05, "loss": 4.5234, "step": 31232},
    {"epoch": 0.01, "learning_rate": 4.94802514211888e-05, "loss": 4.528, "step": 31744},
    {"epoch": 0.01, "learning_rate": 4.947186547367828e-05, "loss": 4.5125, "step": 32256},
    {"epoch": 0.01, "learning_rate": 4.946347952616776e-05, "loss": 4.506, "step": 32768},
    {"epoch": 0.01, "learning_rate": 4.945510995746097e-05, "loss": 4.5236, "step": 33280},
    {"epoch": 0.01, "learning_rate": 4.944672400995045e-05, "loss": 4.507, "step": 33792},
    {"epoch": 0.01, "learning_rate": 4.943833806243993e-05, "loss": 4.4937, "step": 34304},
    {"epoch": 0.01, "learning_rate": 4.942995211492941e-05, "loss": 4.4786, "step": 34816},
    {"epoch": 0.01, "learning_rate": 4.9421582546222625e-05, "loss": 4.4766, "step": 35328},
    {"epoch": 0.01, "learning_rate": 4.9413196598712105e-05, "loss": 4.487, "step": 35840},
    {"epoch": 0.01, "learning_rate": 4.9404810651201585e-05, "loss": 4.489, "step": 36352},
    {"epoch": 0.01, "learning_rate": 4.9396424703691065e-05, "loss": 4.4771, "step": 36864},
    {"epoch": 0.01, "learning_rate": 4.9388038756180545e-05, "loss": 4.4754, "step": 37376},
    {"epoch": 0.01, "learning_rate": 4.9379652808670025e-05, "loss": 4.4726, "step": 37888},
    {"epoch": 0.01, "learning_rate": 4.9371266861159505e-05, "loss": 4.4661, "step": 38400},
    {"epoch": 0.01, "learning_rate": 4.9362897292452714e-05, "loss": 4.454, "step": 38912},
    {"epoch": 0.01, "learning_rate": 4.9354511344942194e-05, "loss": 4.4516, "step": 39424},
    {"epoch": 0.01, "learning_rate": 4.9346125397431674e-05, "loss": 4.4477, "step": 39936},
    {"epoch": 0.01, "learning_rate": 4.9337739449921154e-05, "loss": 4.4351, "step": 40448},
    {"epoch": 0.01, "learning_rate": 4.9329353502410634e-05, "loss": 4.4384, "step": 40960},
    {"epoch": 0.01, "learning_rate": 4.9320967554900114e-05, "loss": 4.4397, "step": 41472},
    {"epoch": 0.01, "learning_rate": 4.9312581607389594e-05, "loss": 4.4394, "step": 41984},
    {"epoch": 0.01, "learning_rate": 4.9304195659879074e-05, "loss": 4.4221, "step": 42496},
    {"epoch": 0.01, "learning_rate": 4.929582609117229e-05, "loss": 4.4017, "step": 43008},
    {"epoch": 0.01, "learning_rate": 4.928744014366177e-05, "loss": 4.4219, "step": 43520},
    {"epoch": 0.01, "learning_rate": 4.927905419615125e-05, "loss": 4.4163, "step": 44032},
    {"epoch": 0.01, "learning_rate": 4.927066824864073e-05, "loss": 4.4146, "step": 44544},
    {"epoch": 0.01, "learning_rate": 4.926229867993394e-05, "loss": 4.4065, "step": 45056},
    {"epoch": 0.01, "learning_rate": 4.925392911122715e-05, "loss": 4.4078, "step": 45568},
    {"epoch": 0.02, "learning_rate": 4.924554316371663e-05, "loss": 4.3966, "step": 46080},
    {"epoch": 0.02, "learning_rate": 4.923715721620611e-05, "loss": 4.3884, "step": 46592},
    {"epoch": 0.02, "learning_rate": 4.922877126869559e-05, "loss": 4.3916, "step": 47104},
    {"epoch": 0.02, "learning_rate": 4.922038532118507e-05, "loss": 4.3893, "step": 47616},
    {"epoch": 0.02, "learning_rate": 4.921201575247828e-05, "loss": 4.3966, "step": 48128},
    {"epoch": 0.02, "learning_rate": 4.920362980496776e-05, "loss": 4.3941, "step": 48640},
    {"epoch": 0.02, "learning_rate": 4.919524385745724e-05, "loss": 4.3859, "step": 49152},
    {"epoch": 0.02, "learning_rate": 4.918685790994672e-05, "loss": 4.3678, "step": 49664},
    {"epoch": 0.02, "learning_rate": 4.91784719624362e-05, "loss": 4.3704, "step": 50176},
    {"epoch": 0.02, "learning_rate": 4.9170086014925676e-05, "loss": 4.3724, "step": 50688},
    {"epoch": 0.02, "learning_rate": 4.9161700067415156e-05, "loss": 4.3715, "step": 51200},
    {"epoch": 0.02, "learning_rate": 4.9153314119904636e-05, "loss": 4.3604, "step": 51712},
    {"epoch": 0.02, "learning_rate": 4.914494455119785e-05, "loss": 4.3594, "step": 52224},
    {"epoch": 0.02, "learning_rate": 4.9136558603687325e-05, "loss": 4.3481, "step": 52736},
    {"epoch": 0.02, "learning_rate": 4.9128172656176805e-05, "loss": 4.3645, "step": 53248},
    {"epoch": 0.02, "learning_rate": 4.9119786708666285e-05, "loss": 4.3332, "step": 53760},
    {"epoch": 0.02, "learning_rate": 4.911143351876323e-05, "loss": 4.3462, "step": 54272},
    {"epoch": 0.02, "learning_rate": 4.910304757125272e-05, "loss": 4.3458, "step": 54784},
    {"epoch": 0.02, "learning_rate": 4.90946616237422e-05, "loss": 4.3349, "step": 55296},
    {"epoch": 0.02, "learning_rate": 4.908627567623168e-05, "loss": 4.3354, "step": 55808},
    {"epoch": 0.02, "learning_rate": 4.907788972872115e-05, "loss": 4.3367, "step": 56320},
    {"epoch": 0.02, "learning_rate": 4.906950378121063e-05, "loss": 4.3286, "step": 56832},
    {"epoch": 0.02, "learning_rate": 4.906111783370011e-05, "loss": 4.3344, "step": 57344},
    {"epoch": 0.02, "learning_rate": 4.905273188618959e-05, "loss": 4.3296, "step": 57856},
    {"epoch": 0.02, "learning_rate": 4.90443623174828e-05, "loss": 4.3226, "step": 58368},
    {"epoch": 0.02, "learning_rate": 4.903597636997228e-05, "loss": 4.3259, "step": 58880},
    {"epoch": 0.02, "learning_rate": 4.902759042246176e-05, "loss": 4.33, "step": 59392},
    {"epoch": 0.02, "learning_rate": 4.9019220853754975e-05, "loss": 4.3205, "step": 59904},
    {"epoch": 0.02, "learning_rate": 4.9010834906244455e-05, "loss": 4.3099, "step": 60416},
    {"epoch": 0.02, "learning_rate": 4.9002448958733935e-05, "loss": 4.3134, "step": 60928},
    {"epoch": 0.02, "learning_rate": 4.8994063011223415e-05, "loss": 4.314, "step": 61440},
    {"epoch": 0.02, "learning_rate": 4.8985677063712895e-05, "loss": 4.3069, "step": 61952},
    {"epoch": 0.02, "learning_rate": 4.8977291116202375e-05, "loss": 4.2962, "step": 62464},
    {"epoch": 0.02, "learning_rate": 4.8968905168691855e-05, "loss": 4.3048, "step": 62976},
    {"epoch": 0.02, "learning_rate": 4.8960535599985064e-05, "loss": 4.3082, "step": 63488},
    {"epoch": 0.02, "learning_rate": 4.8952149652474544e-05, "loss": 4.2993, "step": 64000},
    {"epoch": 0.02, "learning_rate": 4.8943763704964024e-05, "loss": 4.2944, "step": 64512},
    {"epoch": 0.02, "learning_rate": 4.8935377757453504e-05, "loss": 4.2866, "step": 65024},
    {"epoch": 0.02, "learning_rate": 4.892700818874671e-05, "loss": 4.2897, "step": 65536},
    {"epoch": 0.02, "learning_rate": 4.891862224123619e-05, "loss": 4.2842, "step": 66048},
    {"epoch": 0.02, "learning_rate": 4.891023629372567e-05, "loss": 4.2976, "step": 66560},
    {"epoch": 0.02, "learning_rate": 4.890185034621515e-05, "loss": 4.2894, "step": 67072},
    {"epoch": 0.02, "learning_rate": 4.889346439870464e-05, "loss": 4.2701, "step": 67584},
    {"epoch": 0.02, "learning_rate": 4.888507845119412e-05, "loss": 4.2808, "step": 68096},
    {"epoch": 0.02, "learning_rate": 4.88766925036836e-05, "loss": 4.2843, "step": 68608},
    {"epoch": 0.02, "learning_rate": 4.886830655617308e-05, "loss": 4.2759, "step": 69120},
    {"epoch": 0.02, "learning_rate": 4.885993698746629e-05, "loss": 4.2816, "step": 69632},
    {"epoch": 0.02, "learning_rate": 4.88515674187595e-05, "loss": 4.2675, "step": 70144},
    {"epoch": 0.02, "learning_rate": 4.884318147124898e-05, "loss": 4.2764, "step": 70656},
    {"epoch": 0.02, "learning_rate": 4.883479552373846e-05, "loss": 4.2789, "step": 71168},
    {"epoch": 0.02, "learning_rate": 4.882640957622794e-05, "loss": 4.2674, "step": 71680},
    {"epoch": 0.02, "learning_rate": 4.8818040007521146e-05, "loss": 4.2642, "step": 72192},
    {"epoch": 0.02, "learning_rate": 4.8809654060010626e-05, "loss": 4.2654, "step": 72704},
    {"epoch": 0.02, "learning_rate": 4.8801268112500106e-05, "loss": 4.2532, "step": 73216},
    {"epoch": 0.02, "learning_rate": 4.8792882164989586e-05, "loss": 4.2532, "step": 73728},
    {"epoch": 0.02, "learning_rate": 4.878449621747907e-05, "loss": 4.266, "step": 74240},
    {"epoch": 0.02, "learning_rate": 4.877611026996855e-05, "loss": 4.2455, "step": 74752},
    {"epoch": 0.02, "learning_rate": 4.876772432245803e-05, "loss": 4.2504, "step": 75264},
    {"epoch": 0.02, "learning_rate": 4.875933837494751e-05, "loss": 4.2546, "step": 75776},
    {"epoch": 0.02, "learning_rate": 4.875096880624072e-05, "loss": 4.2507, "step": 76288},
    {"epoch": 0.03, "eval_loss": 4.215391635894775, "eval_runtime": 310.9131, "eval_samples_per_second": 1227.324, "eval_steps_per_second": 38.355, "step": 76320},
    {"epoch": 0.0, "learning_rate": 4.87425828587302e-05, "loss": 4.2277, "step": 76800},
    {"epoch": 0.0, "learning_rate": 4.873419691121968e-05, "loss": 4.2349, "step": 77312},
    {"epoch": 0.0, "learning_rate": 4.872581096370916e-05, "loss": 4.253, "step": 77824},
    {"epoch": 0.0, "learning_rate": 4.8717425016198635e-05, "loss": 4.2354, "step": 78336},
    {"epoch": 0.0, "learning_rate": 4.8709039068688115e-05, "loss": 4.2476, "step": 78848},
    {"epoch": 0.0, "learning_rate": 4.8700653121177595e-05, "loss": 4.2215, "step": 79360},
    {"epoch": 0.0, "learning_rate": 4.8692267173667075e-05, "loss": 4.2376, "step": 79872},
    {"epoch": 0.0, "learning_rate": 4.8683881226156555e-05, "loss": 4.2233, "step": 80384},
    {"epoch": 0.0, "learning_rate": 4.867549527864604e-05, "loss": 4.2222, "step": 80896},
    {"epoch": 0.0, "learning_rate": 4.866710933113552e-05, "loss": 4.2242, "step": 81408},
    {"epoch": 0.0, "learning_rate": 4.8658723383625e-05, "loss": 4.2245, "step": 81920},
    {"epoch": 0.0, "learning_rate": 4.865033743611448e-05, "loss": 4.2279, "step": 82432},
    {"epoch": 0.0, "learning_rate": 4.864198424621142e-05, "loss": 4.2123, "step": 82944},
    {"epoch": 0.0, "learning_rate": 4.86335982987009e-05, "loss": 4.2083, "step": 83456},
    {"epoch": 0.0, "learning_rate": 4.862521235119038e-05, "loss": 4.2065, "step": 83968},
    {"epoch": 0.0, "learning_rate": 4.861682640367986e-05, "loss": 4.2035, "step": 84480},
    {"epoch": 0.0, "learning_rate": 4.860844045616934e-05, "loss": 4.202, "step": 84992},
    {"epoch": 0.0, "learning_rate": 4.860005450865882e-05, "loss": 4.2046, "step": 85504},
    {"epoch": 0.0, "learning_rate": 4.85916685611483e-05, "loss": 4.2018, "step": 86016},
    {"epoch": 0.0, "learning_rate": 4.858328261363778e-05, "loss": 4.2275, "step": 86528},
    {"epoch": 0.0, "learning_rate": 4.857489666612726e-05, "loss": 4.2, "step": 87040},
    {"epoch": 0.0, "learning_rate": 4.856651071861674e-05, "loss": 4.2051, "step": 87552},
    {"epoch": 0.0, "learning_rate": 4.8558124771106226e-05, "loss": 4.1988, "step": 88064},
    {"epoch": 0.0, "learning_rate": 4.8549755202399435e-05, "loss": 4.2047, "step": 88576},
    {"epoch": 0.0, "learning_rate": 4.8541385633692644e-05, "loss": 4.1937, "step": 89088},
    {"epoch": 0.0, "learning_rate": 4.8532999686182124e-05, "loss": 4.1918, "step": 89600},
    {"epoch": 0.0, "learning_rate": 4.8524613738671604e-05, "loss": 4.1903, "step": 90112},
    {"epoch": 0.0, "learning_rate": 4.8516227791161084e-05, "loss": 4.1845, "step": 90624},
    {"epoch": 0.0, "learning_rate": 4.8507841843650564e-05, "loss": 4.1698, "step": 91136},
    {"epoch": 0.01, "learning_rate": 4.849947227494377e-05, "loss": 4.1863, "step": 91648},
    {"epoch": 0.01, "learning_rate": 4.849108632743325e-05, "loss": 4.1834, "step": 92160},
    {"epoch": 0.01, "learning_rate": 4.848271675872646e-05, "loss": 4.1882, "step": 92672},
    {"epoch": 0.01, "learning_rate": 4.847433081121595e-05, "loss": 4.1787, "step": 93184},
    {"epoch": 0.01, "learning_rate": 4.846594486370543e-05, "loss": 4.1829, "step": 93696},
    {"epoch": 0.01, "learning_rate": 4.845755891619491e-05, "loss": 4.1718, "step": 94208},
    {"epoch": 0.01, "learning_rate": 4.844917296868439e-05, "loss": 4.1724, "step": 94720},
    {"epoch": 0.01, "learning_rate": 4.844078702117387e-05, "loss": 4.1591, "step": 95232},
    {"epoch": 0.01, "learning_rate": 4.843240107366335e-05, "loss": 4.1669, "step": 95744},
    {"epoch": 0.01, "learning_rate": 4.842401512615282e-05, "loss": 4.1633, "step": 96256},
    {"epoch": 0.01, "learning_rate": 4.84156291786423e-05, "loss": 4.1638, "step": 96768},
    {"epoch": 0.01, "learning_rate": 4.840725960993552e-05, "loss": 4.1644, "step": 97280},
    {"epoch": 0.01, "learning_rate": 4.8398873662425e-05, "loss": 4.1699, "step": 97792},
    {"epoch": 0.01, "learning_rate": 4.839048771491447e-05, "loss": 4.1592, "step": 98304},
    {"epoch": 0.01, "learning_rate": 4.838210176740395e-05, "loss": 4.1607, "step": 98816},
    {"epoch": 0.01, "learning_rate": 4.837371581989343e-05, "loss": 4.1635, "step": 99328},
    {"epoch": 0.01, "learning_rate": 4.836532987238292e-05, "loss": 4.1495, "step": 99840},
    {"epoch": 0.01, "learning_rate": 4.8356960303676126e-05, "loss": 4.1611, "step": 100352},
    {"epoch": 0.01, "learning_rate": 4.8348574356165606e-05, "loss": 4.1478, "step": 100864},
    {"epoch": 0.01, "learning_rate": 4.8340188408655086e-05, "loss": 4.1404, "step": 101376},
    {"epoch": 0.01, "learning_rate": 4.8331802461144566e-05, "loss": 4.1568, "step": 101888},
    {"epoch": 0.01, "learning_rate": 4.8323416513634046e-05, "loss": 4.1439, "step": 102400},
    {"epoch": 0.01, "learning_rate": 4.8315046944927255e-05, "loss": 4.1408, "step": 102912},
    {"epoch": 0.01, "learning_rate": 4.8306660997416735e-05, "loss": 4.1337, "step": 103424},
    {"epoch": 0.01, "learning_rate": 4.8298275049906215e-05, "loss": 4.1415, "step": 103936},
    {"epoch": 0.01, "learning_rate": 4.8289889102395695e-05, "loss": 4.1263, "step": 104448},
    {"epoch": 0.01, "learning_rate": 4.8281519533688904e-05, "loss": 4.1564, "step": 104960},
    {"epoch": 0.01, "learning_rate": 4.8273133586178384e-05, "loss": 4.1318, "step": 105472},
    {"epoch": 0.01, "learning_rate": 4.826474763866787e-05, "loss": 4.1453, "step": 105984},
    {"epoch": 0.01, "learning_rate": 4.825636169115735e-05, "loss": 4.1426, "step": 106496},
    {"epoch": 0.01, "learning_rate": 4.824797574364683e-05, "loss": 4.1245, "step": 107008},
    {"epoch": 0.01, "learning_rate": 4.823960617494004e-05, "loss": 4.1192, "step": 107520},
    {"epoch": 0.01, "learning_rate": 4.823122022742952e-05, "loss": 4.1306, "step": 108032},
    {"epoch": 0.01, "learning_rate": 4.8222834279919e-05, "loss": 4.1206, "step": 108544},
    {"epoch": 0.01, "learning_rate": 4.821446471121221e-05, "loss": 4.1166, "step": 109056},
    {"epoch": 0.01, "learning_rate": 4.820607876370169e-05, "loss": 4.1337, "step": 109568},
    {"epoch": 0.01, "learning_rate": 4.819769281619117e-05, "loss": 4.1317, "step": 110080},
    {"epoch": 0.01, "learning_rate": 4.818930686868065e-05, "loss": 4.1154, "step": 110592},
    {"epoch": 0.01, "learning_rate": 4.818093729997386e-05, "loss": 4.1098, "step": 111104},
    {"epoch": 0.01, "learning_rate": 4.817255135246334e-05, "loss": 4.1121, "step": 111616},
    {"epoch": 0.01, "learning_rate": 4.816416540495282e-05, "loss": 4.1219, "step": 112128},
    {"epoch": 0.01, "learning_rate": 4.8155779457442305e-05, "loss": 4.1236, "step": 112640},
    {"epoch": 0.01, "learning_rate": 4.8147393509931785e-05, "loss": 4.1217, "step": 113152},
    {"epoch": 0.01, "learning_rate": 4.8139023941224994e-05, "loss": 4.1226, "step": 113664},
    {"epoch": 0.01, "learning_rate": 4.8130637993714474e-05, "loss": 4.1226, "step": 114176},
    {"epoch": 0.01, "learning_rate": 4.8122252046203954e-05, "loss": 4.1236, "step": 114688},
    {"epoch": 0.01, "learning_rate": 4.8113866098693434e-05, "loss": 4.1056, "step": 115200},
    {"epoch": 0.01, "learning_rate": 4.810549652998664e-05, "loss": 4.1138, "step": 115712},
    {"epoch": 0.01, "learning_rate": 4.809711058247612e-05, "loss": 4.1126, "step": 116224},
    {"epoch": 0.01, "learning_rate": 4.80887246349656e-05, "loss": 4.0995, "step": 116736},
    {"epoch": 0.01, "learning_rate": 4.808033868745508e-05, "loss": 4.1112, "step": 117248},
    {"epoch": 0.01, "learning_rate": 4.807196911874829e-05, "loss": 4.1137, "step": 117760},
    {"epoch": 0.01, "learning_rate": 4.806358317123777e-05, "loss": 4.1145, "step": 118272},
    {"epoch": 0.01, "learning_rate": 4.805519722372726e-05, "loss": 4.0972, "step": 118784},
    {"epoch": 0.01, "learning_rate": 4.804681127621674e-05, "loss": 4.0862, "step": 119296},
    {"epoch": 0.01, "learning_rate": 4.803844170750995e-05, "loss": 4.103, "step": 119808},
    {"epoch": 0.01, "learning_rate": 4.803005575999943e-05, "loss": 4.1014, "step": 120320},
    {"epoch": 0.01, "learning_rate": 4.802166981248891e-05, "loss": 4.1048, "step": 120832},
    {"epoch": 0.01, "learning_rate": 4.801328386497839e-05, "loss": 4.0976, "step": 121344},
    {"epoch": 0.01, "learning_rate": 4.8004914296271596e-05, "loss": 4.0983, "step": 121856},
    {"epoch": 0.02, "learning_rate": 4.7996528348761076e-05, "loss": 4.0983, "step": 122368},
    {"epoch": 0.02, "learning_rate": 4.7988142401250556e-05, "loss": 4.0844, "step": 122880},
    {"epoch": 0.02, "learning_rate": 4.7979756453740036e-05, "loss": 4.0928, "step": 123392},
    {"epoch": 0.02, "learning_rate": 4.7971370506229516e-05, "loss": 4.0927, "step": 123904},
    {"epoch": 0.02, "learning_rate": 4.7962984558718996e-05, "loss": 4.1017, "step": 124416},
    {"epoch": 0.02, "learning_rate": 4.7954598611208476e-05, "loss": 4.1035, "step": 124928},
    {"epoch": 0.02, "learning_rate": 4.7946212663697956e-05, "loss": 4.0972, "step": 125440},
    {"epoch": 0.02, "learning_rate": 4.793784309499117e-05, "loss": 4.0823, "step": 125952},
    {"epoch": 0.02, "learning_rate": 4.792945714748065e-05, "loss": 4.0875, "step": 126464},
    {"epoch": 0.02, "learning_rate": 4.792107119997013e-05, "loss": 4.0865, "step": 126976},
    {"epoch": 0.02, "learning_rate": 4.7912685252459605e-05, "loss": 4.0953, "step": 127488},
    {"epoch": 0.02, "learning_rate": 4.790431568375282e-05, "loss": 4.0823, "step": 128000},
    {"epoch": 0.02, "learning_rate": 4.78959297362423e-05, "loss": 4.0804, "step": 128512},
    {"epoch": 0.02, "learning_rate": 4.7887543788731774e-05, "loss": 4.0732, "step": 129024},
    {"epoch": 0.02, "learning_rate": 4.7879157841221254e-05, "loss": 4.0938, "step": 129536},
    {"epoch": 0.02, "learning_rate": 4.787078827251447e-05, "loss": 4.0612, "step": 130048},
    {"epoch": 0.02, "learning_rate": 4.786240232500395e-05, "loss": 4.0773, "step": 130560},
    {"epoch": 0.02, "learning_rate": 4.7854032756297166e-05, "loss": 4.0838, "step": 131072},
    {"epoch": 0.02, "learning_rate": 4.7845646808786646e-05, "loss": 4.0714, "step": 131584},
    {"epoch": 0.02, "learning_rate": 4.7837260861276126e-05, "loss": 4.0724, "step": 132096},
    {"epoch": 0.02, "learning_rate": 4.7828874913765606e-05, "loss": 4.0809, "step": 132608},
    {"epoch": 0.02, "learning_rate": 4.782048896625508e-05, "loss": 4.071, "step": 133120},
    {"epoch": 0.02, "learning_rate": 4.781210301874456e-05, "loss": 4.0731, "step": 133632},
    {"epoch": 0.02, "learning_rate": 4.780371707123404e-05, "loss": 4.0769, "step": 134144},
    {"epoch": 0.02, "learning_rate": 4.779534750252725e-05, "loss": 4.0702, "step": 134656},
    {"epoch": 0.02, "learning_rate": 4.778696155501673e-05, "loss": 4.0772, "step": 135168},
    {"epoch": 0.02, "learning_rate": 4.777857560750621e-05, "loss": 4.0821, "step": 135680},
    {"epoch": 0.02, "learning_rate": 4.7770206038799423e-05, "loss": 4.0731, "step": 136192},
    {"epoch": 0.02, "learning_rate": 4.7761820091288903e-05, "loss": 4.0653, "step": 136704},
    {"epoch": 0.02, "learning_rate": 4.7753434143778383e-05, "loss": 4.0699, "step": 137216},
    {"epoch": 0.02, "learning_rate": 4.774504819626786e-05, "loss": 4.0673, "step": 137728},
    {"epoch": 0.02, "learning_rate": 4.773666224875734e-05, "loss": 4.0636, "step": 138240},
    {"epoch": 0.02, "learning_rate": 4.772827630124682e-05, "loss": 4.0588, "step": 138752},
    {"epoch": 0.02, "learning_rate": 4.77198903537363e-05, "loss": 4.0619, "step": 139264},
    {"epoch": 0.02, "learning_rate": 4.771150440622578e-05, "loss": 4.0748, "step": 139776},
    {"epoch": 0.02, "learning_rate": 4.770313483751899e-05, "loss": 4.0638, "step": 140288},
    {"epoch": 0.02, "learning_rate": 4.769474889000847e-05, "loss": 4.0637, "step": 140800},
    {"epoch": 0.02, "learning_rate": 4.768636294249795e-05, "loss": 4.0532, "step": 141312},
    {"epoch": 0.02, "learning_rate": 4.767799337379116e-05, "loss": 4.0561, "step": 141824},
    {"epoch": 0.02, "learning_rate": 4.766960742628064e-05, "loss": 4.0539, "step": 142336},
    {"epoch": 0.02, "learning_rate": 4.766122147877012e-05, "loss": 4.0708, "step": 142848},
    {"epoch": 0.02, "learning_rate": 4.76528355312596e-05, "loss": 4.0662, "step": 143360},
    {"epoch": 0.02, "learning_rate": 4.764444958374909e-05, "loss": 4.044, "step": 143872},
    {"epoch": 0.02, "learning_rate": 4.76360800150423e-05, "loss": 4.0523, "step": 144384},
    {"epoch": 0.02, "learning_rate": 4.762769406753178e-05, "loss": 4.0622, "step": 144896},
    {"epoch": 0.02, "learning_rate": 4.761930812002126e-05, "loss": 4.0547, "step": 145408},
    {"epoch": 0.02, "learning_rate": 4.761092217251074e-05, "loss": 4.0627, "step": 145920},
    {"epoch": 0.02, "learning_rate": 4.7602552603803946e-05, "loss": 4.0484, "step": 146432},
    {"epoch": 0.02, "learning_rate": 4.7594166656293426e-05, "loss": 4.0548, "step": 146944},
    {"epoch": 0.02, "learning_rate": 4.7585780708782906e-05, "loss": 4.0605, "step": 147456},
    {"epoch": 0.02, "learning_rate": 4.7577394761272386e-05, "loss": 4.05, "step": 147968},
    {"epoch": 0.02, "learning_rate": 4.7569025192565595e-05, "loss": 4.0513, "step": 148480},
    {"epoch": 0.02, "learning_rate": 4.7560639245055075e-05, "loss": 4.0532, "step": 148992},
    {"epoch": 0.02, "learning_rate": 4.7552253297544555e-05, "loss": 4.0434, "step": 149504},
    {"epoch": 0.02, "learning_rate": 4.7543867350034035e-05, "loss": 4.042, "step": 150016},
    {"epoch": 0.02, "learning_rate": 4.753549778132725e-05, "loss": 4.0553, "step": 150528},
    {"epoch": 0.02, "learning_rate": 4.752711183381673e-05, "loss": 4.0371, "step": 151040},
    {"epoch": 0.02, "learning_rate": 4.751872588630621e-05, "loss": 4.0451, "step": 151552},
    {"epoch": 0.02, "learning_rate": 4.751035631759942e-05, "loss": 4.0469, "step": 152064},
    {"epoch": 0.02, "learning_rate": 4.75019703700889e-05, "loss": 4.0435, "step": 152576},
    {"epoch": 0.03, "eval_loss": 4.046647071838379, "eval_runtime": 311.1083, "eval_samples_per_second": 1226.554, "eval_steps_per_second": 38.331, "step": 152640},
    {"epoch": 1.0, "learning_rate": 4.749358442257838e-05, "loss": 4.0327, "step": 153088},
    {"epoch": 1.0, "learning_rate": 4.748519847506786e-05, "loss": 4.0312, "step": 153600},
    {"epoch": 1.0, "learning_rate": 4.747682890636107e-05, "loss": 4.0519, "step": 154112},
    {"epoch": 1.0, "learning_rate": 4.746844295885055e-05, "loss": 4.033, "step": 154624},
    {"epoch": 1.0, "learning_rate": 4.746005701134003e-05, "loss": 4.0513, "step": 155136},
    {"epoch": 1.0, "learning_rate": 4.745167106382951e-05, "loss": 4.0273, "step": 155648},
    {"epoch": 1.0, "learning_rate": 4.7443301495122724e-05, "loss": 4.0415, "step": 156160},
    {"epoch": 1.0, "learning_rate": 4.7434915547612204e-05, "loss": 4.0236, "step": 156672},
    {"epoch": 1.0, "learning_rate": 4.7426529600101684e-05, "loss": 4.0375, "step": 157184},
    {"epoch": 1.0, "learning_rate": 4.7418143652591164e-05, "loss": 4.0319, "step": 157696},
    {"epoch": 1.0, "learning_rate": 4.740977408388437e-05, "loss": 4.0307, "step": 158208},
    {"epoch": 1.0, "learning_rate": 4.740138813637385e-05, "loss": 4.0398, "step": 158720},
    {"epoch": 1.0, "learning_rate": 4.739303494647079e-05, "loss": 4.0228, "step": 159232},
    {"epoch": 1.0, "learning_rate": 4.738464899896027e-05, "loss": 4.0213, "step": 159744},
    {"epoch": 1.0, "learning_rate": 4.737626305144975e-05, "loss": 4.0197, "step": 160256},
    {"epoch": 1.0, "learning_rate": 4.736787710393923e-05, "loss": 4.0152, "step": 160768},
    {"epoch": 1.0, "learning_rate": 4.735949115642871e-05, "loss": 4.02, "step": 161280},
    {"epoch": 1.0, "learning_rate": 4.73511052089182e-05, "loss": 4.0234, "step": 161792},
    {"epoch": 1.0, "learning_rate": 4.734271926140768e-05, "loss": 4.0184, "step": 162304},
    {"epoch": 1.0, "learning_rate": 4.733433331389716e-05, "loss": 4.0446, "step": 162816},
    {"epoch": 1.0, "learning_rate": 4.732594736638664e-05, "loss": 4.0219, "step": 163328},
    {"epoch": 1.0, "learning_rate": 4.731756141887612e-05, "loss": 4.027, "step": 163840},
    {"epoch": 1.0, "learning_rate": 4.73091754713656e-05, "loss": 4.0213, "step": 164352},
    {"epoch": 1.0, "learning_rate": 4.730080590265881e-05, "loss": 4.0271, "step": 164864},
    {"epoch": 1.0, "learning_rate": 4.7292436333952016e-05, "loss": 4.0184, "step": 165376},
    {"epoch": 1.0, "learning_rate": 4.7284050386441496e-05, "loss": 4.0156, "step": 165888},
    {"epoch": 1.0, "learning_rate": 4.7275664438930976e-05, "loss": 4.0186, "step": 166400},
    {"epoch": 1.0, "learning_rate": 4.7267278491420456e-05, "loss": 4.0088, "step": 166912},
    {"epoch": 1.0, "learning_rate": 4.7258892543909936e-05, "loss": 3.9972, "step": 167424},
    {"epoch": 1.01, "learning_rate": 4.7250506596399416e-05, "loss": 4.0124, "step": 167936},
    {"epoch": 1.01, "learning_rate": 4.7242120648888896e-05, "loss": 4.0198, "step": 168448},
    {"epoch": 1.01, "learning_rate": 4.723375108018211e-05, "loss": 4.0134, "step": 168960},
    {"epoch": 1.01, "learning_rate": 4.722536513267159e-05, "loss": 4.0135, "step": 169472},
    {"epoch": 1.01, "learning_rate": 4.721697918516107e-05, "loss": 4.0162, "step": 169984},
    {"epoch": 1.01, "learning_rate": 4.720859323765055e-05, "loss": 4.0041, "step": 170496},
    {"epoch": 1.01, "learning_rate": 4.720020729014003e-05, "loss": 4.0056, "step": 171008},
    {"epoch": 1.01, "learning_rate": 4.7191821342629505e-05, "loss": 3.9981, "step": 171520},
    {"epoch": 1.01, "learning_rate": 4.7183435395118985e-05, "loss": 4.0008, "step": 172032},
    {"epoch": 1.01, "learning_rate": 4.71750658264122e-05, "loss": 3.9971, "step": 172544},
    {"epoch": 1.01, "learning_rate": 4.716667987890168e-05, "loss": 4.0007, "step": 173056},
    {"epoch": 1.01, "learning_rate": 4.7158293931391154e-05, "loss": 4.0031, "step": 173568},
    {"epoch": 1.01, "learning_rate": 4.7149907983880634e-05, "loss": 4.0095, "step": 174080},
    {"epoch": 1.01, "learning_rate": 4.714153841517385e-05, "loss": 3.9988, "step": 174592},
    {"epoch": 1.01, "learning_rate": 4.713315246766333e-05, "loss": 4.0011, "step": 175104},
    {"epoch": 1.01, "learning_rate": 4.712476652015281e-05, "loss": 4.0069, "step": 175616},
    {"epoch": 1.01, "learning_rate": 4.711638057264229e-05, "loss": 3.9893, "step": 176128},
    {"epoch": 1.01, "learning_rate": 4.7108011003935505e-05, "loss": 4.0018, "step": 176640},
    {"epoch": 1.01, "learning_rate": 4.7099641435228714e-05, "loss": 3.9958, "step": 177152},
    {"epoch": 1.01, "learning_rate": 4.7091255487718194e-05, "loss": 3.9801, "step": 177664},
    {"epoch": 1.01, "learning_rate": 4.7082869540207674e-05, "loss": 3.9993, "step": 178176},
    {"epoch": 1.01, "learning_rate": 4.7074483592697154e-05, "loss": 3.9926, "step": 178688},
    {"epoch": 1.01, "learning_rate": 4.706609764518663e-05, "loss": 3.9863, "step": 179200},
    {"epoch": 1.01, "learning_rate": 4.705771169767611e-05, "loss": 3.9854, "step": 179712},
    {"epoch": 1.01, "learning_rate": 4.704932575016559e-05, "loss": 3.9889, "step": 180224},
    {"epoch": 1.01, "learning_rate": 4.70409561814588e-05, "loss": 3.9718, "step": 180736},
    {"epoch": 1.01, "learning_rate": 4.703257023394828e-05, "loss": 3.9995, "step": 181248},
    {"epoch": 1.01, "learning_rate": 4.702418428643776e-05, "loss": 3.984, "step": 181760},
    {"epoch": 1.01, "learning_rate": 4.701579833892724e-05, "loss": 3.9949, "step": 182272},
    {"epoch": 1.01, "learning_rate": 4.700742877022045e-05, "loss": 3.9958, "step": 182784},
    {"epoch": 1.01, "learning_rate": 4.699904282270993e-05, "loss": 3.9784, "step": 183296},
    {"epoch": 1.01, "learning_rate": 4.699065687519941e-05, "loss": 3.9725, "step": 183808},
    {"epoch": 1.01, "learning_rate": 4.698227092768889e-05, "loss": 3.9873, "step": 184320},
    {"epoch": 1.01, "learning_rate": 4.69739013589821e-05, "loss": 3.9731, "step": 184832},
    {"epoch": 1.01, "learning_rate": 4.696551541147158e-05, "loss": 3.9704, "step": 185344},
    {"epoch": 1.01, "learning_rate": 4.695712946396106e-05, "loss": 3.9896, "step": 185856},
    {"epoch": 1.01, "learning_rate": 4.694874351645054e-05, "loss": 3.989, "step": 186368},
    {"epoch": 1.01, "learning_rate": 4.694037394774376e-05, "loss": 3.9721, "step": 186880},
    {"epoch": 1.01, "learning_rate": 4.693200437903697e-05, "loss": 3.9668, "step": 187392},
    {"epoch": 1.01, "learning_rate": 4.692361843152645e-05, "loss": 3.9691, "step": 187904},
    {"epoch": 1.01, "learning_rate": 4.6915232484015926e-05, "loss": 3.9776, "step": 188416},
    {"epoch": 1.01, "learning_rate": 4.6906846536505406e-05, "loss": 3.9826, "step": 188928},
    {"epoch": 1.01, "learning_rate": 4.6898460588994886e-05, "loss": 3.9799, "step": 189440},
    {"epoch": 1.01, "learning_rate": 4.6890074641484366e-05, "loss": 3.9864, "step": 189952},
    {"epoch": 1.01, "learning_rate": 4.6881688693973846e-05, "loss": 3.9788, "step": 190464},
    {"epoch": 1.01, "learning_rate": 4.6873302746463326e-05, "loss": 3.9868, "step": 190976},
    {"epoch": 1.01, "learning_rate": 4.6864933177756535e-05, "loss": 3.9687, "step": 191488},
    {"epoch": 1.01, "learning_rate": 4.6856547230246015e-05, "loss": 3.976, "step": 192000},
    {"epoch": 1.01, "learning_rate": 4.6848161282735495e-05, "loss": 3.9782, "step": 192512},
    {"epoch": 1.01, "learning_rate": 4.683977533522498e-05, "loss": 3.9617, "step": 193024},
    {"epoch": 1.01, "learning_rate": 4.683140576651819e-05, "loss": 3.9745, "step": 193536},
    {"epoch": 1.01, "learning_rate": 4.682301981900767e-05, "loss": 3.9822, "step": 194048},
    {"epoch": 1.01, "learning_rate": 4.681463387149715e-05, "loss": 3.9793, "step": 194560},
    {"epoch": 1.01, "learning_rate": 4.680624792398663e-05, "loss": 3.9633, "step": 195072},
    {"epoch": 1.01, "learning_rate": 4.679787835527984e-05, "loss": 3.9512, "step": 195584},
    {"epoch": 1.01, "learning_rate": 4.678950878657305e-05, "loss": 3.9693, "step": 196096},
    {"epoch": 1.01, "learning_rate": 4.678112283906253e-05, "loss": 3.9614, "step": 196608},
    {"epoch": 1.01, "learning_rate": 4.677273689155201e-05, "loss": 3.9752, "step": 197120},
    {"epoch": 1.01, "learning_rate": 4.676435094404149e-05, "loss": 3.9674, "step": 197632},
    {"epoch": 1.01, "learning_rate": 4.675596499653097e-05, "loss": 3.9663, "step": 198144},
    {"epoch": 1.02, "learning_rate": 4.674757904902045e-05, "loss": 3.9661, "step": 198656},
    {"epoch": 1.02, "learning_rate": 4.6739209480313664e-05, "loss": 3.9531, "step": 199168},
    {"epoch": 1.02, "learning_rate": 4.6730823532803144e-05, "loss": 3.9639, "step": 199680},
    {"epoch": 1.02, "learning_rate": 4.6722437585292624e-05, "loss": 3.9619, "step": 200192},
    {"epoch": 1.02, "learning_rate": 4.6714051637782104e-05, "loss": 3.9754, "step": 200704},
    {"epoch": 1.02, "learning_rate": 4.6705665690271584e-05, "loss": 3.9767, "step": 201216},
    {"epoch": 1.02, "learning_rate": 4.669729612156479e-05, "loss": 3.9678, "step": 201728},
    {"epoch": 1.02, "learning_rate": 4.668891017405427e-05, "loss": 3.9526, "step": 202240},
    {"epoch": 1.02, "learning_rate": 4.668052422654375e-05, "loss": 3.9628, "step": 202752},
    {"epoch": 1.02, "learning_rate": 4.667213827903323e-05, "loss": 3.9596, "step": 203264},
    {"epoch": 1.02, "learning_rate": 4.666376871032644e-05, "loss": 3.9689, "step": 203776},
    {"epoch": 1.02, "learning_rate": 4.665538276281592e-05, "loss": 3.9552, "step": 204288},
    {"epoch": 1.02, "learning_rate": 4.66469968153054e-05, "loss": 3.9609, "step": 204800},
    {"epoch": 1.02, "learning_rate": 4.663861086779488e-05, "loss": 3.9477, "step": 205312},
    {"epoch": 1.02, "learning_rate": 4.663022492028437e-05, "loss": 3.9694, "step": 205824},
    {"epoch": 1.02, "learning_rate": 4.662183897277385e-05, "loss": 3.9395, "step": 206336},
    {"epoch": 1.02, "learning_rate": 4.661345302526333e-05, "loss": 3.9569, "step": 206848},
    {"epoch": 1.02, "learning_rate": 4.660508345655654e-05, "loss": 3.9525, "step": 207360},
    {"epoch": 1.02, "learning_rate": 4.659669750904602e-05, "loss": 3.9535, "step": 207872},
    {"epoch": 1.02, "learning_rate": 4.65883115615355e-05, "loss": 3.9485, "step": 208384},
    {"epoch": 1.02, "learning_rate": 4.657992561402498e-05, "loss": 3.9583, "step": 208896},
    {"epoch": 1.02, "learning_rate": 4.6571556045318187e-05, "loss": 3.957, "step": 209408},
    {"epoch": 1.02, "learning_rate": 4.6563170097807667e-05, "loss": 3.9495, "step": 209920},
    {"epoch": 1.02, "learning_rate": 4.6554784150297146e-05, "loss": 3.9584, "step": 210432},
    {"epoch": 1.02, "learning_rate": 4.6546398202786626e-05, "loss": 3.945, "step": 210944},
    {"epoch": 1.02, "learning_rate": 4.6538028634079836e-05, "loss": 3.9632, "step": 211456},
    {"epoch": 1.02, "learning_rate": 4.652964268656932e-05, "loss": 3.9656, "step": 211968},
    {"epoch": 1.02, "learning_rate": 4.652127311786253e-05, "loss": 3.9494, "step": 212480},
    {"epoch": 1.02, "learning_rate": 4.651288717035201e-05, "loss": 3.9473, "step": 212992},
    {"epoch": 1.02, "learning_rate": 4.650450122284149e-05, "loss": 3.9496, "step": 213504},
    {"epoch": 1.02, "learning_rate": 4.649611527533097e-05, "loss": 3.9493, "step": 214016},
    {"epoch": 1.02, "learning_rate": 4.648772932782045e-05, "loss": 3.9442, "step": 214528},
    {"epoch": 1.02, "learning_rate": 4.647934338030993e-05, "loss": 3.9441, "step": 215040},
    {"epoch": 1.02, "learning_rate": 4.6470957432799404e-05, "loss": 3.9407, "step": 215552},
    {"epoch": 1.02, "learning_rate": 4.6462571485288884e-05, "loss": 3.9588, "step": 216064},
    {"epoch": 1.02, "learning_rate": 4.6454185537778364e-05, "loss": 3.9502, "step": 216576},
    {"epoch": 1.02, "learning_rate": 4.6445799590267844e-05, "loss": 3.9466, "step": 217088},
    {"epoch": 1.02, "learning_rate": 4.643743002156106e-05, "loss": 3.9386, "step": 217600},
    {"epoch": 1.02, "learning_rate": 4.642904407405054e-05, "loss": 3.9433, "step": 218112},
    {"epoch": 1.02, "learning_rate": 4.642065812654002e-05, "loss": 3.9371, "step": 218624},
    {"epoch": 1.02, "learning_rate": 4.64122721790295e-05, "loss": 3.9583, "step": 219136},
    {"epoch": 1.02, "learning_rate": 4.640390261032271e-05, "loss": 3.9533, "step": 219648},
    {"epoch": 1.02, "learning_rate": 4.639551666281219e-05, "loss": 3.935, "step": 220160},
    {"epoch": 1.02, "learning_rate": 4.638713071530167e-05, "loss": 3.9359, "step": 220672},
    {"epoch": 1.02, "learning_rate": 4.637874476779115e-05, "loss": 3.95, "step": 221184},
    {"epoch": 1.02, "learning_rate": 4.637035882028063e-05, "loss": 3.9411, "step": 221696},
    {"epoch": 1.02, "learning_rate": 4.636198925157384e-05, "loss": 3.9529, "step": 222208},
    {"epoch": 1.02, "learning_rate": 4.635360330406332e-05, "loss": 3.9349, "step": 222720},
    {"epoch": 1.02, "learning_rate": 4.63452173565528e-05, "loss": 3.9482, "step": 223232},
    {"epoch": 1.02, "learning_rate": 4.633683140904228e-05, "loss": 3.9436, "step": 223744},
    {"epoch": 1.02, "learning_rate": 4.632844546153176e-05, "loss": 3.9447, "step": 224256},
    {"epoch": 1.02, "learning_rate": 4.6320075892824974e-05, "loss": 3.9419, "step": 224768},
    {"epoch": 1.02, "learning_rate": 4.6311689945314454e-05, "loss": 3.9416, "step": 225280},
    {"epoch": 1.02, "learning_rate": 4.6303303997803934e-05, "loss": 3.9338, "step": 225792},
    {"epoch": 1.02, "learning_rate": 4.6294918050293413e-05, "loss": 3.9367, "step": 226304},
    {"epoch": 1.02, "learning_rate": 4.6286532102782893e-05, "loss": 3.9452, "step": 226816},
    {"epoch": 1.02, "learning_rate": 4.62781625340761e-05, "loss": 3.9343, "step": 227328},
    {"epoch": 1.02, "learning_rate": 4.626977658656558e-05, "loss": 3.9327, "step": 227840},
    {"epoch": 1.02, "learning_rate": 4.626139063905506e-05, "loss": 3.9366, "step": 228352},
    {"epoch": 1.02, "learning_rate": 4.625300469154454e-05, "loss": 3.936, "step": 228864},
    {"epoch": 1.03, "eval_loss": 3.972496509552002, "eval_runtime": 310.4496, "eval_samples_per_second": 1229.156, "eval_steps_per_second": 38.412, "step": 228960},
    {"epoch": 0.0, "learning_rate": 4.624461874403402e-05, "loss": 3.9217, "step": 229376},
    {"epoch": 0.0, "learning_rate": 4.62362327965235e-05, "loss": 3.9254, "step": 229888},
    {"epoch": 0.0, "learning_rate": 4.622784684901298e-05, "loss": 3.9456, "step": 230400},
    {"epoch": 0.0, "learning_rate": 4.621946090150246e-05, "loss": 3.9274, "step": 230912},
    {"epoch": 0.0, "learning_rate": 4.621107495399194e-05, "loss": 3.9492, "step": 231424},
    {"epoch": 0.0, "learning_rate": 4.620268900648142e-05, "loss": 3.9189, "step": 231936},
    {"epoch": 0.0, "learning_rate": 4.61943030589709e-05, "loss": 3.9386, "step": 232448},
    {"epoch": 0.0, "learning_rate": 4.618591711146038e-05, "loss": 3.923, "step": 232960},
    {"epoch": 0.0, "learning_rate": 4.617754754275359e-05, "loss": 3.9291, "step": 233472},
    {"epoch": 0.0, "learning_rate": 4.616916159524307e-05, "loss": 3.9294, "step": 233984},
    {"epoch": 0.0, "learning_rate": 4.616077564773255e-05, "loss": 3.9265, "step": 234496},
    {"epoch": 0.0, "learning_rate": 4.615238970022203e-05, "loss": 3.9348, "step": 235008},
    {"epoch": 0.0, "learning_rate": 4.6144036510318976e-05, "loss": 3.9207, "step": 235520},
    {"epoch": 0.0, "learning_rate": 4.6135650562808456e-05, "loss": 3.919, "step": 236032},
    {"epoch": 0.0, "learning_rate": 4.6127264615297936e-05, "loss": 3.923, "step": 236544},
    {"epoch": 0.0, "learning_rate": 4.6118878667787416e-05, "loss": 3.9097, "step": 237056},
    {"epoch": 0.0, "learning_rate": 4.6110492720276896e-05, "loss": 3.9217, "step": 237568},
    {"epoch": 0.0, "learning_rate": 4.6102106772766376e-05, "loss": 3.9227, "step": 238080},
    {"epoch": 0.0, "learning_rate": 4.6093720825255856e-05, "loss": 3.9173, "step": 238592},
    {"epoch": 0.0, "learning_rate": 4.6085334877745336e-05, "loss": 3.9409, "step": 239104},
    {"epoch": 0.0, "learning_rate": 4.6076965309038545e-05, "loss": 3.925, "step": 239616},
    {"epoch": 0.0, "learning_rate": 4.6068579361528025e-05,
| "loss": 3.9313, | |
| "step": 240128 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6060193414017505e-05, | |
| "loss": 3.9201, | |
| "step": 240640 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6051807466506985e-05, | |
| "loss": 3.9252, | |
| "step": 241152 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6043437897800194e-05, | |
| "loss": 3.9186, | |
| "step": 241664 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6035051950289674e-05, | |
| "loss": 3.9183, | |
| "step": 242176 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6026666002779154e-05, | |
| "loss": 3.9209, | |
| "step": 242688 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6018280055268634e-05, | |
| "loss": 3.9092, | |
| "step": 243200 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.6009894107758114e-05, | |
| "loss": 3.9056, | |
| "step": 243712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.600152453905133e-05, | |
| "loss": 3.9104, | |
| "step": 244224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.599313859154081e-05, | |
| "loss": 3.9226, | |
| "step": 244736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.598475264403029e-05, | |
| "loss": 3.9192, | |
| "step": 245248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.597636669651977e-05, | |
| "loss": 3.9207, | |
| "step": 245760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.596799712781298e-05, | |
| "loss": 3.9225, | |
| "step": 246272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.595961118030246e-05, | |
| "loss": 3.9037, | |
| "step": 246784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.595122523279194e-05, | |
| "loss": 3.9113, | |
| "step": 247296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.594283928528142e-05, | |
| "loss": 3.9082, | |
| "step": 247808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.593446971657463e-05, | |
| "loss": 3.9012, | |
| "step": 248320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.592608376906411e-05, | |
| "loss": 3.9038, | |
| "step": 248832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.591769782155359e-05, | |
| "loss": 3.9035, | |
| "step": 249344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.590931187404307e-05, | |
| "loss": 3.9113, | |
| "step": 249856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.590094230533628e-05, | |
| "loss": 3.9138, | |
| "step": 250368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.589255635782576e-05, | |
| "loss": 3.9101, | |
| "step": 250880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.588417041031524e-05, | |
| "loss": 3.9032, | |
| "step": 251392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.587578446280472e-05, | |
| "loss": 3.9155, | |
| "step": 251904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.58673985152942e-05, | |
| "loss": 3.8973, | |
| "step": 252416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.585902894658741e-05, | |
| "loss": 3.9085, | |
| "step": 252928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.585064299907689e-05, | |
| "loss": 3.9034, | |
| "step": 253440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.584225705156637e-05, | |
| "loss": 3.8887, | |
| "step": 253952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.583388748285958e-05, | |
| "loss": 3.9101, | |
| "step": 254464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.582550153534906e-05, | |
| "loss": 3.9018, | |
| "step": 254976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.581711558783854e-05, | |
| "loss": 3.9, | |
| "step": 255488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.580872964032802e-05, | |
| "loss": 3.8919, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.580034369281751e-05, | |
| "loss": 3.8971, | |
| "step": 256512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.579197412411072e-05, | |
| "loss": 3.8835, | |
| "step": 257024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.57835881766002e-05, | |
| "loss": 3.9064, | |
| "step": 257536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.577520222908968e-05, | |
| "loss": 3.8969, | |
| "step": 258048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.576681628157916e-05, | |
| "loss": 3.9018, | |
| "step": 258560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5758430334068637e-05, | |
| "loss": 3.9077, | |
| "step": 259072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5750060765361846e-05, | |
| "loss": 3.8916, | |
| "step": 259584 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5741674817851326e-05, | |
| "loss": 3.8833, | |
| "step": 260096 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5733288870340806e-05, | |
| "loss": 3.9025, | |
| "step": 260608 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5724902922830286e-05, | |
| "loss": 3.8844, | |
| "step": 261120 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5716533354123495e-05, | |
| "loss": 3.8822, | |
| "step": 261632 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5708147406612975e-05, | |
| "loss": 3.9011, | |
| "step": 262144 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.569976145910246e-05, | |
| "loss": 3.8981, | |
| "step": 262656 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.569137551159194e-05, | |
| "loss": 3.8866, | |
| "step": 263168 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.568300594288515e-05, | |
| "loss": 3.8839, | |
| "step": 263680 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.567461999537463e-05, | |
| "loss": 3.8768, | |
| "step": 264192 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.566623404786411e-05, | |
| "loss": 3.8869, | |
| "step": 264704 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.565784810035359e-05, | |
| "loss": 3.8984, | |
| "step": 265216 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.56494785316468e-05, | |
| "loss": 3.8971, | |
| "step": 265728 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.564109258413628e-05, | |
| "loss": 3.8999, | |
| "step": 266240 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.563270663662576e-05, | |
| "loss": 3.8909, | |
| "step": 266752 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.562432068911524e-05, | |
| "loss": 3.907, | |
| "step": 267264 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.561595112040845e-05, | |
| "loss": 3.8818, | |
| "step": 267776 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.560756517289793e-05, | |
| "loss": 3.8937, | |
| "step": 268288 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5599179225387415e-05, | |
| "loss": 3.8888, | |
| "step": 268800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5590793277876895e-05, | |
| "loss": 3.8754, | |
| "step": 269312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5582407330366375e-05, | |
| "loss": 3.8969, | |
| "step": 269824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5574037761659584e-05, | |
| "loss": 3.8972, | |
| "step": 270336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5565651814149064e-05, | |
| "loss": 3.8899, | |
| "step": 270848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5557265866638544e-05, | |
| "loss": 3.8799, | |
| "step": 271360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5548879919128024e-05, | |
| "loss": 3.872, | |
| "step": 271872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.554051035042123e-05, | |
| "loss": 3.8837, | |
| "step": 272384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.553212440291071e-05, | |
| "loss": 3.8843, | |
| "step": 272896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.552373845540019e-05, | |
| "loss": 3.8908, | |
| "step": 273408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.5515352507889666e-05, | |
| "loss": 3.8801, | |
| "step": 273920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.550698293918288e-05, | |
| "loss": 3.8875, | |
| "step": 274432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.549859699167237e-05, | |
| "loss": 3.8804, | |
| "step": 274944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.549021104416185e-05, | |
| "loss": 3.8762, | |
| "step": 275456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.548182509665132e-05, | |
| "loss": 3.879, | |
| "step": 275968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.547345552794454e-05, | |
| "loss": 3.8761, | |
| "step": 276480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.546506958043402e-05, | |
| "loss": 3.8947, | |
| "step": 276992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.54566836329235e-05, | |
| "loss": 3.8919, | |
| "step": 277504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.544829768541297e-05, | |
| "loss": 3.8919, | |
| "step": 278016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.543992811670619e-05, | |
| "loss": 3.8697, | |
| "step": 278528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5431542169195667e-05, | |
| "loss": 3.8834, | |
| "step": 279040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.542315622168514e-05, | |
| "loss": 3.8794, | |
| "step": 279552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.541477027417462e-05, | |
| "loss": 3.885, | |
| "step": 280064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5406400705467836e-05, | |
| "loss": 3.878, | |
| "step": 280576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.539801475795732e-05, | |
| "loss": 3.8783, | |
| "step": 281088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5389628810446796e-05, | |
| "loss": 3.8685, | |
| "step": 281600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5381242862936275e-05, | |
| "loss": 3.8893, | |
| "step": 282112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.537287329422949e-05, | |
| "loss": 3.8616, | |
| "step": 282624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.536448734671897e-05, | |
| "loss": 3.8763, | |
| "step": 283136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5356101399208444e-05, | |
| "loss": 3.8748, | |
| "step": 283648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5347715451697924e-05, | |
| "loss": 3.8738, | |
| "step": 284160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.533934588299114e-05, | |
| "loss": 3.8693, | |
| "step": 284672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5330959935480613e-05, | |
| "loss": 3.8843, | |
| "step": 285184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5322573987970093e-05, | |
| "loss": 3.8775, | |
| "step": 285696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5314188040459573e-05, | |
| "loss": 3.87, | |
| "step": 286208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.530580209294906e-05, | |
| "loss": 3.8833, | |
| "step": 286720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.529743252424227e-05, | |
| "loss": 3.8692, | |
| "step": 287232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.528904657673175e-05, | |
| "loss": 3.8814, | |
| "step": 287744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.528066062922123e-05, | |
| "loss": 3.8877, | |
| "step": 288256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5272291060514445e-05, | |
| "loss": 3.8764, | |
| "step": 288768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.526390511300392e-05, | |
| "loss": 3.8712, | |
| "step": 289280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.52555191654934e-05, | |
| "loss": 3.8692, | |
| "step": 289792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.524713321798288e-05, | |
| "loss": 3.8695, | |
| "step": 290304 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.523874727047236e-05, | |
| "loss": 3.8679, | |
| "step": 290816 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.523036132296184e-05, | |
| "loss": 3.8735, | |
| "step": 291328 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.522197537545132e-05, | |
| "loss": 3.8586, | |
| "step": 291840 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.52135894279408e-05, | |
| "loss": 3.8814, | |
| "step": 292352 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.520521985923401e-05, | |
| "loss": 3.877, | |
| "step": 292864 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5196833911723494e-05, | |
| "loss": 3.8732, | |
| "step": 293376 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5188447964212974e-05, | |
| "loss": 3.8604, | |
| "step": 293888 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5180062016702454e-05, | |
| "loss": 3.8665, | |
| "step": 294400 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.517169244799566e-05, | |
| "loss": 3.8641, | |
| "step": 294912 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.516330650048514e-05, | |
| "loss": 3.878, | |
| "step": 295424 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.515492055297462e-05, | |
| "loss": 3.8808, | |
| "step": 295936 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.51465346054641e-05, | |
| "loss": 3.8664, | |
| "step": 296448 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.513816503675731e-05, | |
| "loss": 3.8539, | |
| "step": 296960 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.512977908924679e-05, | |
| "loss": 3.8741, | |
| "step": 297472 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.512139314173627e-05, | |
| "loss": 3.8653, | |
| "step": 297984 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.511300719422575e-05, | |
| "loss": 3.8803, | |
| "step": 298496 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.510463762551896e-05, | |
| "loss": 3.8593, | |
| "step": 299008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.509625167800845e-05, | |
| "loss": 3.8721, | |
| "step": 299520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.508786573049793e-05, | |
| "loss": 3.8709, | |
| "step": 300032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.507947978298741e-05, | |
| "loss": 3.8751, | |
| "step": 300544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5071110214280616e-05, | |
| "loss": 3.8701, | |
| "step": 301056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5062724266770096e-05, | |
| "loss": 3.867, | |
| "step": 301568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5054338319259576e-05, | |
| "loss": 3.8605, | |
| "step": 302080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5045952371749056e-05, | |
| "loss": 3.8637, | |
| "step": 302592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5037582803042265e-05, | |
| "loss": 3.8686, | |
| "step": 303104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5029196855531745e-05, | |
| "loss": 3.8658, | |
| "step": 303616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5020810908021225e-05, | |
| "loss": 3.8593, | |
| "step": 304128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5012424960510705e-05, | |
| "loss": 3.8691, | |
| "step": 304640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.5004055391803914e-05, | |
| "loss": 3.8613, | |
| "step": 305152 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.9313888549804688, | |
| "eval_runtime": 306.8209, | |
| "eval_samples_per_second": 1243.693, | |
| "eval_steps_per_second": 38.866, | |
| "step": 305280 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.49956694442934e-05, | |
| "loss": 3.8555, | |
| "step": 305664 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.498728349678288e-05, | |
| "loss": 3.8555, | |
| "step": 306176 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.497889754927236e-05, | |
| "loss": 3.8705, | |
| "step": 306688 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.497052798056557e-05, | |
| "loss": 3.8593, | |
| "step": 307200 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.496214203305505e-05, | |
| "loss": 3.873, | |
| "step": 307712 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.495375608554453e-05, | |
| "loss": 3.8557, | |
| "step": 308224 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.494537013803401e-05, | |
| "loss": 3.8616, | |
| "step": 308736 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.493700056932722e-05, | |
| "loss": 3.8545, | |
| "step": 309248 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.49286146218167e-05, | |
| "loss": 3.8587, | |
| "step": 309760 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.492022867430618e-05, | |
| "loss": 3.8582, | |
| "step": 310272 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.491184272679566e-05, | |
| "loss": 3.8573, | |
| "step": 310784 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.490347315808887e-05, | |
| "loss": 3.8651, | |
| "step": 311296 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4895103589382084e-05, | |
| "loss": 3.8536, | |
| "step": 311808 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4886717641871564e-05, | |
| "loss": 3.849, | |
| "step": 312320 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4878331694361044e-05, | |
| "loss": 3.8506, | |
| "step": 312832 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4869945746850524e-05, | |
| "loss": 3.8408, | |
| "step": 313344 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4861559799340004e-05, | |
| "loss": 3.8506, | |
| "step": 313856 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4853173851829484e-05, | |
| "loss": 3.8518, | |
| "step": 314368 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4844787904318964e-05, | |
| "loss": 3.8495, | |
| "step": 314880 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.4836401956808444e-05, | |
| "loss": 3.8714, | |
| "step": 315392 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.482803238810165e-05, | |
| "loss": 3.8558, | |
| "step": 315904 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.481964644059113e-05, | |
| "loss": 3.8614, | |
| "step": 316416 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.481126049308061e-05, | |
| "loss": 3.8525, | |
| "step": 316928 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.480287454557009e-05, | |
| "loss": 3.8561, | |
| "step": 317440 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.479450497686331e-05, | |
| "loss": 3.8466, | |
| "step": 317952 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.478611902935279e-05, | |
| "loss": 3.8556, | |
| "step": 318464 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.477773308184227e-05, | |
| "loss": 3.8514, | |
| "step": 318976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.476934713433175e-05, | |
| "loss": 3.8448, | |
| "step": 319488 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.476097756562496e-05, | |
| "loss": 3.8405, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.475259161811444e-05, | |
| "loss": 3.8438, | |
| "step": 320512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.474420567060392e-05, | |
| "loss": 3.8511, | |
| "step": 321024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4735836101897126e-05, | |
| "loss": 3.8545, | |
| "step": 321536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4727450154386606e-05, | |
| "loss": 3.8505, | |
| "step": 322048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4719064206876086e-05, | |
| "loss": 3.8533, | |
| "step": 322560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4710678259365566e-05, | |
| "loss": 3.8387, | |
| "step": 323072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4702292311855046e-05, | |
| "loss": 3.8461, | |
| "step": 323584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4693906364344526e-05, | |
| "loss": 3.8431, | |
| "step": 324096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4685520416834006e-05, | |
| "loss": 3.8338, | |
| "step": 324608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4677134469323486e-05, | |
| "loss": 3.8408, | |
| "step": 325120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4668764900616695e-05, | |
| "loss": 3.8351, | |
| "step": 325632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4660378953106175e-05, | |
| "loss": 3.847, | |
| "step": 326144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4651993005595655e-05, | |
| "loss": 3.8489, | |
| "step": 326656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.464362343688887e-05, | |
| "loss": 3.8442, | |
| "step": 327168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4635237489378344e-05, | |
| "loss": 3.8358, | |
| "step": 327680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4626851541867824e-05, | |
| "loss": 3.8504, | |
| "step": 328192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4618465594357304e-05, | |
| "loss": 3.8324, | |
| "step": 328704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4610079646846784e-05, | |
| "loss": 3.8458, | |
| "step": 329216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.460171007814e-05, | |
| "loss": 3.838, | |
| "step": 329728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.459332413062948e-05, | |
| "loss": 3.8229, | |
| "step": 330240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.458493818311896e-05, | |
| "loss": 3.8477, | |
| "step": 330752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.457656861441217e-05, | |
| "loss": 3.8371, | |
| "step": 331264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.456818266690165e-05, | |
| "loss": 3.8354, | |
| "step": 331776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.455979671939113e-05, | |
| "loss": 3.831, | |
| "step": 332288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.455141077188061e-05, | |
| "loss": 3.8288, | |
| "step": 332800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.454304120317382e-05, | |
| "loss": 3.8257, | |
| "step": 333312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.45346552556633e-05, | |
| "loss": 3.8375, | |
| "step": 333824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.452626930815278e-05, | |
| "loss": 3.8373, | |
| "step": 334336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.451788336064226e-05, | |
| "loss": 3.8365, | |
| "step": 334848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.450951379193547e-05, | |
| "loss": 3.8422, | |
| "step": 335360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4501127844424954e-05, | |
| "loss": 3.8306, | |
| "step": 335872 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4492741896914433e-05, | |
| "loss": 3.8184, | |
| "step": 336384 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4484355949403913e-05, | |
| "loss": 3.8401, | |
| "step": 336896 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.447598638069712e-05, | |
| "loss": 3.8232, | |
| "step": 337408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.44676004331866e-05, | |
| "loss": 3.8165, | |
| "step": 337920 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.445921448567608e-05, | |
| "loss": 3.8426, | |
| "step": 338432 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.445082853816556e-05, | |
| "loss": 3.8343, | |
| "step": 338944 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.444245896945877e-05, | |
| "loss": 3.8263, | |
| "step": 339456 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.443407302194825e-05, | |
| "loss": 3.8228, | |
| "step": 339968 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.442568707443773e-05, | |
| "loss": 3.8134, | |
| "step": 340480 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.441730112692721e-05, | |
| "loss": 3.825, | |
| "step": 340992 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.440893155822042e-05, | |
| "loss": 3.8403, | |
| "step": 341504 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.44005456107099e-05, | |
| "loss": 3.8326, | |
| "step": 342016 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.439215966319939e-05, | |
| "loss": 3.8358, | |
| "step": 342528 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.438377371568887e-05, | |
| "loss": 3.8293, | |
| "step": 343040 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4375404146982076e-05, | |
| "loss": 3.8459, | |
| "step": 343552 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4367018199471556e-05, | |
| "loss": 3.8215, | |
| "step": 344064 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4358632251961036e-05, | |
| "loss": 3.8344, | |
| "step": 344576 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4350246304450516e-05, | |
| "loss": 3.8293, | |
| "step": 345088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4341876735743725e-05, | |
| "loss": 3.8098, | |
| "step": 345600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4333490788233205e-05, | |
| "loss": 3.8406, | |
| "step": 346112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4325104840722685e-05, | |
| "loss": 3.8351, | |
| "step": 346624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4316718893212165e-05, | |
| "loss": 3.8314, | |
| "step": 347136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4308349324505374e-05, | |
| "loss": 3.8213, | |
| "step": 347648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.4299963376994854e-05, | |
| "loss": 3.8099, | |
| "step": 348160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.429157742948434e-05, | |
| "loss": 3.8259, | |
| "step": 348672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.428320786077755e-05, | |
| "loss": 3.8224, | |
| "step": 349184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.427482191326703e-05, | |
| "loss": 3.8309, | |
| "step": 349696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.426643596575651e-05, | |
| "loss": 3.8223, | |
| "step": 350208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.425805001824599e-05, | |
| "loss": 3.8257, | |
| "step": 350720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.424966407073547e-05, | |
| "loss": 3.8216, | |
| "step": 351232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.424127812322495e-05, | |
| "loss": 3.8206, | |
| "step": 351744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.423289217571443e-05, | |
| "loss": 3.8193, | |
| "step": 352256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.422452260700764e-05, | |
| "loss": 3.8172, | |
| "step": 352768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.421613665949712e-05, | |
| "loss": 3.8323, | |
| "step": 353280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.42077507119866e-05, | |
| "loss": 3.8329, | |
| "step": 353792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.419936476447608e-05, | |
| "loss": 3.8374, | |
| "step": 354304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4190995195769294e-05, | |
| "loss": 3.8094, | |
| "step": 354816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4182609248258774e-05, | |
| "loss": 3.825, | |
| "step": 355328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4174223300748254e-05, | |
| "loss": 3.8219, | |
| "step": 355840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4165837353237734e-05, | |
| "loss": 3.8247, | |
| "step": 356352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4157467784530943e-05, | |
| "loss": 3.8219, | |
| "step": 356864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4149081837020423e-05, | |
| "loss": 3.8166, | |
| "step": 357376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.41406958895099e-05, | |
| "loss": 3.8154, | |
| "step": 357888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.413230994199938e-05, | |
| "loss": 3.8294, | |
| "step": 358400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.412394037329259e-05, | |
| "loss": 3.8035, | |
| "step": 358912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.411555442578207e-05, | |
| "loss": 3.8174, | |
| "step": 359424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.410716847827155e-05, | |
| "loss": 3.8115, | |
| "step": 359936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.409878253076103e-05, | |
| "loss": 3.8206, | |
| "step": 360448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.409041296205425e-05, | |
| "loss": 3.8125, | |
| "step": 360960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.408202701454373e-05, | |
| "loss": 3.824, | |
| "step": 361472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.407364106703321e-05, | |
| "loss": 3.8217, | |
| "step": 361984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.406525511952269e-05, | |
| "loss": 3.8131, | |
| "step": 362496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.40568855508159e-05, | |
| "loss": 3.8252, | |
| "step": 363008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.404849960330538e-05, | |
| "loss": 3.8131, | |
| "step": 363520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.404011365579486e-05, | |
| "loss": 3.8264, | |
| "step": 364032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.403172770828434e-05, | |
| "loss": 3.8281, | |
| "step": 364544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4023358139577546e-05, | |
| "loss": 3.817, | |
| "step": 365056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4014972192067026e-05, | |
| "loss": 3.8145, | |
| "step": 365568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4006586244556506e-05, | |
| "loss": 3.8168, | |
| "step": 366080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3998200297045986e-05, | |
| "loss": 3.8113, | |
| "step": 366592 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.39898307283392e-05, | |
| "loss": 3.8137, | |
| "step": 367104 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.398144478082868e-05, | |
| "loss": 3.817, | |
| "step": 367616 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.397305883331816e-05, | |
| "loss": 3.804, | |
| "step": 368128 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.396467288580764e-05, | |
| "loss": 3.8237, | |
| "step": 368640 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.395630331710085e-05, | |
| "loss": 3.8193, | |
| "step": 369152 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.394791736959033e-05, | |
| "loss": 3.8199, | |
| "step": 369664 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.393953142207981e-05, | |
| "loss": 3.8051, | |
| "step": 370176 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.393114547456929e-05, | |
| "loss": 3.8111, | |
| "step": 370688 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.39227759058625e-05, | |
| "loss": 3.8103, | |
| "step": 371200 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.391438995835198e-05, | |
| "loss": 3.8197, | |
| "step": 371712 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.390600401084146e-05, | |
| "loss": 3.8226, | |
| "step": 372224 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.389761806333094e-05, | |
| "loss": 3.8139, | |
| "step": 372736 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3889248494624155e-05, | |
| "loss": 3.7994, | |
| "step": 373248 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3880862547113635e-05, | |
| "loss": 3.8197, | |
| "step": 373760 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3872476599603115e-05, | |
| "loss": 3.8103, | |
| "step": 374272 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3864090652092595e-05, | |
| "loss": 3.8227, | |
| "step": 374784 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3855721083385804e-05, | |
| "loss": 3.805, | |
| "step": 375296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3847335135875284e-05, | |
| "loss": 3.8204, | |
| "step": 375808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3838949188364764e-05, | |
| "loss": 3.8159, | |
| "step": 376320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3830563240854244e-05, | |
| "loss": 3.8217, | |
| "step": 376832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.3822193672147453e-05, | |
| "loss": 3.8135, | |
| "step": 377344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.381380772463693e-05, | |
| "loss": 3.8125, | |
| "step": 377856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.380543815593014e-05, | |
| "loss": 3.8064, | |
| "step": 378368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.379705220841962e-05, | |
| "loss": 3.811, | |
| "step": 378880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.378866626090911e-05, | |
| "loss": 3.815, | |
| "step": 379392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.378028031339859e-05, | |
| "loss": 3.8113, | |
| "step": 379904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.377189436588807e-05, | |
| "loss": 3.8064, | |
| "step": 380416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.376350841837754e-05, | |
| "loss": 3.81, | |
| "step": 380928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.375512247086702e-05, | |
| "loss": 3.8119, | |
| "step": 381440 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.906362533569336, | |
| "eval_runtime": 307.2937, | |
| "eval_samples_per_second": 1241.78, | |
| "eval_steps_per_second": 38.807, | |
| "step": 381600 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.37467365233565e-05, | |
| "loss": 3.7979, | |
| "step": 381952 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.373835057584598e-05, | |
| "loss": 3.7982, | |
| "step": 382464 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.372996462833546e-05, | |
| "loss": 3.8167, | |
| "step": 382976 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.372157868082494e-05, | |
| "loss": 3.8041, | |
| "step": 383488 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.371319273331442e-05, | |
| "loss": 3.8176, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.37048067858039e-05, | |
| "loss": 3.8029, | |
| "step": 384512 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.369642083829338e-05, | |
| "loss": 3.8066, | |
| "step": 385024 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.368803489078286e-05, | |
| "loss": 3.8054, | |
| "step": 385536 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.367968170087981e-05, | |
| "loss": 3.8036, | |
| "step": 386048 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.367129575336929e-05, | |
| "loss": 3.8043, | |
| "step": 386560 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.366290980585877e-05, | |
| "loss": 3.8082, | |
| "step": 387072 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.365452385834825e-05, | |
| "loss": 3.8081, | |
| "step": 387584 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3646154289641456e-05, | |
| "loss": 3.8025, | |
| "step": 388096 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3637768342130936e-05, | |
| "loss": 3.795, | |
| "step": 388608 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3629382394620416e-05, | |
| "loss": 3.8022, | |
| "step": 389120 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3620996447109896e-05, | |
| "loss": 3.7877, | |
| "step": 389632 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3612610499599376e-05, | |
| "loss": 3.7975, | |
| "step": 390144 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3604224552088856e-05, | |
| "loss": 3.7994, | |
| "step": 390656 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3595838604578336e-05, | |
| "loss": 3.7985, | |
| "step": 391168 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3587452657067816e-05, | |
| "loss": 3.82, | |
| "step": 391680 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3579066709557295e-05, | |
| "loss": 3.8066, | |
| "step": 392192 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3570680762046775e-05, | |
| "loss": 3.8101, | |
| "step": 392704 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3562294814536255e-05, | |
| "loss": 3.8009, | |
| "step": 393216 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.355392524582947e-05, | |
| "loss": 3.8044, | |
| "step": 393728 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.354553929831895e-05, | |
| "loss": 3.7964, | |
| "step": 394240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.353715335080843e-05, | |
| "loss": 3.8039, | |
| "step": 394752 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3528767403297904e-05, | |
| "loss": 3.7966, | |
| "step": 395264 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3520381455787384e-05, | |
| "loss": 3.7955, | |
| "step": 395776 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3511995508276864e-05, | |
| "loss": 3.7909, | |
| "step": 396288 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.3503609560766344e-05, | |
| "loss": 3.7919, | |
| "step": 396800 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3495223613255824e-05, | |
| "loss": 3.7975, | |
| "step": 397312 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.348685404454903e-05, | |
| "loss": 3.8055, | |
| "step": 397824 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.347846809703851e-05, | |
| "loss": 3.799, | |
| "step": 398336 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.347008214952799e-05, | |
| "loss": 3.8064, | |
| "step": 398848 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.346169620201748e-05, | |
| "loss": 3.7874, | |
| "step": 399360 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.345332663331069e-05, | |
| "loss": 3.7977, | |
| "step": 399872 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.344494068580017e-05, | |
| "loss": 3.7916, | |
| "step": 400384 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.343655473828965e-05, | |
| "loss": 3.7854, | |
| "step": 400896 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.342816879077913e-05, | |
| "loss": 3.7931, | |
| "step": 401408 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.341979922207234e-05, | |
| "loss": 3.7797, | |
| "step": 401920 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.341141327456182e-05, | |
| "loss": 3.7955, | |
| "step": 402432 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.34030273270513e-05, | |
| "loss": 3.7982, | |
| "step": 402944 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.339464137954078e-05, | |
| "loss": 3.7986, | |
| "step": 403456 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.338628818963772e-05, | |
| "loss": 3.7892, | |
| "step": 403968 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.33779022421272e-05, | |
| "loss": 3.8023, | |
| "step": 404480 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.336951629461668e-05, | |
| "loss": 3.781, | |
| "step": 404992 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.336113034710616e-05, | |
| "loss": 3.7983, | |
| "step": 405504 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.335274439959564e-05, | |
| "loss": 3.7871, | |
| "step": 406016 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.334435845208512e-05, | |
| "loss": 3.7668, | |
| "step": 406528 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.33359725045746e-05, | |
| "loss": 3.8028, | |
| "step": 407040 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.332760293586781e-05, | |
| "loss": 3.7853, | |
| "step": 407552 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.331921698835729e-05, | |
| "loss": 3.793, | |
| "step": 408064 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.331083104084677e-05, | |
| "loss": 3.7798, | |
| "step": 408576 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.330244509333625e-05, | |
| "loss": 3.7801, | |
| "step": 409088 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.329405914582573e-05, | |
| "loss": 3.7797, | |
| "step": 409600 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.328567319831521e-05, | |
| "loss": 3.7836, | |
| "step": 410112 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.327728725080469e-05, | |
| "loss": 3.7907, | |
| "step": 410624 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.32689176820979e-05, | |
| "loss": 3.7854, | |
| "step": 411136 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.326053173458739e-05, | |
| "loss": 3.7979, | |
| "step": 411648 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.325214578707687e-05, | |
| "loss": 3.7806, | |
| "step": 412160 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.324375983956635e-05, | |
| "loss": 3.7706, | |
| "step": 412672 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3235390270859556e-05, | |
| "loss": 3.7899, | |
| "step": 413184 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3227004323349036e-05, | |
| "loss": 3.7708, | |
| "step": 413696 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3218618375838516e-05, | |
| "loss": 3.7707, | |
| "step": 414208 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3210232428327996e-05, | |
| "loss": 3.7913, | |
| "step": 414720 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3201862859621205e-05, | |
| "loss": 3.7877, | |
| "step": 415232 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3193493290914414e-05, | |
| "loss": 3.776, | |
| "step": 415744 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3185107343403894e-05, | |
| "loss": 3.7757, | |
| "step": 416256 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3176721395893374e-05, | |
| "loss": 3.7667, | |
| "step": 416768 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3168335448382854e-05, | |
| "loss": 3.7769, | |
| "step": 417280 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.315994950087234e-05, | |
| "loss": 3.7922, | |
| "step": 417792 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.315156355336182e-05, | |
| "loss": 3.7868, | |
| "step": 418304 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.31431776058513e-05, | |
| "loss": 3.7883, | |
| "step": 418816 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.313480803714451e-05, | |
| "loss": 3.7824, | |
| "step": 419328 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.312642208963399e-05, | |
| "loss": 3.7976, | |
| "step": 419840 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.311803614212347e-05, | |
| "loss": 3.7737, | |
| "step": 420352 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.310965019461295e-05, | |
| "loss": 3.7882, | |
| "step": 420864 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.310126424710243e-05, | |
| "loss": 3.7797, | |
| "step": 421376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.309287829959191e-05, | |
| "loss": 3.7639, | |
| "step": 421888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.308449235208139e-05, | |
| "loss": 3.7922, | |
| "step": 422400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.30761227833746e-05, | |
| "loss": 3.7865, | |
| "step": 422912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.306773683586408e-05, | |
| "loss": 3.7835, | |
| "step": 423424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.305935088835356e-05, | |
| "loss": 3.7776, | |
| "step": 423936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.305096494084304e-05, | |
| "loss": 3.7668, | |
| "step": 424448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3042595372136254e-05, | |
| "loss": 3.7724, | |
| "step": 424960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3034225803429464e-05, | |
| "loss": 3.7775, | |
| "step": 425472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3025839855918944e-05, | |
| "loss": 3.785, | |
| "step": 425984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3017453908408424e-05, | |
| "loss": 3.7728, | |
| "step": 426496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.3009067960897903e-05, | |
| "loss": 3.7809, | |
| "step": 427008 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.3000682013387383e-05, | |
| "loss": 3.7722, | |
| "step": 427520 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.299231244468059e-05, | |
| "loss": 3.7767, | |
| "step": 428032 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.298392649717007e-05, | |
| "loss": 3.7707, | |
| "step": 428544 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.297554054965955e-05, | |
| "loss": 3.7738, | |
| "step": 429056 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.296715460214903e-05, | |
| "loss": 3.7857, | |
| "step": 429568 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.295876865463851e-05, | |
| "loss": 3.7898, | |
| "step": 430080 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.295038270712799e-05, | |
| "loss": 3.7897, | |
| "step": 430592 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.294199675961747e-05, | |
| "loss": 3.7611, | |
| "step": 431104 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.293361081210695e-05, | |
| "loss": 3.7802, | |
| "step": 431616 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.292524124340016e-05, | |
| "loss": 3.7705, | |
| "step": 432128 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.291687167469338e-05, | |
| "loss": 3.7834, | |
| "step": 432640 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.290848572718286e-05, | |
| "loss": 3.7722, | |
| "step": 433152 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.290009977967234e-05, | |
| "loss": 3.7757, | |
| "step": 433664 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.289171383216181e-05, | |
| "loss": 3.7661, | |
| "step": 434176 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.288332788465129e-05, | |
| "loss": 3.7814, | |
| "step": 434688 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.287494193714077e-05, | |
| "loss": 3.7592, | |
| "step": 435200 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.286655598963025e-05, | |
| "loss": 3.7776, | |
| "step": 435712 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.285817004211973e-05, | |
| "loss": 3.7624, | |
| "step": 436224 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2849800473412946e-05, | |
| "loss": 3.7805, | |
| "step": 436736 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2841414525902426e-05, | |
| "loss": 3.77, | |
| "step": 437248 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2833028578391906e-05, | |
| "loss": 3.7732, | |
| "step": 437760 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2824642630881386e-05, | |
| "loss": 3.7745, | |
| "step": 438272 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2816273062174595e-05, | |
| "loss": 3.7735, | |
| "step": 438784 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2807887114664075e-05, | |
| "loss": 3.7777, | |
| "step": 439296 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2799501167153555e-05, | |
| "loss": 3.7702, | |
| "step": 439808 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2791115219643035e-05, | |
| "loss": 3.7757, | |
| "step": 440320 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2782729272132515e-05, | |
| "loss": 3.7892, | |
| "step": 440832 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2774343324621995e-05, | |
| "loss": 3.7709, | |
| "step": 441344 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2765957377111475e-05, | |
| "loss": 3.7692, | |
| "step": 441856 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2757587808404684e-05, | |
| "loss": 3.7742, | |
| "step": 442368 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2749201860894164e-05, | |
| "loss": 3.7695, | |
| "step": 442880 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.274081591338365e-05, | |
| "loss": 3.7629, | |
| "step": 443392 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.273242996587313e-05, | |
| "loss": 3.774, | |
| "step": 443904 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.272404401836261e-05, | |
| "loss": 3.7591, | |
| "step": 444416 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.271565807085209e-05, | |
| "loss": 3.7801, | |
| "step": 444928 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.27072885021453e-05, | |
| "loss": 3.7779, | |
| "step": 445440 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.269890255463478e-05, | |
| "loss": 3.7733, | |
| "step": 445952 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.269051660712426e-05, | |
| "loss": 3.7647, | |
| "step": 446464 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.268213065961374e-05, | |
| "loss": 3.7636, | |
| "step": 446976 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.267374471210322e-05, | |
| "loss": 3.7679, | |
| "step": 447488 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.26653587645927e-05, | |
| "loss": 3.7768, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.265697281708217e-05, | |
| "loss": 3.7715, | |
| "step": 448512 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.264858686957165e-05, | |
| "loss": 3.7762, | |
| "step": 449024 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.264021730086487e-05, | |
| "loss": 3.7525, | |
| "step": 449536 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.263183135335435e-05, | |
| "loss": 3.776, | |
| "step": 450048 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.262344540584383e-05, | |
| "loss": 3.7684, | |
| "step": 450560 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2615075837137044e-05, | |
| "loss": 3.7763, | |
| "step": 451072 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2606689889626524e-05, | |
| "loss": 3.7598, | |
| "step": 451584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2598303942116e-05, | |
| "loss": 3.7785, | |
| "step": 452096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.258991799460548e-05, | |
| "loss": 3.7706, | |
| "step": 452608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.258154842589869e-05, | |
| "loss": 3.7804, | |
| "step": 453120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.257316247838817e-05, | |
| "loss": 3.7641, | |
| "step": 453632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2564776530877646e-05, | |
| "loss": 3.7756, | |
| "step": 454144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2556390583367126e-05, | |
| "loss": 3.7558, | |
| "step": 454656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2548004635856606e-05, | |
| "loss": 3.7694, | |
| "step": 455168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.2539618688346086e-05, | |
| "loss": 3.7729, | |
| "step": 455680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.25312491196393e-05, | |
| "loss": 3.7642, | |
| "step": 456192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.252286317212878e-05, | |
| "loss": 3.7684, | |
| "step": 456704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.251447722461826e-05, | |
| "loss": 3.7614, | |
| "step": 457216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.250609127710774e-05, | |
| "loss": 3.7702, | |
| "step": 457728 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.891617774963379, | |
| "eval_runtime": 303.681, | |
| "eval_samples_per_second": 1256.552, | |
| "eval_steps_per_second": 39.268, | |
| "step": 457920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.249770532959722e-05, | |
| "loss": 3.7651, | |
| "step": 458240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.248933576089043e-05, | |
| "loss": 3.7546, | |
| "step": 458752 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.248094981337991e-05, | |
| "loss": 3.7721, | |
| "step": 459264 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.247256386586939e-05, | |
| "loss": 3.7623, | |
| "step": 459776 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.246417791835887e-05, | |
| "loss": 3.7743, | |
| "step": 460288 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.245580834965208e-05, | |
| "loss": 3.7604, | |
| "step": 460800 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.244742240214156e-05, | |
| "loss": 3.7646, | |
| "step": 461312 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.243903645463104e-05, | |
| "loss": 3.7651, | |
| "step": 461824 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.243065050712052e-05, | |
| "loss": 3.7618, | |
| "step": 462336 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2422264559610006e-05, | |
| "loss": 3.7615, | |
| "step": 462848 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2413878612099486e-05, | |
| "loss": 3.7642, | |
| "step": 463360 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2405492664588966e-05, | |
| "loss": 3.7663, | |
| "step": 463872 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2397123095882175e-05, | |
| "loss": 3.7623, | |
| "step": 464384 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2388737148371655e-05, | |
| "loss": 3.7543, | |
| "step": 464896 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2380351200861135e-05, | |
| "loss": 3.7602, | |
| "step": 465408 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2371965253350615e-05, | |
| "loss": 3.7443, | |
| "step": 465920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2363579305840095e-05, | |
| "loss": 3.7554, | |
| "step": 466432 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2355193358329575e-05, | |
| "loss": 3.7572, | |
| "step": 466944 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2346807410819055e-05, | |
| "loss": 3.7541, | |
| "step": 467456 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.233842146330853e-05, | |
| "loss": 3.7789, | |
| "step": 467968 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.2330051894601744e-05, | |
| "loss": 3.767, | |
| "step": 468480 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.232168232589496e-05, | |
| "loss": 3.7693, | |
| "step": 468992 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.231329637838444e-05, | |
| "loss": 3.7559, | |
| "step": 469504 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.230491043087392e-05, | |
| "loss": 3.76, | |
| "step": 470016 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.22965244833634e-05, | |
| "loss": 3.7562, | |
| "step": 470528 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.228813853585288e-05, | |
| "loss": 3.7624, | |
| "step": 471040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.227975258834236e-05, | |
| "loss": 3.7572, | |
| "step": 471552 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.227136664083183e-05, | |
| "loss": 3.759, | |
| "step": 472064 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.226298069332131e-05, | |
| "loss": 3.7465, | |
| "step": 472576 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.225461112461453e-05, | |
| "loss": 3.7516, | |
| "step": 473088 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2246225177104e-05, | |
| "loss": 3.7535, | |
| "step": 473600 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.223785560839722e-05, | |
| "loss": 3.7651, | |
| "step": 474112 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.22294696608867e-05, | |
| "loss": 3.7606, | |
| "step": 474624 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.222108371337618e-05, | |
| "loss": 3.7651, | |
| "step": 475136 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.221269776586566e-05, | |
| "loss": 3.7469, | |
| "step": 475648 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.220431181835514e-05, | |
| "loss": 3.7528, | |
| "step": 476160 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.219592587084462e-05, | |
| "loss": 3.7552, | |
| "step": 476672 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2187556302137833e-05, | |
| "loss": 3.7461, | |
| "step": 477184 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.217917035462731e-05, | |
| "loss": 3.7499, | |
| "step": 477696 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2170784407116787e-05, | |
| "loss": 3.7442, | |
| "step": 478208 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2162398459606267e-05, | |
| "loss": 3.7535, | |
| "step": 478720 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2154012512095746e-05, | |
| "loss": 3.7597, | |
| "step": 479232 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2145626564585226e-05, | |
| "loss": 3.7557, | |
| "step": 479744 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2137256995878436e-05, | |
| "loss": 3.7509, | |
| "step": 480256 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2128871048367915e-05, | |
| "loss": 3.7568, | |
| "step": 480768 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.2120485100857395e-05, | |
| "loss": 3.7472, | |
| "step": 481280 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.211209915334688e-05, | |
| "loss": 3.7548, | |
| "step": 481792 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.210371320583636e-05, | |
| "loss": 3.7471, | |
| "step": 482304 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.209532725832584e-05, | |
| "loss": 3.7267, | |
| "step": 482816 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.208694131081532e-05, | |
| "loss": 3.7685, | |
| "step": 483328 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.207857174210853e-05, | |
| "loss": 3.7438, | |
| "step": 483840 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.207018579459801e-05, | |
| "loss": 3.7544, | |
| "step": 484352 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.206179984708749e-05, | |
| "loss": 3.7364, | |
| "step": 484864 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.205341389957697e-05, | |
| "loss": 3.744, | |
| "step": 485376 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.204504433087018e-05, | |
| "loss": 3.7326, | |
| "step": 485888 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.203665838335966e-05, | |
| "loss": 3.7465, | |
| "step": 486400 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.202827243584914e-05, | |
| "loss": 3.7494, | |
| "step": 486912 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.201988648833862e-05, | |
| "loss": 3.7491, | |
| "step": 487424 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.20115005408281e-05, | |
| "loss": 3.7581, | |
| "step": 487936 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.200311459331758e-05, | |
| "loss": 3.7428, | |
| "step": 488448 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1994745024610796e-05, | |
| "loss": 3.7313, | |
| "step": 488960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1986359077100276e-05, | |
| "loss": 3.7561, | |
| "step": 489472 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1977973129589756e-05, | |
| "loss": 3.7306, | |
| "step": 489984 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1969587182079236e-05, | |
| "loss": 3.7289, | |
| "step": 490496 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1961201234568716e-05, | |
| "loss": 3.7516, | |
| "step": 491008 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1952831665861925e-05, | |
| "loss": 3.75, | |
| "step": 491520 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1944445718351405e-05, | |
| "loss": 3.7353, | |
| "step": 492032 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1936059770840885e-05, | |
| "loss": 3.7399, | |
| "step": 492544 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1927673823330365e-05, | |
| "loss": 3.7285, | |
| "step": 493056 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1919304254623574e-05, | |
| "loss": 3.7366, | |
| "step": 493568 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1910918307113054e-05, | |
| "loss": 3.7483, | |
| "step": 494080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1902532359602534e-05, | |
| "loss": 3.747, | |
| "step": 494592 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1894146412092014e-05, | |
| "loss": 3.7519, | |
| "step": 495104 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1885760464581493e-05, | |
| "loss": 3.7473, | |
| "step": 495616 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.187739089587471e-05, | |
| "loss": 3.7552, | |
| "step": 496128 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.186900494836419e-05, | |
| "loss": 3.7343, | |
| "step": 496640 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.186061900085366e-05, | |
| "loss": 3.7516, | |
| "step": 497152 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.185223305334314e-05, | |
| "loss": 3.737, | |
| "step": 497664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.184384710583262e-05, | |
| "loss": 3.7306, | |
| "step": 498176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.18354611583221e-05, | |
| "loss": 3.7501, | |
| "step": 498688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.182707521081158e-05, | |
| "loss": 3.7514, | |
| "step": 499200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.181868926330106e-05, | |
| "loss": 3.7462, | |
| "step": 499712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.181031969459427e-05, | |
| "loss": 3.7412, | |
| "step": 500224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.180193374708375e-05, | |
| "loss": 3.7282, | |
| "step": 500736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.179356417837697e-05, | |
| "loss": 3.7313, | |
| "step": 501248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.178517823086645e-05, | |
| "loss": 3.7427, | |
| "step": 501760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.177679228335593e-05, | |
| "loss": 3.7479, | |
| "step": 502272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.176840633584541e-05, | |
| "loss": 3.7354, | |
| "step": 502784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.1760036767138616e-05, | |
| "loss": 3.7438, | |
| "step": 503296 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1751650819628096e-05, | |
| "loss": 3.7289, | |
| "step": 503808 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1743264872117576e-05, | |
| "loss": 3.7422, | |
| "step": 504320 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1734878924607056e-05, | |
| "loss": 3.7337, | |
| "step": 504832 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1726492977096536e-05, | |
| "loss": 3.7366, | |
| "step": 505344 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1718107029586016e-05, | |
| "loss": 3.7468, | |
| "step": 505856 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1709737460879225e-05, | |
| "loss": 3.7524, | |
| "step": 506368 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1701351513368705e-05, | |
| "loss": 3.7508, | |
| "step": 506880 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.169296556585819e-05, | |
| "loss": 3.7265, | |
| "step": 507392 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.168457961834767e-05, | |
| "loss": 3.7425, | |
| "step": 507904 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.167619367083715e-05, | |
| "loss": 3.7314, | |
| "step": 508416 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.166782410213036e-05, | |
| "loss": 3.7448, | |
| "step": 508928 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.165943815461984e-05, | |
| "loss": 3.7349, | |
| "step": 509440 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.165105220710932e-05, | |
| "loss": 3.7405, | |
| "step": 509952 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.16426662595988e-05, | |
| "loss": 3.7307, | |
| "step": 510464 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.163428031208828e-05, | |
| "loss": 3.741, | |
| "step": 510976 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.162591074338149e-05, | |
| "loss": 3.7255, | |
| "step": 511488 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.161752479587097e-05, | |
| "loss": 3.7387, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.160913884836045e-05, | |
| "loss": 3.724, | |
| "step": 512512 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.160075290084993e-05, | |
| "loss": 3.7435, | |
| "step": 513024 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1592383332143145e-05, | |
| "loss": 3.733, | |
| "step": 513536 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1583997384632625e-05, | |
| "loss": 3.7346, | |
| "step": 514048 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1575611437122105e-05, | |
| "loss": 3.7352, | |
| "step": 514560 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1567225489611585e-05, | |
| "loss": 3.7378, | |
| "step": 515072 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1558855920904794e-05, | |
| "loss": 3.7384, | |
| "step": 515584 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1550469973394274e-05, | |
| "loss": 3.7333, | |
| "step": 516096 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1542084025883754e-05, | |
| "loss": 3.7422, | |
| "step": 516608 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1533698078373234e-05, | |
| "loss": 3.7476, | |
| "step": 517120 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1525312130862714e-05, | |
| "loss": 3.7402, | |
| "step": 517632 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1516926183352194e-05, | |
| "loss": 3.7334, | |
| "step": 518144 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.15085566146454e-05, | |
| "loss": 3.7325, | |
| "step": 518656 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.150017066713488e-05, | |
| "loss": 3.7369, | |
| "step": 519168 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.149178471962436e-05, | |
| "loss": 3.7268, | |
| "step": 519680 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.148339877211384e-05, | |
| "loss": 3.7378, | |
| "step": 520192 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.147502920340706e-05, | |
| "loss": 3.7198, | |
| "step": 520704 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.146664325589654e-05, | |
| "loss": 3.7428, | |
| "step": 521216 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.145825730838602e-05, | |
| "loss": 3.7397, | |
| "step": 521728 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.14498713608755e-05, | |
| "loss": 3.7377, | |
| "step": 522240 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.144148541336497e-05, | |
| "loss": 3.7298, | |
| "step": 522752 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.143311584465819e-05, | |
| "loss": 3.7272, | |
| "step": 523264 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.142472989714767e-05, | |
| "loss": 3.7312, | |
| "step": 523776 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.141634394963715e-05, | |
| "loss": 3.7435, | |
| "step": 524288 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.140795800212662e-05, | |
| "loss": 3.7344, | |
| "step": 524800 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.13995720546161e-05, | |
| "loss": 3.7368, | |
| "step": 525312 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.139120248590932e-05, | |
| "loss": 3.7194, | |
| "step": 525824 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.13828165383988e-05, | |
| "loss": 3.7408, | |
| "step": 526336 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.137443059088828e-05, | |
| "loss": 3.732, | |
| "step": 526848 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.136604464337776e-05, | |
| "loss": 3.743, | |
| "step": 527360 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.135767507467097e-05, | |
| "loss": 3.7229, | |
| "step": 527872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1349289127160446e-05, | |
| "loss": 3.7413, | |
| "step": 528384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1340903179649926e-05, | |
| "loss": 3.7313, | |
| "step": 528896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1332517232139406e-05, | |
| "loss": 3.7506, | |
| "step": 529408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1324131284628886e-05, | |
| "loss": 3.7289, | |
| "step": 529920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1315761715922095e-05, | |
| "loss": 3.7369, | |
| "step": 530432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1307375768411575e-05, | |
| "loss": 3.7232, | |
| "step": 530944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1298989820901055e-05, | |
| "loss": 3.7333, | |
| "step": 531456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.1290603873390535e-05, | |
| "loss": 3.7354, | |
| "step": 531968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.128221792588002e-05, | |
| "loss": 3.7263, | |
| "step": 532480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.127384835717323e-05, | |
| "loss": 3.7391, | |
| "step": 532992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.126546240966271e-05, | |
| "loss": 3.7218, | |
| "step": 533504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.125707646215219e-05, | |
| "loss": 3.7381, | |
| "step": 534016 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.879366636276245, | |
| "eval_runtime": 311.5372, | |
| "eval_samples_per_second": 1224.865, | |
| "eval_steps_per_second": 38.278, | |
| "step": 534240 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.124869051464167e-05, | |
| "loss": 3.7263, | |
| "step": 534528 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.124030456713115e-05, | |
| "loss": 3.7176, | |
| "step": 535040 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.123191861962063e-05, | |
| "loss": 3.7335, | |
| "step": 535552 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.122353267211011e-05, | |
| "loss": 3.7263, | |
| "step": 536064 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.121514672459959e-05, | |
| "loss": 3.7403, | |
| "step": 536576 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.120676077708907e-05, | |
| "loss": 3.7272, | |
| "step": 537088 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.119839120838228e-05, | |
| "loss": 3.7265, | |
| "step": 537600 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.119000526087176e-05, | |
| "loss": 3.7298, | |
| "step": 538112 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.118161931336124e-05, | |
| "loss": 3.7256, | |
| "step": 538624 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.117323336585072e-05, | |
| "loss": 3.7278, | |
| "step": 539136 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.11648474183402e-05, | |
| "loss": 3.729, | |
| "step": 539648 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1156461470829686e-05, | |
| "loss": 3.7315, | |
| "step": 540160 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1148091902122895e-05, | |
| "loss": 3.7308, | |
| "step": 540672 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1139705954612375e-05, | |
| "loss": 3.717, | |
| "step": 541184 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1131320007101855e-05, | |
| "loss": 3.7212, | |
| "step": 541696 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1122934059591335e-05, | |
| "loss": 3.7128, | |
| "step": 542208 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.111454811208081e-05, | |
| "loss": 3.7195, | |
| "step": 542720 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.110616216457029e-05, | |
| "loss": 3.7254, | |
| "step": 543232 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.109777621705977e-05, | |
| "loss": 3.7155, | |
| "step": 543744 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1089406648352984e-05, | |
| "loss": 3.7437, | |
| "step": 544256 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.108102070084246e-05, | |
| "loss": 3.7345, | |
| "step": 544768 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.107263475333194e-05, | |
| "loss": 3.7353, | |
| "step": 545280 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.1064248805821423e-05, | |
| "loss": 3.7222, | |
| "step": 545792 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.105587923711463e-05, | |
| "loss": 3.7238, | |
| "step": 546304 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.104749328960411e-05, | |
| "loss": 3.7294, | |
| "step": 546816 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.103910734209359e-05, | |
| "loss": 3.724, | |
| "step": 547328 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.103072139458307e-05, | |
| "loss": 3.7237, | |
| "step": 547840 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.102235182587628e-05, | |
| "loss": 3.7295, | |
| "step": 548352 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.101396587836576e-05, | |
| "loss": 3.7088, | |
| "step": 548864 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.100557993085524e-05, | |
| "loss": 3.7164, | |
| "step": 549376 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.099719398334472e-05, | |
| "loss": 3.7197, | |
| "step": 549888 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.09888080358342e-05, | |
| "loss": 3.7274, | |
| "step": 550400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.098043846712741e-05, | |
| "loss": 3.7286, | |
| "step": 550912 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.097205251961689e-05, | |
| "loss": 3.7304, | |
| "step": 551424 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.096366657210638e-05, | |
| "loss": 3.7143, | |
| "step": 551936 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.095528062459586e-05, | |
| "loss": 3.7166, | |
| "step": 552448 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.094689467708534e-05, | |
| "loss": 3.7232, | |
| "step": 552960 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0938525108378546e-05, | |
| "loss": 3.7115, | |
| "step": 553472 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0930139160868026e-05, | |
| "loss": 3.7175, | |
| "step": 553984 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0921753213357506e-05, | |
| "loss": 3.706, | |
| "step": 554496 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0913367265846986e-05, | |
| "loss": 3.7216, | |
| "step": 555008 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0904981318336466e-05, | |
| "loss": 3.7183, | |
| "step": 555520 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0896611749629675e-05, | |
| "loss": 3.7222, | |
| "step": 556032 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0888225802119155e-05, | |
| "loss": 3.7215, | |
| "step": 556544 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0879839854608635e-05, | |
| "loss": 3.7201, | |
| "step": 557056 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0871453907098115e-05, | |
| "loss": 3.7151, | |
| "step": 557568 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0863067959587595e-05, | |
| "loss": 3.7194, | |
| "step": 558080 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.085469839088081e-05, | |
| "loss": 3.7124, | |
| "step": 558592 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.084631244337029e-05, | |
| "loss": 3.6952, | |
| "step": 559104 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.083792649585977e-05, | |
| "loss": 3.7333, | |
| "step": 559616 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.082954054834925e-05, | |
| "loss": 3.7086, | |
| "step": 560128 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.082117097964246e-05, | |
| "loss": 3.7235, | |
| "step": 560640 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.081278503213194e-05, | |
| "loss": 3.7081, | |
| "step": 561152 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.080439908462142e-05, | |
| "loss": 3.7054, | |
| "step": 561664 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.07960131371109e-05, | |
| "loss": 3.6983, | |
| "step": 562176 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.078762718960038e-05, | |
| "loss": 3.7118, | |
| "step": 562688 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.077925762089359e-05, | |
| "loss": 3.7201, | |
| "step": 563200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.077087167338307e-05, | |
| "loss": 3.7117, | |
| "step": 563712 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.076248572587255e-05, | |
| "loss": 3.7252, | |
| "step": 564224 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.075409977836203e-05, | |
| "loss": 3.7157, | |
| "step": 564736 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0745730209655244e-05, | |
| "loss": 3.6944, | |
| "step": 565248 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0737344262144724e-05, | |
| "loss": 3.718, | |
| "step": 565760 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0728958314634204e-05, | |
| "loss": 3.7011, | |
| "step": 566272 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0720572367123684e-05, | |
| "loss": 3.6951, | |
| "step": 566784 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0712186419613164e-05, | |
| "loss": 3.7236, | |
| "step": 567296 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.070381685090637e-05, | |
| "loss": 3.7162, | |
| "step": 567808 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.069543090339585e-05, | |
| "loss": 3.7018, | |
| "step": 568320 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.068704495588533e-05, | |
| "loss": 3.7078, | |
| "step": 568832 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.067865900837481e-05, | |
| "loss": 3.6962, | |
| "step": 569344 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.067028943966802e-05, | |
| "loss": 3.7046, | |
| "step": 569856 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.06619034921575e-05, | |
| "loss": 3.713, | |
| "step": 570368 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.065351754464698e-05, | |
| "loss": 3.714, | |
| "step": 570880 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.064513159713646e-05, | |
| "loss": 3.7186, | |
| "step": 571392 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.063674564962594e-05, | |
| "loss": 3.7103, | |
| "step": 571904 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.062835970211542e-05, | |
| "loss": 3.7217, | |
| "step": 572416 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.061999013340864e-05, | |
| "loss": 3.7009, | |
| "step": 572928 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.061160418589812e-05, | |
| "loss": 3.7207, | |
| "step": 573440 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.060321823838759e-05, | |
| "loss": 3.7041, | |
| "step": 573952 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.059483229087707e-05, | |
| "loss": 3.7017, | |
| "step": 574464 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.058646272217029e-05, | |
| "loss": 3.7168, | |
| "step": 574976 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.057807677465976e-05, | |
| "loss": 3.7196, | |
| "step": 575488 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.056969082714924e-05, | |
| "loss": 3.7128, | |
| "step": 576000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.056130487963872e-05, | |
| "loss": 3.7116, | |
| "step": 576512 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0552935310931936e-05, | |
| "loss": 3.6916, | |
| "step": 577024 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0544549363421416e-05, | |
| "loss": 3.6971, | |
| "step": 577536 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0536163415910896e-05, | |
| "loss": 3.7092, | |
| "step": 578048 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0527777468400376e-05, | |
| "loss": 3.7158, | |
| "step": 578560 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0519391520889856e-05, | |
| "loss": 3.7048, | |
| "step": 579072 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.0511021952183065e-05, | |
| "loss": 3.7121, | |
| "step": 579584 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0502636004672545e-05, | |
| "loss": 3.6973, | |
| "step": 580096 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0494250057162025e-05, | |
| "loss": 3.712, | |
| "step": 580608 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0485864109651505e-05, | |
| "loss": 3.7022, | |
| "step": 581120 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0477478162140985e-05, | |
| "loss": 3.7043, | |
| "step": 581632 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0469092214630465e-05, | |
| "loss": 3.7133, | |
| "step": 582144 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0460722645923674e-05, | |
| "loss": 3.7172, | |
| "step": 582656 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0452336698413154e-05, | |
| "loss": 3.7194, | |
| "step": 583168 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.044395075090264e-05, | |
| "loss": 3.6971, | |
| "step": 583680 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.043556480339212e-05, | |
| "loss": 3.7079, | |
| "step": 584192 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.04271788558816e-05, | |
| "loss": 3.6994, | |
| "step": 584704 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.041880928717481e-05, | |
| "loss": 3.7111, | |
| "step": 585216 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.041042333966429e-05, | |
| "loss": 3.7037, | |
| "step": 585728 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.040203739215377e-05, | |
| "loss": 3.7105, | |
| "step": 586240 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.039365144464325e-05, | |
| "loss": 3.699, | |
| "step": 586752 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.038526549713273e-05, | |
| "loss": 3.7047, | |
| "step": 587264 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.037689592842594e-05, | |
| "loss": 3.6993, | |
| "step": 587776 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.036850998091542e-05, | |
| "loss": 3.7039, | |
| "step": 588288 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.03601240334049e-05, | |
| "loss": 3.6914, | |
| "step": 588800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.035173808589438e-05, | |
| "loss": 3.7119, | |
| "step": 589312 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0343368517187594e-05, | |
| "loss": 3.702, | |
| "step": 589824 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0334982569677074e-05, | |
| "loss": 3.7038, | |
| "step": 590336 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0326596622166554e-05, | |
| "loss": 3.7027, | |
| "step": 590848 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0318210674656034e-05, | |
| "loss": 3.7055, | |
| "step": 591360 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.030984110594924e-05, | |
| "loss": 3.7097, | |
| "step": 591872 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.030145515843872e-05, | |
| "loss": 3.7026, | |
| "step": 592384 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.02930692109282e-05, | |
| "loss": 3.7088, | |
| "step": 592896 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.028468326341768e-05, | |
| "loss": 3.715, | |
| "step": 593408 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.027631369471089e-05, | |
| "loss": 3.702, | |
| "step": 593920 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.026792774720037e-05, | |
| "loss": 3.7101, | |
| "step": 594432 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.025954179968985e-05, | |
| "loss": 3.7015, | |
| "step": 594944 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.025115585217933e-05, | |
| "loss": 3.7001, | |
| "step": 595456 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.024278628347255e-05, | |
| "loss": 3.7022, | |
| "step": 595968 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.023440033596203e-05, | |
| "loss": 3.705, | |
| "step": 596480 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.022601438845151e-05, | |
| "loss": 3.6867, | |
| "step": 596992 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.021762844094099e-05, | |
| "loss": 3.7106, | |
| "step": 597504 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0209258872234197e-05, | |
| "loss": 3.7079, | |
| "step": 598016 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0200872924723677e-05, | |
| "loss": 3.71, | |
| "step": 598528 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0192486977213157e-05, | |
| "loss": 3.6986, | |
| "step": 599040 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0184101029702636e-05, | |
| "loss": 3.6954, | |
| "step": 599552 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0175715082192116e-05, | |
| "loss": 3.6985, | |
| "step": 600064 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0167345513485326e-05, | |
| "loss": 3.7093, | |
| "step": 600576 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0158959565974805e-05, | |
| "loss": 3.7031, | |
| "step": 601088 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0150573618464285e-05, | |
| "loss": 3.7095, | |
| "step": 601600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0142187670953765e-05, | |
| "loss": 3.6865, | |
| "step": 602112 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.013381810224698e-05, | |
| "loss": 3.7087, | |
| "step": 602624 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.012543215473646e-05, | |
| "loss": 3.6997, | |
| "step": 603136 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.011704620722594e-05, | |
| "loss": 3.7126, | |
| "step": 603648 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.010866025971542e-05, | |
| "loss": 3.6952, | |
| "step": 604160 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0100274312204894e-05, | |
| "loss": 3.7061, | |
| "step": 604672 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.009190474349811e-05, | |
| "loss": 3.6997, | |
| "step": 605184 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.008351879598759e-05, | |
| "loss": 3.7206, | |
| "step": 605696 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.007513284847707e-05, | |
| "loss": 3.7001, | |
| "step": 606208 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.006674690096654e-05, | |
| "loss": 3.7044, | |
| "step": 606720 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.005837733225976e-05, | |
| "loss": 3.6963, | |
| "step": 607232 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.004999138474924e-05, | |
| "loss": 3.704, | |
| "step": 607744 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.004160543723872e-05, | |
| "loss": 3.6998, | |
| "step": 608256 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.00332194897282e-05, | |
| "loss": 3.696, | |
| "step": 608768 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0024849921021415e-05, | |
| "loss": 3.7091, | |
| "step": 609280 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0016463973510895e-05, | |
| "loss": 3.6878, | |
| "step": 609792 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.000807802600037e-05, | |
| "loss": 3.7121, | |
| "step": 610304 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 3.87300968170166, | |
| "eval_runtime": 304.8147, | |
| "eval_samples_per_second": 1251.878, | |
| "eval_steps_per_second": 39.122, | |
| "step": 610560 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.999969207848985e-05, | |
| "loss": 3.6966, | |
| "step": 610816 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.999130613097933e-05, | |
| "loss": 3.6902, | |
| "step": 611328 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9982936562272544e-05, | |
| "loss": 3.7024, | |
| "step": 611840 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.997455061476202e-05, | |
| "loss": 3.6982, | |
| "step": 612352 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.99661646672515e-05, | |
| "loss": 3.7101, | |
| "step": 612864 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.995777871974098e-05, | |
| "loss": 3.697, | |
| "step": 613376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.994940915103419e-05, | |
| "loss": 3.7004, | |
| "step": 613888 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.994102320352367e-05, | |
| "loss": 3.6986, | |
| "step": 614400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.993263725601315e-05, | |
| "loss": 3.6946, | |
| "step": 614912 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.992425130850263e-05, | |
| "loss": 3.6995, | |
| "step": 615424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.991588173979584e-05, | |
| "loss": 3.699, | |
| "step": 615936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.990749579228532e-05, | |
| "loss": 3.7005, | |
| "step": 616448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.989912622357854e-05, | |
| "loss": 3.7064, | |
| "step": 616960 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.989074027606802e-05, | |
| "loss": 3.6816, | |
| "step": 617472 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.988235432855749e-05, | |
| "loss": 3.694, | |
| "step": 617984 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.987396838104697e-05, | |
| "loss": 3.6799, | |
| "step": 618496 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.986558243353645e-05, | |
| "loss": 3.6918, | |
| "step": 619008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.985719648602593e-05, | |
| "loss": 3.6944, | |
| "step": 619520 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.984881053851541e-05, | |
| "loss": 3.6865, | |
| "step": 620032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.984042459100489e-05, | |
| "loss": 3.711, | |
| "step": 620544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.983203864349437e-05, | |
| "loss": 3.705, | |
| "step": 621056 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9823669074787586e-05, | |
| "loss": 3.7038, | |
| "step": 621568 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9815283127277066e-05, | |
| "loss": 3.6943, | |
| "step": 622080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9806897179766546e-05, | |
| "loss": 3.6947, | |
| "step": 622592 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9798511232256026e-05, | |
| "loss": 3.6997, | |
| "step": 623104 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9790125284745506e-05, | |
| "loss": 3.6926, | |
| "step": 623616 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9781755716038715e-05, | |
| "loss": 3.6923, | |
| "step": 624128 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9773369768528195e-05, | |
| "loss": 3.6992, | |
| "step": 624640 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9764983821017675e-05, | |
| "loss": 3.6825, | |
| "step": 625152 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.9756597873507155e-05, | |
| "loss": 3.6838, | |
| "step": 625664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9748211925996635e-05, | |
| "loss": 3.6945, | |
| "step": 626176 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9739842357289844e-05, | |
| "loss": 3.6985, | |
| "step": 626688 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9731456409779324e-05, | |
| "loss": 3.6927, | |
| "step": 627200 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.972307046226881e-05, | |
| "loss": 3.7062, | |
| "step": 627712 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.971468451475829e-05, | |
| "loss": 3.6858, | |
| "step": 628224 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.97063149460515e-05, | |
| "loss": 3.6875, | |
| "step": 628736 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.969792899854098e-05, | |
| "loss": 3.6935, | |
| "step": 629248 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.968954305103046e-05, | |
| "loss": 3.6815, | |
| "step": 629760 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.968115710351994e-05, | |
| "loss": 3.6899, | |
| "step": 630272 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.967278753481315e-05, | |
| "loss": 3.6813, | |
| "step": 630784 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.966440158730263e-05, | |
| "loss": 3.6878, | |
| "step": 631296 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.965601563979211e-05, | |
| "loss": 3.6915, | |
| "step": 631808 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.964762969228159e-05, | |
| "loss": 3.6934, | |
| "step": 632320 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.96392601235748e-05, | |
| "loss": 3.6924, | |
| "step": 632832 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.963087417606428e-05, | |
| "loss": 3.689, | |
| "step": 633344 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9622488228553764e-05, | |
| "loss": 3.6866, | |
| "step": 633856 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9614102281043244e-05, | |
| "loss": 3.6881, | |
| "step": 634368 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9605732712336454e-05, | |
| "loss": 3.689, | |
| "step": 634880 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9597346764825933e-05, | |
| "loss": 3.6655, | |
| "step": 635392 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9588960817315413e-05, | |
| "loss": 3.6996, | |
| "step": 635904 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9580574869804893e-05, | |
| "loss": 3.6802, | |
| "step": 636416 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.957218892229437e-05, | |
| "loss": 3.6951, | |
| "step": 636928 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.956381935358758e-05, | |
| "loss": 3.6798, | |
| "step": 637440 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.955543340607706e-05, | |
| "loss": 3.6765, | |
| "step": 637952 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.954704745856654e-05, | |
| "loss": 3.6752, | |
| "step": 638464 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.953866151105602e-05, | |
| "loss": 3.6781, | |
| "step": 638976 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.95302755635455e-05, | |
| "loss": 3.6959, | |
| "step": 639488 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.952190599483872e-05, | |
| "loss": 3.6788, | |
| "step": 640000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.95135200473282e-05, | |
| "loss": 3.6981, | |
| "step": 640512 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.950513409981768e-05, | |
| "loss": 3.6871, | |
| "step": 641024 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.949674815230715e-05, | |
| "loss": 3.6661, | |
| "step": 641536 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.948836220479663e-05, | |
| "loss": 3.6873, | |
| "step": 642048 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.947999263608985e-05, | |
| "loss": 3.6793, | |
| "step": 642560 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.947160668857933e-05, | |
| "loss": 3.6662, | |
| "step": 643072 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.94632207410688e-05, | |
| "loss": 3.6927, | |
| "step": 643584 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.945483479355828e-05, | |
| "loss": 3.6862, | |
| "step": 644096 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9446465224851496e-05, | |
| "loss": 3.6759, | |
| "step": 644608 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.943807927734097e-05, | |
| "loss": 3.6806, | |
| "step": 645120 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9429693329830456e-05, | |
| "loss": 3.6637, | |
| "step": 645632 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9421307382319936e-05, | |
| "loss": 3.6788, | |
| "step": 646144 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.941293781361315e-05, | |
| "loss": 3.6833, | |
| "step": 646656 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9404551866102625e-05, | |
| "loss": 3.6823, | |
| "step": 647168 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9396165918592105e-05, | |
| "loss": 3.6947, | |
| "step": 647680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9387779971081585e-05, | |
| "loss": 3.6798, | |
| "step": 648192 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.93794104023748e-05, | |
| "loss": 3.6926, | |
| "step": 648704 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9371024454864274e-05, | |
| "loss": 3.679, | |
| "step": 649216 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9362638507353754e-05, | |
| "loss": 3.69, | |
| "step": 649728 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9354252559843234e-05, | |
| "loss": 3.6726, | |
| "step": 650240 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.9345866612332714e-05, | |
| "loss": 3.673, | |
| "step": 650752 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.933749704362592e-05, | |
| "loss": 3.6889, | |
| "step": 651264 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.932911109611541e-05, | |
| "loss": 3.6928, | |
| "step": 651776 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.932072514860489e-05, | |
| "loss": 3.6833, | |
| "step": 652288 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.931233920109437e-05, | |
| "loss": 3.6856, | |
| "step": 652800 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.930396963238758e-05, | |
| "loss": 3.6606, | |
| "step": 653312 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.929558368487706e-05, | |
| "loss": 3.6698, | |
| "step": 653824 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.928719773736654e-05, | |
| "loss": 3.6835, | |
| "step": 654336 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.927881178985602e-05, | |
| "loss": 3.6854, | |
| "step": 654848 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.927044222114923e-05, | |
| "loss": 3.6764, | |
| "step": 655360 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 3.926205627363871e-05, | |
| "loss": 3.6831, | |
| "step": 655872 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.925367032612819e-05, | |
| "loss": 3.672, | |
| "step": 656384 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.924528437861767e-05, | |
| "loss": 3.6806, | |
| "step": 656896 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9236914809910877e-05, | |
| "loss": 3.6736, | |
| "step": 657408 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.922852886240036e-05, | |
| "loss": 3.6782, | |
| "step": 657920 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.922014291488984e-05, | |
| "loss": 3.6863, | |
| "step": 658432 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.921175696737932e-05, | |
| "loss": 3.6898, | |
| "step": 658944 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.92033710198688e-05, | |
| "loss": 3.693, | |
| "step": 659456 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.919498507235828e-05, | |
| "loss": 3.6703, | |
| "step": 659968 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.918661550365149e-05, | |
| "loss": 3.681, | |
| "step": 660480 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.917822955614097e-05, | |
| "loss": 3.6756, | |
| "step": 660992 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.916984360863045e-05, | |
| "loss": 3.6823, | |
| "step": 661504 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.916145766111993e-05, | |
| "loss": 3.6766, | |
| "step": 662016 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.915308809241314e-05, | |
| "loss": 3.6799, | |
| "step": 662528 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.914470214490262e-05, | |
| "loss": 3.6745, | |
| "step": 663040 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.91363161973921e-05, | |
| "loss": 3.6742, | |
| "step": 663552 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.912793024988158e-05, | |
| "loss": 3.6733, | |
| "step": 664064 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.911954430237106e-05, | |
| "loss": 3.6755, | |
| "step": 664576 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.911117473366428e-05, | |
| "loss": 3.6636, | |
| "step": 665088 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.910278878615376e-05, | |
| "loss": 3.6835, | |
| "step": 665600 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.909440283864324e-05, | |
| "loss": 3.6772, | |
| "step": 666112 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.908601689113272e-05, | |
| "loss": 3.6722, | |
| "step": 666624 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9077647322425926e-05, | |
| "loss": 3.6797, | |
| "step": 667136 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9069261374915406e-05, | |
| "loss": 3.6734, | |
| "step": 667648 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9060875427404886e-05, | |
| "loss": 3.6842, | |
| "step": 668160 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9052489479894366e-05, | |
| "loss": 3.6738, | |
| "step": 668672 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9044119911187575e-05, | |
| "loss": 3.6852, | |
| "step": 669184 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9035733963677055e-05, | |
| "loss": 3.6887, | |
| "step": 669696 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9027348016166535e-05, | |
| "loss": 3.6787, | |
| "step": 670208 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.9018962068656015e-05, | |
| "loss": 3.6789, | |
| "step": 670720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.901059249994923e-05, | |
| "loss": 3.6757, | |
| "step": 671232 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.900220655243871e-05, | |
| "loss": 3.6711, | |
| "step": 671744 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.899382060492819e-05, | |
| "loss": 3.6763, | |
| "step": 672256 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.898543465741767e-05, | |
| "loss": 3.6771, | |
| "step": 672768 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.897706508871088e-05, | |
| "loss": 3.6575, | |
| "step": 673280 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.896867914120036e-05, | |
| "loss": 3.6862, | |
| "step": 673792 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.896029319368984e-05, | |
| "loss": 3.6799, | |
| "step": 674304 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.895190724617932e-05, | |
| "loss": 3.6813, | |
| "step": 674816 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.894353767747253e-05, | |
| "loss": 3.6737, | |
| "step": 675328 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.893515172996201e-05, | |
| "loss": 3.6697, | |
| "step": 675840 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.892676578245149e-05, | |
| "loss": 3.6709, | |
| "step": 676352 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.891837983494097e-05, | |
| "loss": 3.681, | |
| "step": 676864 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.8910010266234184e-05, | |
| "loss": 3.6786, | |
| "step": 677376 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.8901624318723664e-05, | |
| "loss": 3.6848, | |
| "step": 677888 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.8893238371213144e-05, | |
| "loss": 3.656, | |
| "step": 678400 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.8884852423702624e-05, | |
| "loss": 3.6832, | |
| "step": 678912 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.8876466476192104e-05, | |
| "loss": 3.6741, | |
| "step": 679424 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.886809690748531e-05, | |
| "loss": 3.6874, | |
| "step": 679936 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.885971095997479e-05, | |
| "loss": 3.6686, | |
| "step": 680448 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.885132501246427e-05, | |
| "loss": 3.6757, | |
| "step": 680960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.884293906495375e-05, | |
| "loss": 3.675, | |
| "step": 681472 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.883456949624696e-05, | |
| "loss": 3.6917, | |
| "step": 681984 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.882618354873644e-05, | |
| "loss": 3.6728, | |
| "step": 682496 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.881779760122592e-05, | |
| "loss": 3.6799, | |
| "step": 683008 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.88094116537154e-05, | |
| "loss": 3.6686, | |
| "step": 683520 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.880104208500862e-05, | |
| "loss": 3.6829, | |
| "step": 684032 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.87926561374981e-05, | |
| "loss": 3.6694, | |
| "step": 684544 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.878427018998758e-05, | |
| "loss": 3.6692, | |
| "step": 685056 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.877588424247705e-05, | |
| "loss": 3.6791, | |
| "step": 685568 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.876749829496653e-05, | |
| "loss": 3.6643, | |
| "step": 686080 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.875912872625975e-05, | |
| "loss": 3.6823, | |
| "step": 686592 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 3.8686816692352295, | |
| "eval_runtime": 311.4895, | |
| "eval_samples_per_second": 1225.053, | |
| "eval_steps_per_second": 38.284, | |
| "step": 686880 | |
| } | |
| ], | |
| "logging_steps": 512, | |
| "max_steps": 3052726, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 10, | |
| "total_flos": 4.754533877575373e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
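
The JSON above is the complete `trainer_state.json` for this run. As a minimal sketch of how one might inspect it (not part of the original log; the local file path is an assumption), the snippet below loads the state and separates the training-loss points from the periodic `eval_loss` entries, using only keys that appear in the log itself (`log_history`, `loss`, `eval_loss`, `step`, `best_metric`, `global_step`):

```python
import json

# Assumed local path; adjust to wherever this trainer_state.json is saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Regular logging entries carry "loss"; evaluation entries carry "eval_loss".
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

# Summarize the run: best recorded metric and the final few evaluations.
print(f"best eval loss {state['best_metric']:.4f} at global step {state['global_step']}")
print(f"{len(train)} train-loss points, {len(evals)} eval points")
print("last evals:", evals[-3:])
```

Under that assumption, the last printed eval pair would be `(686880, 3.8686816692352295)`, matching the `best_metric` and `best_model_checkpoint` recorded in the header of this file.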