| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.9999980231880047, | |
| "global_step": 252932, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 9.980231840969114e-05, | |
| "loss": 3.4601, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 9.960463681938229e-05, | |
| "loss": 3.3095, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.940695522907343e-05, | |
| "loss": 3.2164, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.920927363876458e-05, | |
| "loss": 3.1537, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.901159204845572e-05, | |
| "loss": 3.111, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.881391045814687e-05, | |
| "loss": 3.0745, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.8616228867838e-05, | |
| "loss": 3.042, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 9.841854727752914e-05, | |
| "loss": 3.031, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 9.822086568722029e-05, | |
| "loss": 3.0045, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 9.802318409691143e-05, | |
| "loss": 2.9784, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 9.782550250660258e-05, | |
| "loss": 2.9605, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 9.762782091629372e-05, | |
| "loss": 2.9598, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 9.743013932598487e-05, | |
| "loss": 2.9421, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 9.7232457735676e-05, | |
| "loss": 2.9273, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 9.703477614536714e-05, | |
| "loss": 2.9111, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 9.683709455505828e-05, | |
| "loss": 2.9034, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 9.663941296474943e-05, | |
| "loss": 2.8881, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 9.644173137444056e-05, | |
| "loss": 2.8827, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 9.62440497841317e-05, | |
| "loss": 2.8804, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 9.604636819382285e-05, | |
| "loss": 2.8645, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 9.584868660351399e-05, | |
| "loss": 2.8559, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 9.565100501320512e-05, | |
| "loss": 2.8503, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 9.545332342289628e-05, | |
| "loss": 2.8343, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 9.525564183258741e-05, | |
| "loss": 2.8318, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 9.505796024227856e-05, | |
| "loss": 2.833, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 9.48602786519697e-05, | |
| "loss": 2.8119, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 9.466259706166085e-05, | |
| "loss": 2.8128, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.446491547135199e-05, | |
| "loss": 2.8052, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.426723388104312e-05, | |
| "loss": 2.7941, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.406955229073427e-05, | |
| "loss": 2.794, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.387187070042541e-05, | |
| "loss": 2.7877, | |
| "step": 15500 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.367418911011656e-05, | |
| "loss": 2.7838, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 9.34765075198077e-05, | |
| "loss": 2.7682, | |
| "step": 16500 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 9.327882592949885e-05, | |
| "loss": 2.7722, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 9.308114433918999e-05, | |
| "loss": 2.7626, | |
| "step": 17500 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 9.288346274888112e-05, | |
| "loss": 2.7602, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 9.268578115857227e-05, | |
| "loss": 2.7625, | |
| "step": 18500 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 9.248809956826341e-05, | |
| "loss": 2.7533, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 9.229041797795456e-05, | |
| "loss": 2.7464, | |
| "step": 19500 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 9.20927363876457e-05, | |
| "loss": 2.7335, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 9.189505479733683e-05, | |
| "loss": 2.7307, | |
| "step": 20500 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 9.169737320702798e-05, | |
| "loss": 2.7359, | |
| "step": 21000 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 9.149969161671912e-05, | |
| "loss": 2.7351, | |
| "step": 21500 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 9.130201002641027e-05, | |
| "loss": 2.7193, | |
| "step": 22000 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 9.110432843610141e-05, | |
| "loss": 2.7194, | |
| "step": 22500 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 9.090664684579256e-05, | |
| "loss": 2.7235, | |
| "step": 23000 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 9.07089652554837e-05, | |
| "loss": 2.7255, | |
| "step": 23500 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 9.051128366517483e-05, | |
| "loss": 2.7085, | |
| "step": 24000 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 9.031360207486598e-05, | |
| "loss": 2.7122, | |
| "step": 24500 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 9.011592048455712e-05, | |
| "loss": 2.7037, | |
| "step": 25000 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "eval_loss": 2.510829210281372, | |
| "eval_runtime": 324.1549, | |
| "eval_samples_per_second": 308.495, | |
| "eval_steps_per_second": 38.562, | |
| "step": 25000 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 8.991823889424826e-05, | |
| "loss": 2.6986, | |
| "step": 25500 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 8.97205573039394e-05, | |
| "loss": 2.6976, | |
| "step": 26000 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 8.952287571363054e-05, | |
| "loss": 2.6908, | |
| "step": 26500 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 8.932519412332168e-05, | |
| "loss": 2.7012, | |
| "step": 27000 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 8.912751253301282e-05, | |
| "loss": 2.6839, | |
| "step": 27500 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 8.892983094270397e-05, | |
| "loss": 2.6843, | |
| "step": 28000 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 8.87321493523951e-05, | |
| "loss": 2.6721, | |
| "step": 28500 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 8.853446776208626e-05, | |
| "loss": 2.6877, | |
| "step": 29000 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 8.83367861717774e-05, | |
| "loss": 2.6742, | |
| "step": 29500 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 8.813910458146854e-05, | |
| "loss": 2.6696, | |
| "step": 30000 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 8.794142299115968e-05, | |
| "loss": 2.6703, | |
| "step": 30500 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 8.774374140085082e-05, | |
| "loss": 2.6715, | |
| "step": 31000 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 8.754605981054197e-05, | |
| "loss": 2.6629, | |
| "step": 31500 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 8.73483782202331e-05, | |
| "loss": 2.6651, | |
| "step": 32000 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 8.715069662992426e-05, | |
| "loss": 2.6481, | |
| "step": 32500 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 8.695301503961539e-05, | |
| "loss": 2.656, | |
| "step": 33000 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 8.675533344930654e-05, | |
| "loss": 2.6485, | |
| "step": 33500 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 8.655765185899768e-05, | |
| "loss": 2.6547, | |
| "step": 34000 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 8.635997026868882e-05, | |
| "loss": 2.6508, | |
| "step": 34500 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 8.616228867837997e-05, | |
| "loss": 2.6453, | |
| "step": 35000 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 8.59646070880711e-05, | |
| "loss": 2.6458, | |
| "step": 35500 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 8.576692549776225e-05, | |
| "loss": 2.6443, | |
| "step": 36000 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 8.556924390745339e-05, | |
| "loss": 2.6434, | |
| "step": 36500 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 8.537156231714454e-05, | |
| "loss": 2.638, | |
| "step": 37000 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 8.517388072683568e-05, | |
| "loss": 2.6238, | |
| "step": 37500 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 8.497619913652682e-05, | |
| "loss": 2.6288, | |
| "step": 38000 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 8.477851754621797e-05, | |
| "loss": 2.6309, | |
| "step": 38500 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 8.45808359559091e-05, | |
| "loss": 2.6227, | |
| "step": 39000 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 8.438315436560025e-05, | |
| "loss": 2.6248, | |
| "step": 39500 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 8.418547277529139e-05, | |
| "loss": 2.6211, | |
| "step": 40000 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 8.398779118498254e-05, | |
| "loss": 2.6227, | |
| "step": 40500 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 8.379010959467368e-05, | |
| "loss": 2.6152, | |
| "step": 41000 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 8.359242800436481e-05, | |
| "loss": 2.6184, | |
| "step": 41500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 8.339474641405595e-05, | |
| "loss": 2.6167, | |
| "step": 42000 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 8.31970648237471e-05, | |
| "loss": 2.6119, | |
| "step": 42500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 8.299938323343824e-05, | |
| "loss": 2.619, | |
| "step": 43000 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 8.280170164312938e-05, | |
| "loss": 2.6119, | |
| "step": 43500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 8.260402005282053e-05, | |
| "loss": 2.6075, | |
| "step": 44000 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 8.240633846251166e-05, | |
| "loss": 2.6067, | |
| "step": 44500 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 8.22086568722028e-05, | |
| "loss": 2.6144, | |
| "step": 45000 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 8.201097528189395e-05, | |
| "loss": 2.5938, | |
| "step": 45500 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 8.181329369158509e-05, | |
| "loss": 2.5939, | |
| "step": 46000 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 8.161561210127624e-05, | |
| "loss": 2.596, | |
| "step": 46500 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 8.141793051096737e-05, | |
| "loss": 2.5901, | |
| "step": 47000 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 8.122024892065852e-05, | |
| "loss": 2.5888, | |
| "step": 47500 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 8.102256733034966e-05, | |
| "loss": 2.5927, | |
| "step": 48000 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 8.08248857400408e-05, | |
| "loss": 2.5894, | |
| "step": 48500 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 8.062720414973195e-05, | |
| "loss": 2.5829, | |
| "step": 49000 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 8.042952255942309e-05, | |
| "loss": 2.5738, | |
| "step": 49500 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 8.023184096911424e-05, | |
| "loss": 2.5794, | |
| "step": 50000 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "eval_loss": 2.399911880493164, | |
| "eval_runtime": 187.1469, | |
| "eval_samples_per_second": 534.34, | |
| "eval_steps_per_second": 66.792, | |
| "step": 50000 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 8.003415937880537e-05, | |
| "loss": 2.5825, | |
| "step": 50500 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 7.983647778849652e-05, | |
| "loss": 2.5859, | |
| "step": 51000 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 7.963879619818766e-05, | |
| "loss": 2.5812, | |
| "step": 51500 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 7.94411146078788e-05, | |
| "loss": 2.5763, | |
| "step": 52000 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 7.924343301756995e-05, | |
| "loss": 2.5803, | |
| "step": 52500 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 7.904575142726108e-05, | |
| "loss": 2.5743, | |
| "step": 53000 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 7.884806983695224e-05, | |
| "loss": 2.572, | |
| "step": 53500 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 7.865038824664337e-05, | |
| "loss": 2.5717, | |
| "step": 54000 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 7.845270665633452e-05, | |
| "loss": 2.5614, | |
| "step": 54500 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 7.825502506602566e-05, | |
| "loss": 2.5678, | |
| "step": 55000 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 7.80573434757168e-05, | |
| "loss": 2.5539, | |
| "step": 55500 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 7.785966188540795e-05, | |
| "loss": 2.5632, | |
| "step": 56000 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 7.766198029509908e-05, | |
| "loss": 2.5617, | |
| "step": 56500 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 7.746429870479023e-05, | |
| "loss": 2.5533, | |
| "step": 57000 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 7.726661711448137e-05, | |
| "loss": 2.5501, | |
| "step": 57500 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 7.706893552417251e-05, | |
| "loss": 2.5566, | |
| "step": 58000 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 7.687125393386365e-05, | |
| "loss": 2.5597, | |
| "step": 58500 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 7.66735723435548e-05, | |
| "loss": 2.5513, | |
| "step": 59000 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 7.647589075324593e-05, | |
| "loss": 2.5424, | |
| "step": 59500 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 7.627820916293707e-05, | |
| "loss": 2.5553, | |
| "step": 60000 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 7.608052757262822e-05, | |
| "loss": 2.5458, | |
| "step": 60500 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 7.588284598231936e-05, | |
| "loss": 2.5371, | |
| "step": 61000 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 7.56851643920105e-05, | |
| "loss": 2.5474, | |
| "step": 61500 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 7.548748280170164e-05, | |
| "loss": 2.539, | |
| "step": 62000 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 7.528980121139278e-05, | |
| "loss": 2.5432, | |
| "step": 62500 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 7.509211962108393e-05, | |
| "loss": 2.5308, | |
| "step": 63000 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 7.489443803077507e-05, | |
| "loss": 2.5442, | |
| "step": 63500 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 7.469675644046622e-05, | |
| "loss": 2.5345, | |
| "step": 64000 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 7.449907485015736e-05, | |
| "loss": 2.5381, | |
| "step": 64500 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 7.430139325984849e-05, | |
| "loss": 2.5296, | |
| "step": 65000 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 7.410371166953964e-05, | |
| "loss": 2.5322, | |
| "step": 65500 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 7.390603007923078e-05, | |
| "loss": 2.5296, | |
| "step": 66000 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 7.370834848892193e-05, | |
| "loss": 2.5342, | |
| "step": 66500 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 7.351066689861307e-05, | |
| "loss": 2.526, | |
| "step": 67000 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 7.331298530830422e-05, | |
| "loss": 2.521, | |
| "step": 67500 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 7.311530371799535e-05, | |
| "loss": 2.521, | |
| "step": 68000 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 7.291762212768649e-05, | |
| "loss": 2.5145, | |
| "step": 68500 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 7.271994053737764e-05, | |
| "loss": 2.5204, | |
| "step": 69000 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 7.252225894706878e-05, | |
| "loss": 2.5146, | |
| "step": 69500 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 7.232457735675993e-05, | |
| "loss": 2.5164, | |
| "step": 70000 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 7.212689576645107e-05, | |
| "loss": 2.5156, | |
| "step": 70500 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 7.192921417614222e-05, | |
| "loss": 2.5119, | |
| "step": 71000 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 7.173153258583335e-05, | |
| "loss": 2.5172, | |
| "step": 71500 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 7.153385099552449e-05, | |
| "loss": 2.4984, | |
| "step": 72000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 7.133616940521564e-05, | |
| "loss": 2.5078, | |
| "step": 72500 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 7.113848781490678e-05, | |
| "loss": 2.5097, | |
| "step": 73000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 7.094080622459793e-05, | |
| "loss": 2.5116, | |
| "step": 73500 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 7.074312463428906e-05, | |
| "loss": 2.5024, | |
| "step": 74000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 7.05454430439802e-05, | |
| "loss": 2.5133, | |
| "step": 74500 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 7.034776145367135e-05, | |
| "loss": 2.5061, | |
| "step": 75000 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "eval_loss": 2.319611072540283, | |
| "eval_runtime": 187.4687, | |
| "eval_samples_per_second": 533.422, | |
| "eval_steps_per_second": 66.678, | |
| "step": 75000 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 7.015007986336249e-05, | |
| "loss": 2.4962, | |
| "step": 75500 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 6.995239827305363e-05, | |
| "loss": 2.4973, | |
| "step": 76000 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 6.975471668274476e-05, | |
| "loss": 2.497, | |
| "step": 76500 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 6.955703509243591e-05, | |
| "loss": 2.5022, | |
| "step": 77000 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 6.935935350212705e-05, | |
| "loss": 2.4994, | |
| "step": 77500 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 6.91616719118182e-05, | |
| "loss": 2.4915, | |
| "step": 78000 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 6.896399032150934e-05, | |
| "loss": 2.4979, | |
| "step": 78500 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 6.876630873120047e-05, | |
| "loss": 2.4881, | |
| "step": 79000 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 6.856862714089163e-05, | |
| "loss": 2.4981, | |
| "step": 79500 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 6.837094555058276e-05, | |
| "loss": 2.4873, | |
| "step": 80000 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 6.817326396027391e-05, | |
| "loss": 2.4886, | |
| "step": 80500 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 6.797558236996505e-05, | |
| "loss": 2.4869, | |
| "step": 81000 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 6.77779007796562e-05, | |
| "loss": 2.4893, | |
| "step": 81500 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 6.758021918934734e-05, | |
| "loss": 2.4847, | |
| "step": 82000 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 6.738253759903847e-05, | |
| "loss": 2.4856, | |
| "step": 82500 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 6.718485600872962e-05, | |
| "loss": 2.4811, | |
| "step": 83000 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 6.698717441842076e-05, | |
| "loss": 2.4909, | |
| "step": 83500 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 6.678949282811191e-05, | |
| "loss": 2.477, | |
| "step": 84000 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 6.659181123780305e-05, | |
| "loss": 2.4754, | |
| "step": 84500 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 6.63941296474942e-05, | |
| "loss": 2.4826, | |
| "step": 85000 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 6.619644805718534e-05, | |
| "loss": 2.4672, | |
| "step": 85500 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 6.599876646687647e-05, | |
| "loss": 2.4728, | |
| "step": 86000 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 6.580108487656762e-05, | |
| "loss": 2.4758, | |
| "step": 86500 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 6.560340328625876e-05, | |
| "loss": 2.471, | |
| "step": 87000 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 6.540572169594991e-05, | |
| "loss": 2.4684, | |
| "step": 87500 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 6.520804010564105e-05, | |
| "loss": 2.47, | |
| "step": 88000 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 6.50103585153322e-05, | |
| "loss": 2.4677, | |
| "step": 88500 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 6.481267692502333e-05, | |
| "loss": 2.4696, | |
| "step": 89000 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 6.461499533471447e-05, | |
| "loss": 2.4647, | |
| "step": 89500 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 6.441731374440562e-05, | |
| "loss": 2.4599, | |
| "step": 90000 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 6.421963215409676e-05, | |
| "loss": 2.4605, | |
| "step": 90500 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 6.402195056378791e-05, | |
| "loss": 2.4648, | |
| "step": 91000 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 6.382426897347905e-05, | |
| "loss": 2.4569, | |
| "step": 91500 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 6.362658738317018e-05, | |
| "loss": 2.4677, | |
| "step": 92000 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 6.342890579286132e-05, | |
| "loss": 2.4602, | |
| "step": 92500 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 6.323122420255247e-05, | |
| "loss": 2.4582, | |
| "step": 93000 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 6.303354261224361e-05, | |
| "loss": 2.4601, | |
| "step": 93500 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 6.283586102193474e-05, | |
| "loss": 2.4598, | |
| "step": 94000 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 6.26381794316259e-05, | |
| "loss": 2.4597, | |
| "step": 94500 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 6.244049784131703e-05, | |
| "loss": 2.4466, | |
| "step": 95000 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 6.224281625100817e-05, | |
| "loss": 2.4488, | |
| "step": 95500 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 6.204513466069932e-05, | |
| "loss": 2.4557, | |
| "step": 96000 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 6.184745307039046e-05, | |
| "loss": 2.4555, | |
| "step": 96500 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 6.16497714800816e-05, | |
| "loss": 2.4514, | |
| "step": 97000 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 6.145208988977274e-05, | |
| "loss": 2.4436, | |
| "step": 97500 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 6.12544082994639e-05, | |
| "loss": 2.4497, | |
| "step": 98000 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 6.105672670915503e-05, | |
| "loss": 2.4527, | |
| "step": 98500 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 6.085904511884617e-05, | |
| "loss": 2.4448, | |
| "step": 99000 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 6.066136352853732e-05, | |
| "loss": 2.4407, | |
| "step": 99500 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 6.0463681938228455e-05, | |
| "loss": 2.4431, | |
| "step": 100000 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "eval_loss": 2.261350393295288, | |
| "eval_runtime": 191.0766, | |
| "eval_samples_per_second": 523.35, | |
| "eval_steps_per_second": 65.419, | |
| "step": 100000 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 6.0266000347919605e-05, | |
| "loss": 2.441, | |
| "step": 100500 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 6.006831875761074e-05, | |
| "loss": 2.4422, | |
| "step": 101000 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 5.987063716730189e-05, | |
| "loss": 2.4335, | |
| "step": 101500 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 5.967295557699303e-05, | |
| "loss": 2.4346, | |
| "step": 102000 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 5.9475273986684166e-05, | |
| "loss": 2.4278, | |
| "step": 102500 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 5.927759239637532e-05, | |
| "loss": 2.4443, | |
| "step": 103000 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 5.9079910806066454e-05, | |
| "loss": 2.4433, | |
| "step": 103500 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 5.88822292157576e-05, | |
| "loss": 2.4365, | |
| "step": 104000 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 5.868454762544874e-05, | |
| "loss": 2.425, | |
| "step": 104500 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 5.8486866035139885e-05, | |
| "loss": 2.4187, | |
| "step": 105000 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 5.828918444483102e-05, | |
| "loss": 2.4294, | |
| "step": 105500 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 5.8091502854522165e-05, | |
| "loss": 2.425, | |
| "step": 106000 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 5.789382126421331e-05, | |
| "loss": 2.4356, | |
| "step": 106500 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 5.7696139673904446e-05, | |
| "loss": 2.425, | |
| "step": 107000 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 5.7498458083595596e-05, | |
| "loss": 2.4223, | |
| "step": 107500 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 5.730077649328673e-05, | |
| "loss": 2.4156, | |
| "step": 108000 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 5.7103094902977883e-05, | |
| "loss": 2.4187, | |
| "step": 108500 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 5.690541331266902e-05, | |
| "loss": 2.4212, | |
| "step": 109000 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 5.670773172236016e-05, | |
| "loss": 2.4218, | |
| "step": 109500 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 5.651005013205131e-05, | |
| "loss": 2.417, | |
| "step": 110000 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 5.6312368541742445e-05, | |
| "loss": 2.416, | |
| "step": 110500 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 5.6114686951433595e-05, | |
| "loss": 2.4142, | |
| "step": 111000 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 5.591700536112473e-05, | |
| "loss": 2.4197, | |
| "step": 111500 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 5.5719323770815876e-05, | |
| "loss": 2.4167, | |
| "step": 112000 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 5.552164218050702e-05, | |
| "loss": 2.4136, | |
| "step": 112500 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 5.5323960590198156e-05, | |
| "loss": 2.409, | |
| "step": 113000 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 5.51262789998893e-05, | |
| "loss": 2.4108, | |
| "step": 113500 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 5.492859740958044e-05, | |
| "loss": 2.4148, | |
| "step": 114000 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 5.473091581927159e-05, | |
| "loss": 2.4143, | |
| "step": 114500 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 5.4533234228962724e-05, | |
| "loss": 2.4139, | |
| "step": 115000 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 5.4335552638653875e-05, | |
| "loss": 2.4114, | |
| "step": 115500 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 5.413787104834501e-05, | |
| "loss": 2.4064, | |
| "step": 116000 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 5.394018945803615e-05, | |
| "loss": 2.41, | |
| "step": 116500 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 5.37425078677273e-05, | |
| "loss": 2.3971, | |
| "step": 117000 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 5.3544826277418436e-05, | |
| "loss": 2.4097, | |
| "step": 117500 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 5.3347144687109586e-05, | |
| "loss": 2.3987, | |
| "step": 118000 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 5.314946309680072e-05, | |
| "loss": 2.4049, | |
| "step": 118500 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 5.2951781506491873e-05, | |
| "loss": 2.4072, | |
| "step": 119000 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 5.275409991618301e-05, | |
| "loss": 2.3864, | |
| "step": 119500 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 5.255641832587415e-05, | |
| "loss": 2.3971, | |
| "step": 120000 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 5.23587367355653e-05, | |
| "loss": 2.397, | |
| "step": 120500 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 5.2161055145256435e-05, | |
| "loss": 2.3943, | |
| "step": 121000 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 5.196337355494758e-05, | |
| "loss": 2.395, | |
| "step": 121500 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 5.1765691964638715e-05, | |
| "loss": 2.3998, | |
| "step": 122000 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 5.1568010374329866e-05, | |
| "loss": 2.3948, | |
| "step": 122500 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 5.1370328784021e-05, | |
| "loss": 2.3955, | |
| "step": 123000 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 5.117264719371214e-05, | |
| "loss": 2.39, | |
| "step": 123500 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 5.097496560340329e-05, | |
| "loss": 2.3935, | |
| "step": 124000 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 5.077728401309443e-05, | |
| "loss": 2.3905, | |
| "step": 124500 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 5.057960242278558e-05, | |
| "loss": 2.3815, | |
| "step": 125000 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "eval_loss": 2.2064666748046875, | |
| "eval_runtime": 190.7509, | |
| "eval_samples_per_second": 524.244, | |
| "eval_steps_per_second": 65.53, | |
| "step": 125000 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 5.0381920832476714e-05, | |
| "loss": 2.3912, | |
| "step": 125500 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 5.0184239242167865e-05, | |
| "loss": 2.3816, | |
| "step": 126000 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 4.9986557651859e-05, | |
| "loss": 2.3818, | |
| "step": 126500 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 4.9788876061550145e-05, | |
| "loss": 2.3937, | |
| "step": 127000 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 4.959119447124129e-05, | |
| "loss": 2.376, | |
| "step": 127500 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.9393512880932426e-05, | |
| "loss": 2.3803, | |
| "step": 128000 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.919583129062357e-05, | |
| "loss": 2.3896, | |
| "step": 128500 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.899814970031471e-05, | |
| "loss": 2.3814, | |
| "step": 129000 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.880046811000586e-05, | |
| "loss": 2.3786, | |
| "step": 129500 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.8602786519696994e-05, | |
| "loss": 2.3813, | |
| "step": 130000 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 4.840510492938814e-05, | |
| "loss": 2.381, | |
| "step": 130500 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 4.820742333907928e-05, | |
| "loss": 2.3753, | |
| "step": 131000 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 4.800974174877042e-05, | |
| "loss": 2.3792, | |
| "step": 131500 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 4.781206015846156e-05, | |
| "loss": 2.3823, | |
| "step": 132000 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 4.7614378568152705e-05, | |
| "loss": 2.3755, | |
| "step": 132500 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.741669697784385e-05, | |
| "loss": 2.3744, | |
| "step": 133000 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.721901538753499e-05, | |
| "loss": 2.3698, | |
| "step": 133500 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.7021333797226136e-05, | |
| "loss": 2.3671, | |
| "step": 134000 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.682365220691728e-05, | |
| "loss": 2.3682, | |
| "step": 134500 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.662597061660842e-05, | |
| "loss": 2.3754, | |
| "step": 135000 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 4.642828902629956e-05, | |
| "loss": 2.3734, | |
| "step": 135500 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 4.6230607435990704e-05, | |
| "loss": 2.3701, | |
| "step": 136000 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 4.603292584568185e-05, | |
| "loss": 2.3612, | |
| "step": 136500 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 4.583524425537299e-05, | |
| "loss": 2.3665, | |
| "step": 137000 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 4.5637562665064135e-05, | |
| "loss": 2.3608, | |
| "step": 137500 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 4.543988107475527e-05, | |
| "loss": 2.3632, | |
| "step": 138000 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 4.5242199484446416e-05, | |
| "loss": 2.3652, | |
| "step": 138500 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 4.504451789413755e-05, | |
| "loss": 2.3618, | |
| "step": 139000 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 4.4846836303828696e-05, | |
| "loss": 2.3618, | |
| "step": 139500 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 4.464915471351984e-05, | |
| "loss": 2.3665, | |
| "step": 140000 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 4.4451473123210984e-05, | |
| "loss": 2.3625, | |
| "step": 140500 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 4.425379153290213e-05, | |
| "loss": 2.3635, | |
| "step": 141000 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 4.4056109942593264e-05, | |
| "loss": 2.365, | |
| "step": 141500 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 4.385842835228441e-05, | |
| "loss": 2.3606, | |
| "step": 142000 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 4.366074676197555e-05, | |
| "loss": 2.3566, | |
| "step": 142500 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.3463065171666695e-05, | |
| "loss": 2.3492, | |
| "step": 143000 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.326538358135784e-05, | |
| "loss": 2.3548, | |
| "step": 143500 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.306770199104898e-05, | |
| "loss": 2.3466, | |
| "step": 144000 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.2870020400740126e-05, | |
| "loss": 2.3456, | |
| "step": 144500 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.267233881043126e-05, | |
| "loss": 2.3461, | |
| "step": 145000 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 4.247465722012241e-05, | |
| "loss": 2.35, | |
| "step": 145500 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 4.227697562981355e-05, | |
| "loss": 2.3541, | |
| "step": 146000 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 4.2079294039504694e-05, | |
| "loss": 2.3475, | |
| "step": 146500 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 4.188161244919583e-05, | |
| "loss": 2.3557, | |
| "step": 147000 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 4.1683930858886975e-05, | |
| "loss": 2.3398, | |
| "step": 147500 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 4.148624926857812e-05, | |
| "loss": 2.3409, | |
| "step": 148000 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 4.1288567678269255e-05, | |
| "loss": 2.3476, | |
| "step": 148500 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 4.10908860879604e-05, | |
| "loss": 2.3381, | |
| "step": 149000 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 4.089320449765154e-05, | |
| "loss": 2.3415, | |
| "step": 149500 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 4.0695522907342686e-05, | |
| "loss": 2.3467, | |
| "step": 150000 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "eval_loss": 2.168158531188965, | |
| "eval_runtime": 189.775, | |
| "eval_samples_per_second": 526.94, | |
| "eval_steps_per_second": 65.867, | |
| "step": 150000 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 4.049784131703383e-05, | |
| "loss": 2.337, | |
| "step": 150500 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 4.0300159726724974e-05, | |
| "loss": 2.3416, | |
| "step": 151000 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 4.010247813641612e-05, | |
| "loss": 2.3464, | |
| "step": 151500 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 3.9904796546107254e-05, | |
| "loss": 2.3419, | |
| "step": 152000 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 3.97071149557984e-05, | |
| "loss": 2.3455, | |
| "step": 152500 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 3.950943336548954e-05, | |
| "loss": 2.341, | |
| "step": 153000 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.9311751775180685e-05, | |
| "loss": 2.3386, | |
| "step": 153500 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.911407018487183e-05, | |
| "loss": 2.3374, | |
| "step": 154000 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.891638859456297e-05, | |
| "loss": 2.3338, | |
| "step": 154500 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.871870700425411e-05, | |
| "loss": 2.3376, | |
| "step": 155000 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.852102541394525e-05, | |
| "loss": 2.3428, | |
| "step": 155500 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.832334382363639e-05, | |
| "loss": 2.3384, | |
| "step": 156000 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.8125662233327534e-05, | |
| "loss": 2.3302, | |
| "step": 156500 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.792798064301868e-05, | |
| "loss": 2.3278, | |
| "step": 157000 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.773029905270982e-05, | |
| "loss": 2.3283, | |
| "step": 157500 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.7532617462400965e-05, | |
| "loss": 2.3339, | |
| "step": 158000 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.733493587209211e-05, | |
| "loss": 2.327, | |
| "step": 158500 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.7137254281783245e-05, | |
| "loss": 2.3264, | |
| "step": 159000 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.693957269147439e-05, | |
| "loss": 2.3307, | |
| "step": 159500 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.674189110116553e-05, | |
| "loss": 2.331, | |
| "step": 160000 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.6544209510856676e-05, | |
| "loss": 2.3277, | |
| "step": 160500 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.634652792054782e-05, | |
| "loss": 2.3251, | |
| "step": 161000 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.6148846330238964e-05, | |
| "loss": 2.3175, | |
| "step": 161500 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.59511647399301e-05, | |
| "loss": 2.3192, | |
| "step": 162000 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.5753483149621244e-05, | |
| "loss": 2.3174, | |
| "step": 162500 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.555580155931239e-05, | |
| "loss": 2.3218, | |
| "step": 163000 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.5358119969003525e-05, | |
| "loss": 2.3192, | |
| "step": 163500 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.516043837869467e-05, | |
| "loss": 2.3215, | |
| "step": 164000 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.496275678838581e-05, | |
| "loss": 2.3203, | |
| "step": 164500 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.4765075198076956e-05, | |
| "loss": 2.3133, | |
| "step": 165000 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.456739360776809e-05, | |
| "loss": 2.312, | |
| "step": 165500 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.4369712017459236e-05, | |
| "loss": 2.3161, | |
| "step": 166000 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.417203042715038e-05, | |
| "loss": 2.3173, | |
| "step": 166500 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.3974348836841524e-05, | |
| "loss": 2.3089, | |
| "step": 167000 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.377666724653267e-05, | |
| "loss": 2.309, | |
| "step": 167500 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.357898565622381e-05, | |
| "loss": 2.3217, | |
| "step": 168000 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.3381304065914955e-05, | |
| "loss": 2.3093, | |
| "step": 168500 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.318362247560609e-05, | |
| "loss": 2.3054, | |
| "step": 169000 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.2985940885297235e-05, | |
| "loss": 2.3117, | |
| "step": 169500 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.278825929498838e-05, | |
| "loss": 2.31, | |
| "step": 170000 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.259057770467952e-05, | |
| "loss": 2.3081, | |
| "step": 170500 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.2392896114370666e-05, | |
| "loss": 2.3143, | |
| "step": 171000 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.21952145240618e-05, | |
| "loss": 2.3133, | |
| "step": 171500 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.199753293375295e-05, | |
| "loss": 2.3057, | |
| "step": 172000 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.1799851343444084e-05, | |
| "loss": 2.3143, | |
| "step": 172500 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.160216975313523e-05, | |
| "loss": 2.303, | |
| "step": 173000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.140448816282637e-05, | |
| "loss": 2.308, | |
| "step": 173500 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.1206806572517515e-05, | |
| "loss": 2.3083, | |
| "step": 174000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.100912498220866e-05, | |
| "loss": 2.2954, | |
| "step": 174500 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.08114433918998e-05, | |
| "loss": 2.3085, | |
| "step": 175000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "eval_loss": 2.1400139331817627, | |
| "eval_runtime": 190.1966, | |
| "eval_samples_per_second": 525.772, | |
| "eval_steps_per_second": 65.721, | |
| "step": 175000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.0613761801590946e-05, | |
| "loss": 2.2955, | |
| "step": 175500 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.0416080211282083e-05, | |
| "loss": 2.2923, | |
| "step": 176000 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.0218398620973226e-05, | |
| "loss": 2.3059, | |
| "step": 176500 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.002071703066437e-05, | |
| "loss": 2.2998, | |
| "step": 177000 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 2.9823035440355514e-05, | |
| "loss": 2.2972, | |
| "step": 177500 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 2.9625353850046654e-05, | |
| "loss": 2.2964, | |
| "step": 178000 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.9427672259737798e-05, | |
| "loss": 2.2996, | |
| "step": 178500 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.9229990669428935e-05, | |
| "loss": 2.2992, | |
| "step": 179000 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.9032309079120078e-05, | |
| "loss": 2.2903, | |
| "step": 179500 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.8834627488811222e-05, | |
| "loss": 2.2928, | |
| "step": 180000 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.8636945898502366e-05, | |
| "loss": 2.3004, | |
| "step": 180500 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.843926430819351e-05, | |
| "loss": 2.3031, | |
| "step": 181000 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.8241582717884653e-05, | |
| "loss": 2.2985, | |
| "step": 181500 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.8043901127575793e-05, | |
| "loss": 2.2885, | |
| "step": 182000 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.7846219537266933e-05, | |
| "loss": 2.2954, | |
| "step": 182500 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.7648537946958074e-05, | |
| "loss": 2.2983, | |
| "step": 183000 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.7450856356649217e-05, | |
| "loss": 2.2888, | |
| "step": 183500 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.725317476634036e-05, | |
| "loss": 2.2814, | |
| "step": 184000 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.7055493176031505e-05, | |
| "loss": 2.282, | |
| "step": 184500 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.685781158572265e-05, | |
| "loss": 2.2866, | |
| "step": 185000 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.6660129995413792e-05, | |
| "loss": 2.2841, | |
| "step": 185500 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.646244840510493e-05, | |
| "loss": 2.2944, | |
| "step": 186000 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.6264766814796073e-05, | |
| "loss": 2.2881, | |
| "step": 186500 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.6067085224487213e-05, | |
| "loss": 2.282, | |
| "step": 187000 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.5869403634178357e-05, | |
| "loss": 2.2832, | |
| "step": 187500 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.56717220438695e-05, | |
| "loss": 2.2799, | |
| "step": 188000 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.5474040453560644e-05, | |
| "loss": 2.2819, | |
| "step": 188500 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.5276358863251788e-05, | |
| "loss": 2.2857, | |
| "step": 189000 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.5078677272942925e-05, | |
| "loss": 2.2719, | |
| "step": 189500 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.488099568263407e-05, | |
| "loss": 2.2748, | |
| "step": 190000 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.4683314092325212e-05, | |
| "loss": 2.2778, | |
| "step": 190500 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.4485632502016352e-05, | |
| "loss": 2.283, | |
| "step": 191000 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.4287950911707496e-05, | |
| "loss": 2.2724, | |
| "step": 191500 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.4090269321398636e-05, | |
| "loss": 2.283, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.389258773108978e-05, | |
| "loss": 2.2799, | |
| "step": 192500 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.3694906140780923e-05, | |
| "loss": 2.2738, | |
| "step": 193000 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.3497224550472064e-05, | |
| "loss": 2.2766, | |
| "step": 193500 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.3299542960163207e-05, | |
| "loss": 2.2735, | |
| "step": 194000 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.310186136985435e-05, | |
| "loss": 2.2787, | |
| "step": 194500 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.290417977954549e-05, | |
| "loss": 2.2754, | |
| "step": 195000 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.270649818923663e-05, | |
| "loss": 2.2672, | |
| "step": 195500 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.2508816598927775e-05, | |
| "loss": 2.2776, | |
| "step": 196000 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.231113500861892e-05, | |
| "loss": 2.2706, | |
| "step": 196500 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.211345341831006e-05, | |
| "loss": 2.2713, | |
| "step": 197000 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.1915771828001203e-05, | |
| "loss": 2.2693, | |
| "step": 197500 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.1718090237692347e-05, | |
| "loss": 2.2736, | |
| "step": 198000 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.152040864738349e-05, | |
| "loss": 2.2696, | |
| "step": 198500 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.132272705707463e-05, | |
| "loss": 2.2696, | |
| "step": 199000 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.112504546676577e-05, | |
| "loss": 2.2663, | |
| "step": 199500 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.0927363876456915e-05, | |
| "loss": 2.266, | |
| "step": 200000 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "eval_loss": 2.1008613109588623, | |
| "eval_runtime": 187.6272, | |
| "eval_samples_per_second": 532.972, | |
| "eval_steps_per_second": 66.621, | |
| "step": 200000 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.0729682286148055e-05, | |
| "loss": 2.2594, | |
| "step": 200500 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.05320006958392e-05, | |
| "loss": 2.2658, | |
| "step": 201000 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0334319105530342e-05, | |
| "loss": 2.2681, | |
| "step": 201500 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0136637515221486e-05, | |
| "loss": 2.2669, | |
| "step": 202000 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.9938955924912626e-05, | |
| "loss": 2.2609, | |
| "step": 202500 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.974127433460377e-05, | |
| "loss": 2.2639, | |
| "step": 203000 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.954359274429491e-05, | |
| "loss": 2.2602, | |
| "step": 203500 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.934591115398605e-05, | |
| "loss": 2.2636, | |
| "step": 204000 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.9148229563677194e-05, | |
| "loss": 2.2611, | |
| "step": 204500 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8950547973368338e-05, | |
| "loss": 2.2573, | |
| "step": 205000 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8752866383059478e-05, | |
| "loss": 2.2539, | |
| "step": 205500 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.855518479275062e-05, | |
| "loss": 2.2621, | |
| "step": 206000 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.8357503202441765e-05, | |
| "loss": 2.2614, | |
| "step": 206500 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.815982161213291e-05, | |
| "loss": 2.2549, | |
| "step": 207000 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.796214002182405e-05, | |
| "loss": 2.247, | |
| "step": 207500 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.776445843151519e-05, | |
| "loss": 2.243, | |
| "step": 208000 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7566776841206333e-05, | |
| "loss": 2.2571, | |
| "step": 208500 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.7369095250897474e-05, | |
| "loss": 2.2594, | |
| "step": 209000 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.7171413660588617e-05, | |
| "loss": 2.2589, | |
| "step": 209500 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.697373207027976e-05, | |
| "loss": 2.2585, | |
| "step": 210000 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.6776050479970905e-05, | |
| "loss": 2.2479, | |
| "step": 210500 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.6578368889662045e-05, | |
| "loss": 2.2532, | |
| "step": 211000 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.6380687299353185e-05, | |
| "loss": 2.2419, | |
| "step": 211500 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.618300570904433e-05, | |
| "loss": 2.2498, | |
| "step": 212000 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.598532411873547e-05, | |
| "loss": 2.2444, | |
| "step": 212500 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.5787642528426613e-05, | |
| "loss": 2.2498, | |
| "step": 213000 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.5589960938117756e-05, | |
| "loss": 2.2475, | |
| "step": 213500 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.5392279347808897e-05, | |
| "loss": 2.2552, | |
| "step": 214000 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.519459775750004e-05, | |
| "loss": 2.2422, | |
| "step": 214500 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.4996916167191182e-05, | |
| "loss": 2.2537, | |
| "step": 215000 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.4799234576882326e-05, | |
| "loss": 2.2395, | |
| "step": 215500 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.4601552986573466e-05, | |
| "loss": 2.242, | |
| "step": 216000 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.440387139626461e-05, | |
| "loss": 2.2437, | |
| "step": 216500 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.4206189805955752e-05, | |
| "loss": 2.2392, | |
| "step": 217000 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.4008508215646892e-05, | |
| "loss": 2.2444, | |
| "step": 217500 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.3810826625338036e-05, | |
| "loss": 2.2448, | |
| "step": 218000 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.361314503502918e-05, | |
| "loss": 2.2497, | |
| "step": 218500 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.3415463444720322e-05, | |
| "loss": 2.2489, | |
| "step": 219000 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.3217781854411462e-05, | |
| "loss": 2.2343, | |
| "step": 219500 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.3020100264102606e-05, | |
| "loss": 2.2393, | |
| "step": 220000 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.282241867379375e-05, | |
| "loss": 2.2432, | |
| "step": 220500 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.262473708348489e-05, | |
| "loss": 2.2424, | |
| "step": 221000 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.2427055493176031e-05, | |
| "loss": 2.2546, | |
| "step": 221500 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.2229373902867175e-05, | |
| "loss": 2.2374, | |
| "step": 222000 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.2031692312558317e-05, | |
| "loss": 2.2418, | |
| "step": 222500 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.1834010722249459e-05, | |
| "loss": 2.2354, | |
| "step": 223000 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.1636329131940601e-05, | |
| "loss": 2.2361, | |
| "step": 223500 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.1438647541631743e-05, | |
| "loss": 2.2432, | |
| "step": 224000 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.1240965951322887e-05, | |
| "loss": 2.2386, | |
| "step": 224500 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.1043284361014029e-05, | |
| "loss": 2.2249, | |
| "step": 225000 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "eval_loss": 2.0728375911712646, | |
| "eval_runtime": 187.3688, | |
| "eval_samples_per_second": 533.707, | |
| "eval_steps_per_second": 66.713, | |
| "step": 225000 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.0845602770705169e-05, | |
| "loss": 2.2418, | |
| "step": 225500 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.0647921180396313e-05, | |
| "loss": 2.2308, | |
| "step": 226000 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.0450239590087455e-05, | |
| "loss": 2.2325, | |
| "step": 226500 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.0252557999778598e-05, | |
| "loss": 2.2238, | |
| "step": 227000 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.0054876409469739e-05, | |
| "loss": 2.2336, | |
| "step": 227500 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 9.857194819160882e-06, | |
| "loss": 2.2374, | |
| "step": 228000 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 9.659513228852024e-06, | |
| "loss": 2.2312, | |
| "step": 228500 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.461831638543166e-06, | |
| "loss": 2.2274, | |
| "step": 229000 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.264150048234308e-06, | |
| "loss": 2.2265, | |
| "step": 229500 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.06646845792545e-06, | |
| "loss": 2.2343, | |
| "step": 230000 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 8.868786867616594e-06, | |
| "loss": 2.2292, | |
| "step": 230500 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 8.671105277307736e-06, | |
| "loss": 2.2292, | |
| "step": 231000 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.473423686998878e-06, | |
| "loss": 2.2317, | |
| "step": 231500 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.27574209669002e-06, | |
| "loss": 2.2348, | |
| "step": 232000 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.078060506381162e-06, | |
| "loss": 2.2243, | |
| "step": 232500 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.880378916072305e-06, | |
| "loss": 2.2302, | |
| "step": 233000 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.682697325763447e-06, | |
| "loss": 2.2187, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.4850157354545885e-06, | |
| "loss": 2.2321, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.287334145145731e-06, | |
| "loss": 2.2179, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.089652554836873e-06, | |
| "loss": 2.2292, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.891970964528016e-06, | |
| "loss": 2.2253, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.694289374219158e-06, | |
| "loss": 2.2198, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 6.496607783910301e-06, | |
| "loss": 2.222, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 6.298926193601443e-06, | |
| "loss": 2.2334, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 6.101244603292585e-06, | |
| "loss": 2.2309, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.903563012983728e-06, | |
| "loss": 2.223, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.705881422674869e-06, | |
| "loss": 2.2223, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.508199832366012e-06, | |
| "loss": 2.2224, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 5.310518242057154e-06, | |
| "loss": 2.2232, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 5.1128366517482965e-06, | |
| "loss": 2.2263, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 4.9151550614394385e-06, | |
| "loss": 2.2241, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 4.717473471130581e-06, | |
| "loss": 2.2265, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 4.5197918808217224e-06, | |
| "loss": 2.215, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 4.322110290512865e-06, | |
| "loss": 2.222, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 4.124428700204007e-06, | |
| "loss": 2.2281, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 3.92674710989515e-06, | |
| "loss": 2.2164, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 3.7290655195862925e-06, | |
| "loss": 2.216, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 3.531383929277434e-06, | |
| "loss": 2.2129, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.3337023389685764e-06, | |
| "loss": 2.2055, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.136020748659719e-06, | |
| "loss": 2.2171, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 2.9383391583508612e-06, | |
| "loss": 2.2211, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 2.7406575680420036e-06, | |
| "loss": 2.2162, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 2.5429759777331456e-06, | |
| "loss": 2.2107, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.345294387424288e-06, | |
| "loss": 2.211, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.1476127971154304e-06, | |
| "loss": 2.2221, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.9499312068065724e-06, | |
| "loss": 2.212, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.752249616497715e-06, | |
| "loss": 2.22, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.5545680261888572e-06, | |
| "loss": 2.2029, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.3568864358799994e-06, | |
| "loss": 2.2029, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.1592048455711418e-06, | |
| "loss": 2.2177, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "eval_loss": 2.0536677837371826, | |
| "eval_runtime": 187.7144, | |
| "eval_samples_per_second": 532.724, | |
| "eval_steps_per_second": 66.591, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 9.61523255262284e-07, | |
| "loss": 2.2143, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 7.638416649534263e-07, | |
| "loss": 2.2206, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 5.661600746445686e-07, | |
| "loss": 2.2153, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.684784843357108e-07, | |
| "loss": 2.2198, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.7079689402685307e-07, | |
| "loss": 2.2138, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 252932, | |
| "total_flos": 2.131165835358634e+18, | |
| "train_loss": 0.24544810387602325, | |
| "train_runtime": 44838.2229, | |
| "train_samples_per_second": 1444.095, | |
| "train_steps_per_second": 5.641 | |
| } | |
| ], | |
| "max_steps": 252932, | |
| "num_train_epochs": 1, | |
| "total_flos": 2.131165835358634e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
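
For reference, the record above is a standard Hugging Face `Trainer` state file (`trainer_state.json`). Below is a minimal sketch, assuming the file has been saved locally under the hypothetical path `trainer_state.json`, of how the `log_history` entries could be split into training and evaluation records and plotted against the global step; it is illustrative only, not part of the original log.

```python
# Minimal sketch: load a trainer_state.json and plot logged losses by step.
# Assumes the file is available locally as "trainer_state.json" (hypothetical path).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry is either a training record (has "loss"), an
# evaluation record (has "eval_loss"), or the final summary (has neither).
train_records = [r for r in state["log_history"] if "loss" in r]
eval_records = [r for r in state["log_history"] if "eval_loss" in r]

plt.plot([r["step"] for r in train_records],
         [r["loss"] for r in train_records],
         label="train loss")
plt.plot([r["step"] for r in eval_records],
         [r["eval_loss"] for r in eval_records],
         marker="o", label="eval loss")
plt.xlabel("global step")
plt.ylabel("loss")
plt.legend()
plt.show()
```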