{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.991097922848665,
  "global_step": 252,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 1.1553,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 1.1604,
      "step": 2
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 1.0248,
      "step": 3
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.9345,
      "step": 4
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.8861,
      "step": 5
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.8385,
      "step": 6
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.806,
      "step": 7
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.7936,
      "step": 8
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.752,
      "step": 9
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.7385,
      "step": 10
    },
    {
      "epoch": 0.13,
      "learning_rate": 2e-05,
      "loss": 0.7177,
      "step": 11
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9999150370633987e-05,
      "loss": 0.7116,
      "step": 12
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9996601626909962e-05,
      "loss": 0.697,
      "step": 13
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9992354201925427e-05,
      "loss": 0.6884,
      "step": 14
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.998640881742778e-05,
      "loss": 0.6752,
      "step": 15
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.997876648369168e-05,
      "loss": 0.6808,
      "step": 16
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.996942849934735e-05,
      "loss": 0.6427,
      "step": 17
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9958396451159937e-05,
      "loss": 0.6587,
      "step": 18
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9945672213759872e-05,
      "loss": 0.6427,
      "step": 19
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.993125794932429e-05,
      "loss": 0.6251,
      "step": 20
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9915156107209673e-05,
      "loss": 0.6257,
      "step": 21
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.98973694235356e-05,
      "loss": 0.6167,
      "step": 22
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9877900920719825e-05,
      "loss": 0.596,
      "step": 23
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9856753906964686e-05,
      "loss": 0.6116,
      "step": 24
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.983393197569497e-05,
      "loss": 0.6072,
      "step": 25
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.980943900494727e-05,
      "loss": 0.599,
      "step": 26
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9783279156711022e-05,
      "loss": 0.5904,
      "step": 27
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.975545687622129e-05,
      "loss": 0.5887,
      "step": 28
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.972597689120338e-05,
      "loss": 0.585,
      "step": 29
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9694844211069477e-05,
      "loss": 0.5813,
      "step": 30
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.966206412606745e-05,
      "loss": 0.5847,
      "step": 31
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9627642206381864e-05,
      "loss": 0.5883,
      "step": 32
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9591584301187477e-05,
      "loss": 0.5879,
      "step": 33
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9553896537655317e-05,
      "loss": 0.5753,
      "step": 34
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.951458531991151e-05,
      "loss": 0.5601,
      "step": 35
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9473657327949055e-05,
      "loss": 0.5665,
      "step": 36
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9431119516492725e-05,
      "loss": 0.5651,
      "step": 37
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9386979113817283e-05,
      "loss": 0.5695,
      "step": 38
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.934124362051919e-05,
      "loss": 0.5712,
      "step": 39
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9293920808242084e-05,
      "loss": 0.5486,
      "step": 40
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.924501871835616e-05,
      "loss": 0.5547,
      "step": 41
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9194545660591753e-05,
      "loss": 0.55,
      "step": 42
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9142510211627264e-05,
      "loss": 0.5454,
      "step": 43
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.9088921213631803e-05,
      "loss": 0.5496,
      "step": 44
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.9033787772762647e-05,
      "loss": 0.551,
      "step": 45
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8977119257617878e-05,
      "loss": 0.5534,
      "step": 46
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8918925297644418e-05,
      "loss": 0.5368,
      "step": 47
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8859215781501727e-05,
      "loss": 0.5219,
      "step": 48
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8798000855381472e-05,
      "loss": 0.5389,
      "step": 49
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.873529092128343e-05,
      "loss": 0.5252,
      "step": 50
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8671096635247914e-05,
      "loss": 0.5377,
      "step": 51
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.860542890554503e-05,
      "loss": 0.5206,
      "step": 52
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.853829889082109e-05,
      "loss": 0.5387,
      "step": 53
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8469717998202464e-05,
      "loss": 0.5224,
      "step": 54
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8399697881357214e-05,
      "loss": 0.523,
      "step": 55
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8328250438514837e-05,
      "loss": 0.5122,
      "step": 56
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8255387810444447e-05,
      "loss": 0.5245,
      "step": 57
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.818112237839174e-05,
      "loss": 0.5251,
      "step": 58
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.810546676197511e-05,
      "loss": 0.524,
      "step": 59
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.8028433817041237e-05,
      "loss": 0.5161,
      "step": 60
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7950036633480557e-05,
      "loss": 0.5339,
      "step": 61
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.787028853300294e-05,
      "loss": 0.5087,
      "step": 62
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7789203066874e-05,
      "loss": 0.5284,
      "step": 63
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7706794013612367e-05,
      "loss": 0.5052,
      "step": 64
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7623075376648374e-05,
      "loss": 0.5093,
      "step": 65
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7538061381944524e-05,
      "loss": 0.5005,
      "step": 66
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.745176647557809e-05,
      "loss": 0.5097,
      "step": 67
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7364205321286393e-05,
      "loss": 0.516,
      "step": 68
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7275392797975034e-05,
      "loss": 0.51,
      "step": 69
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.718534399718959e-05,
      "loss": 0.5025,
      "step": 70
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.709407422055116e-05,
      "loss": 0.4973,
      "step": 71
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.700159897715624e-05,
      "loss": 0.4971,
      "step": 72
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6907933980941312e-05,
      "loss": 0.4964,
      "step": 73
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.681309514801265e-05,
      "loss": 0.5095,
      "step": 74
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6717098593941753e-05,
      "loss": 0.5021,
      "step": 75
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.661996063102689e-05,
      "loss": 0.5028,
      "step": 76
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6521697765521232e-05,
      "loss": 0.4901,
      "step": 77
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.642232669482801e-05,
      "loss": 0.4853,
      "step": 78
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6321864304663174e-05,
      "loss": 0.4929,
      "step": 79
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.62203276661861e-05,
      "loss": 0.4962,
      "step": 80
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6117734033098744e-05,
      "loss": 0.4898,
      "step": 81
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.6014100838713796e-05,
      "loss": 0.4684,
      "step": 82
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.59094456929923e-05,
      "loss": 0.4905,
      "step": 83
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.580378637955128e-05,
      "loss": 0.4744,
      "step": 84
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5697140852641835e-05,
      "loss": 0.4604,
      "step": 85
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5589527234098247e-05,
      "loss": 0.4802,
      "step": 86
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5480963810258614e-05,
      "loss": 0.4716,
      "step": 87
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5371469028857534e-05,
      "loss": 0.4654,
      "step": 88
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5261061495891345e-05,
      "loss": 0.4869,
      "step": 89
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.514975997245649e-05,
      "loss": 0.4659,
      "step": 90
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5037583371561538e-05,
      "loss": 0.4681,
      "step": 91
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4924550754913341e-05,
      "loss": 0.4534,
      "step": 92
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4810681329677988e-05,
      "loss": 0.4569,
      "step": 93
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4695994445216985e-05,
      "loss": 0.4659,
      "step": 94
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.458050958979933e-05,
      "loss": 0.4657,
      "step": 95
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4464246387289913e-05,
      "loss": 0.4735,
      "step": 96
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4347224593814946e-05,
      "loss": 0.4599,
      "step": 97
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4229464094404866e-05,
      "loss": 0.4519,
      "step": 98
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4110984899615367e-05,
      "loss": 0.4484,
      "step": 99
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3991807142127082e-05,
      "loss": 0.4454,
      "step": 100
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3871951073324508e-05,
      "loss": 0.4548,
      "step": 101
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3751437059854809e-05,
      "loss": 0.4573,
      "step": 102
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3630285580166946e-05,
      "loss": 0.4429,
      "step": 103
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3508517221031898e-05,
      "loss": 0.463,
      "step": 104
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3386152674044421e-05,
      "loss": 0.4579,
      "step": 105
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3263212732107014e-05,
      "loss": 0.441,
      "step": 106
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.3139718285896657e-05,
      "loss": 0.4566,
      "step": 107
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.3015690320314952e-05,
      "loss": 0.4614,
      "step": 108
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2891149910922267e-05,
      "loss": 0.4564,
      "step": 109
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.276611822035641e-05,
      "loss": 0.4416,
      "step": 110
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.264061649473657e-05,
      "loss": 0.4506,
      "step": 111
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2514666060053075e-05,
      "loss": 0.4343,
      "step": 112
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2388288318543513e-05,
      "loss": 0.4395,
      "step": 113
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2261504745055963e-05,
      "loss": 0.4525,
      "step": 114
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.2134336883399855e-05,
      "loss": 0.4418,
      "step": 115
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.2006806342685127e-05,
      "loss": 0.4421,
      "step": 116
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1878934793650273e-05,
      "loss": 0.4347,
      "step": 117
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1750743964979919e-05,
      "loss": 0.4375,
      "step": 118
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1622255639612553e-05,
      "loss": 0.4395,
      "step": 119
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1493491651039077e-05,
      "loss": 0.4325,
      "step": 120
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1364473879592674e-05,
      "loss": 0.4393,
      "step": 121
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1235224248730821e-05,
      "loss": 0.4385,
      "step": 122
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.11057647213099e-05,
      "loss": 0.4364,
      "step": 123
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0976117295853155e-05,
      "loss": 0.4499,
      "step": 124
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0846304002812564e-05,
      "loss": 0.428,
      "step": 125
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0716346900825298e-05,
      "loss": 0.4225,
      "step": 126
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0586268072965395e-05,
      "loss": 0.4303,
      "step": 127
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0456089622991264e-05,
      "loss": 0.4216,
      "step": 128
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0325833671589687e-05,
      "loss": 0.4241,
      "step": 129
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0195522352616942e-05,
      "loss": 0.444,
      "step": 130
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.0065177809337703e-05,
      "loss": 0.4296,
      "step": 131
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.934822190662299e-06,
      "loss": 0.4177,
      "step": 132
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.804477647383061e-06,
      "loss": 0.4342,
      "step": 133
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.674166328410318e-06,
      "loss": 0.4281,
      "step": 134
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.543910377008741e-06,
      "loss": 0.4379,
      "step": 135
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.413731927034607e-06,
      "loss": 0.4059,
      "step": 136
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.283653099174704e-06,
      "loss": 0.4263,
      "step": 137
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.15369599718744e-06,
      "loss": 0.4219,
      "step": 138
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.023882704146848e-06,
      "loss": 0.4231,
      "step": 139
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.894235278690104e-06,
      "loss": 0.406,
      "step": 140
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.764775751269184e-06,
      "loss": 0.4154,
      "step": 141
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.63552612040733e-06,
      "loss": 0.4025,
      "step": 142
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.506508348960924e-06,
      "loss": 0.4263,
      "step": 143
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.377744360387447e-06,
      "loss": 0.4104,
      "step": 144
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.249256035020086e-06,
      "loss": 0.4046,
      "step": 145
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.12106520634973e-06,
      "loss": 0.4109,
      "step": 146
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.993193657314874e-06,
      "loss": 0.4103,
      "step": 147
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.865663116600149e-06,
      "loss": 0.407,
      "step": 148
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.738495254944042e-06,
      "loss": 0.4135,
      "step": 149
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.611711681456493e-06,
      "loss": 0.4103,
      "step": 150
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.485333939946926e-06,
      "loss": 0.4002,
      "step": 151
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.359383505263431e-06,
      "loss": 0.4108,
      "step": 152
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.233881779643595e-06,
      "loss": 0.4185,
      "step": 153
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.108850089077736e-06,
      "loss": 0.4168,
      "step": 154
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.984309679685049e-06,
      "loss": 0.4153,
      "step": 155
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.86028171410335e-06,
      "loss": 0.3995,
      "step": 156
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.736787267892991e-06,
      "loss": 0.4058,
      "step": 157
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.613847325955578e-06,
      "loss": 0.3968,
      "step": 158
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.491482778968103e-06,
      "loss": 0.4005,
      "step": 159
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.369714419833056e-06,
      "loss": 0.4184,
      "step": 160
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.2485629401451954e-06,
      "loss": 0.3958,
      "step": 161
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.128048926675494e-06,
      "loss": 0.4159,
      "step": 162
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.0081928578729235e-06,
      "loss": 0.4051,
      "step": 163
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.889015100384636e-06,
      "loss": 0.3841,
      "step": 164
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.770535905595138e-06,
      "loss": 0.3989,
      "step": 165
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.652775406185056e-06,
      "loss": 0.3979,
      "step": 166
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.535753612710091e-06,
      "loss": 0.402,
      "step": 167
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.419490410200675e-06,
      "loss": 0.4114,
      "step": 168
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.304005554783015e-06,
      "loss": 0.3852,
      "step": 169
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.189318670322016e-06,
      "loss": 0.4014,
      "step": 170
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.075449245086661e-06,
      "loss": 0.3757,
      "step": 171
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.962416628438466e-06,
      "loss": 0.3987,
      "step": 172
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.850240027543509e-06,
      "loss": 0.3715,
      "step": 173
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.738938504108659e-06,
      "loss": 0.3829,
      "step": 174
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.6285309711424706e-06,
      "loss": 0.3853,
      "step": 175
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.519036189741386e-06,
      "loss": 0.3796,
      "step": 176
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.410472765901755e-06,
      "loss": 0.3854,
      "step": 177
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.302859147358168e-06,
      "loss": 0.373,
      "step": 178
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.196213620448724e-06,
      "loss": 0.3872,
      "step": 179
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.0905543070077036e-06,
      "loss": 0.3873,
      "step": 180
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.985899161286205e-06,
      "loss": 0.3743,
      "step": 181
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.882265966901257e-06,
      "loss": 0.3845,
      "step": 182
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.7796723338138995e-06,
      "loss": 0.3705,
      "step": 183
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.6781356953368286e-06,
      "loss": 0.3892,
      "step": 184
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.5776733051719935e-06,
      "loss": 0.3832,
      "step": 185
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.4783022344787698e-06,
      "loss": 0.3597,
      "step": 186
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.380039368973115e-06,
      "loss": 0.3861,
      "step": 187
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.2829014060582498e-06,
      "loss": 0.3847,
      "step": 188
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.1869048519873514e-06,
      "loss": 0.3665,
      "step": 189
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.0920660190586893e-06,
      "loss": 0.3695,
      "step": 190
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.998401022843761e-06,
      "loss": 0.3818,
      "step": 191
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.9059257794488428e-06,
      "loss": 0.3738,
      "step": 192
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.8146560028104155e-06,
      "loss": 0.3733,
      "step": 193
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.724607202024969e-06,
      "loss": 0.3745,
      "step": 194
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.635794678713611e-06,
      "loss": 0.387,
      "step": 195
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.5482335244219114e-06,
      "loss": 0.3644,
      "step": 196
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.4619386180554783e-06,
      "loss": 0.3686,
      "step": 197
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.3769246233516243e-06,
      "loss": 0.3674,
      "step": 198
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.2932059863876364e-06,
      "loss": 0.3723,
      "step": 199
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.210796933126005e-06,
      "loss": 0.3715,
      "step": 200
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.129711466997062e-06,
      "loss": 0.3498,
      "step": 201
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.049963366519446e-06,
      "loss": 0.3688,
      "step": 202
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.9715661829587653e-06,
      "loss": 0.3791,
      "step": 203
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.8945332380248914e-06,
      "loss": 0.3668,
      "step": 204
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8188776216082604e-06,
      "loss": 0.3626,
      "step": 205
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.7446121895555556e-06,
      "loss": 0.3623,
      "step": 206
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6717495614851654e-06,
      "loss": 0.3736,
      "step": 207
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.6003021186427892e-06,
      "loss": 0.3863,
      "step": 208
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.5302820017975396e-06,
      "loss": 0.3607,
      "step": 209
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.4617011091789135e-06,
      "loss": 0.3533,
      "step": 210
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.3945710944549705e-06,
      "loss": 0.3613,
      "step": 211
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.3289033647520878e-06,
      "loss": 0.3776,
      "step": 212
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.2647090787165694e-06,
      "loss": 0.3623,
      "step": 213
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.201999144618531e-06,
      "loss": 0.36,
      "step": 214
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1407842184982786e-06,
      "loss": 0.3683,
      "step": 215
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.0810747023555879e-06,
      "loss": 0.3642,
      "step": 216
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0228807423821262e-06,
      "loss": 0.3586,
      "step": 217
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.662122272373574e-07,
      "loss": 0.3667,
      "step": 218
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.110787863682002e-07,
      "loss": 0.3698,
      "step": 219
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.574897883727384e-07,
      "loss": 0.3607,
      "step": 220
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.054543394082503e-07,
      "loss": 0.3726,
      "step": 221
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.549812816438395e-07,
      "loss": 0.3602,
      "step": 222
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.06079191757918e-07,
      "loss": 0.3668,
      "step": 223
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.587563794808127e-07,
      "loss": 0.3639,
      "step": 224
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.130208861827203e-07,
      "loss": 0.3545,
      "step": 225
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.688804835072748e-07,
      "loss": 0.3713,
      "step": 226
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.263426720509469e-07,
      "loss": 0.3577,
      "step": 227
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.854146800884929e-07,
      "loss": 0.3818,
      "step": 228
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.461034623446847e-07,
      "loss": 0.3567,
      "step": 229
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.084156988125232e-07,
      "loss": 0.3596,
      "step": 230
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.723577936181366e-07,
      "loss": 0.354,
      "step": 231
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.3793587393255e-07,
      "loss": 0.3471,
      "step": 232
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.0515578893052343e-07,
      "loss": 0.3472,
      "step": 233
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.7402310879662497e-07,
      "loss": 0.3677,
      "step": 234
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.4454312377871105e-07,
      "loss": 0.3658,
      "step": 235
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.167208432889789e-07,
      "loss": 0.345,
      "step": 236
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.9056099505273428e-07,
      "loss": 0.3525,
      "step": 237
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.6606802430503166e-07,
      "loss": 0.3687,
      "step": 238
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.43246093035313e-07,
      "loss": 0.3668,
      "step": 239
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.2209907928017794e-07,
      "loss": 0.3622,
      "step": 240
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.02630576464402e-07,
      "loss": 0.3723,
      "step": 241
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.484389279032835e-08,
      "loss": 0.3619,
      "step": 242
    },
    {
      "epoch": 2.88,
      "learning_rate": 6.874205067571082e-08,
      "loss": 0.3743,
      "step": 243
    },
    {
      "epoch": 2.9,
      "learning_rate": 5.4327786240132576e-08,
      "loss": 0.3619,
      "step": 244
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.1603548840062344e-08,
      "loss": 0.3658,
      "step": 245
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.0571500652651906e-08,
      "loss": 0.3573,
      "step": 246
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.1233516308323266e-08,
      "loss": 0.3577,
      "step": 247
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.3591182572219031e-08,
      "loss": 0.3602,
      "step": 248
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.645798074572552e-09,
      "loss": 0.3601,
      "step": 249
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.3983730900377654e-09,
      "loss": 0.3694,
      "step": 250
    },
    {
      "epoch": 2.98,
      "learning_rate": 8.496293660120725e-10,
      "loss": 0.3598,
      "step": 251
    },
    {
      "epoch": 2.99,
      "learning_rate": 0.0,
      "loss": 0.3427,
      "step": 252
    },
    {
      "epoch": 2.99,
      "step": 252,
      "total_flos": 6.286385791107072e+17,
      "train_loss": 0.46650314425665235,
      "train_runtime": 17988.2658,
      "train_samples_per_second": 1.798,
      "train_steps_per_second": 0.014
    }
  ],
  "max_steps": 252,
  "num_train_epochs": 3,
  "total_flos": 6.286385791107072e+17,
  "trial_name": null,
  "trial_params": null
}