{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9637068018426074,
  "global_step": 100000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.9759073299539347e-05,
      "loss": 8.65,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95181465990787e-05,
      "loss": 8.2084,
      "step": 1000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927721989861805e-05,
      "loss": 8.1327,
      "step": 1500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90362931981574e-05,
      "loss": 7.9749,
      "step": 2000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879536649769674e-05,
      "loss": 7.9396,
      "step": 2500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.855443979723609e-05,
      "loss": 7.9153,
      "step": 3000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.831351309677544e-05,
      "loss": 7.9056,
      "step": 3500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.807258639631479e-05,
      "loss": 7.8028,
      "step": 4000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.7831659695854133e-05,
      "loss": 7.7427,
      "step": 4500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.7590732995393484e-05,
      "loss": 7.7145,
      "step": 5000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.734980629493283e-05,
      "loss": 7.6656,
      "step": 5500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.7108879594472186e-05,
      "loss": 7.689,
      "step": 6000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.686795289401153e-05,
      "loss": 7.7125,
      "step": 6500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.6627026193550874e-05,
      "loss": 7.5884,
      "step": 7000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.6386099493090225e-05,
      "loss": 7.5532,
      "step": 7500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.614517279262957e-05,
      "loss": 7.5568,
      "step": 8000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.590424609216892e-05,
      "loss": 7.5296,
      "step": 8500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.566331939170827e-05,
      "loss": 7.5091,
      "step": 9000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.5422392691247615e-05,
      "loss": 7.575,
      "step": 9500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.5181465990786966e-05,
      "loss": 7.4049,
      "step": 10000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.494053929032631e-05,
      "loss": 7.4642,
      "step": 10500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.469961258986566e-05,
      "loss": 7.4697,
      "step": 11000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.445868588940501e-05,
      "loss": 7.4979,
      "step": 11500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.4217759188944356e-05,
      "loss": 7.3755,
      "step": 12000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.397683248848371e-05,
      "loss": 7.4219,
      "step": 12500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.373590578802305e-05,
      "loss": 7.326,
      "step": 13000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.34949790875624e-05,
      "loss": 7.3567,
      "step": 13500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.325405238710175e-05,
      "loss": 7.3167,
      "step": 14000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.30131256866411e-05,
      "loss": 7.2823,
      "step": 14500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.277219898618045e-05,
      "loss": 7.3109,
      "step": 15000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.253127228571979e-05,
      "loss": 7.3226,
      "step": 15500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.229034558525914e-05,
      "loss": 7.2789,
      "step": 16000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.2049418884798494e-05,
      "loss": 7.2836,
      "step": 16500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.180849218433784e-05,
      "loss": 7.2753,
      "step": 17000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.156756548387718e-05,
      "loss": 7.2343,
      "step": 17500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.132663878341654e-05,
      "loss": 7.2357,
      "step": 18000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.1085712082955884e-05,
      "loss": 7.3118,
      "step": 18500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.0844785382495235e-05,
      "loss": 7.2337,
      "step": 19000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.060385868203458e-05,
      "loss": 7.1841,
      "step": 19500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.036293198157392e-05,
      "loss": 7.1875,
      "step": 20000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.012200528111328e-05,
      "loss": 7.1547,
      "step": 20500
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.9881078580652625e-05,
      "loss": 7.1319,
      "step": 21000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.964015188019197e-05,
      "loss": 7.1787,
      "step": 21500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.939922517973132e-05,
      "loss": 7.0693,
      "step": 22000
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.9158298479270664e-05,
      "loss": 7.1514,
      "step": 22500
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.891737177881002e-05,
      "loss": 7.122,
      "step": 23000
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8676445078349366e-05,
      "loss": 7.0971,
      "step": 23500
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.843551837788871e-05,
      "loss": 7.0787,
      "step": 24000
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.819459167742806e-05,
      "loss": 7.0807,
      "step": 24500
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.7953664976967405e-05,
      "loss": 7.0816,
      "step": 25000
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.7712738276506756e-05,
      "loss": 7.0138,
      "step": 25500
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.747181157604611e-05,
      "loss": 7.0621,
      "step": 26000
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.723088487558545e-05,
      "loss": 6.9711,
      "step": 26500
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.69899581751248e-05,
      "loss": 6.9892,
      "step": 27000
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.6749031474664146e-05,
      "loss": 6.9754,
      "step": 27500
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.65081047742035e-05,
      "loss": 6.9831,
      "step": 28000
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.626717807374285e-05,
      "loss": 6.9777,
      "step": 28500
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.602625137328219e-05,
      "loss": 6.9249,
      "step": 29000
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.578532467282154e-05,
      "loss": 6.8832,
      "step": 29500
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.5544397972360894e-05,
      "loss": 6.8826,
      "step": 30000
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.530347127190024e-05,
      "loss": 6.8175,
      "step": 30500
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.506254457143959e-05,
      "loss": 6.9092,
      "step": 31000
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.482161787097893e-05,
      "loss": 6.7819,
      "step": 31500
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.4580691170518284e-05,
      "loss": 6.8296,
      "step": 32000
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.4339764470057635e-05,
      "loss": 6.8417,
      "step": 32500
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.409883776959698e-05,
      "loss": 6.7656,
      "step": 33000
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.385791106913633e-05,
      "loss": 6.7687,
      "step": 33500
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.3616984368675674e-05,
      "loss": 6.7539,
      "step": 34000
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.337605766821502e-05,
      "loss": 6.7836,
      "step": 34500
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.3135130967754376e-05,
      "loss": 6.6956,
      "step": 35000
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.289420426729372e-05,
      "loss": 6.6429,
      "step": 35500
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.265327756683307e-05,
      "loss": 6.7027,
      "step": 36000
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.2412350866372415e-05,
      "loss": 6.7303,
      "step": 36500
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.217142416591176e-05,
      "loss": 6.6036,
      "step": 37000
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.193049746545112e-05,
      "loss": 6.5406,
      "step": 37500
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.168957076499046e-05,
      "loss": 6.5995,
      "step": 38000
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.144864406452981e-05,
      "loss": 6.5864,
      "step": 38500
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.1207717364069156e-05,
      "loss": 6.5612,
      "step": 39000
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.09667906636085e-05,
      "loss": 6.5546,
      "step": 39500
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.072586396314786e-05,
      "loss": 6.4666,
      "step": 40000
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.0484937262687202e-05,
      "loss": 6.4376,
      "step": 40500
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.024401056222655e-05,
      "loss": 6.4767,
      "step": 41000
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.0003083861765897e-05,
      "loss": 6.4738,
      "step": 41500
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9762157161305248e-05,
      "loss": 6.4136,
      "step": 42000
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9521230460844596e-05,
      "loss": 6.4317,
      "step": 42500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9280303760383943e-05,
      "loss": 6.4216,
      "step": 43000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.903937705992329e-05,
      "loss": 6.4294,
      "step": 43500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.8798450359462638e-05,
      "loss": 6.3409,
      "step": 44000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.855752365900199e-05,
      "loss": 6.3394,
      "step": 44500
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8316596958541336e-05,
      "loss": 6.3577,
      "step": 45000
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.8075670258080684e-05,
      "loss": 6.2582,
      "step": 45500
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.783474355762003e-05,
      "loss": 6.2806,
      "step": 46000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7593816857159376e-05,
      "loss": 6.3391,
      "step": 46500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.735289015669873e-05,
      "loss": 6.3571,
      "step": 47000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.7111963456238077e-05,
      "loss": 6.2282,
      "step": 47500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.6871036755777425e-05,
      "loss": 6.3069,
      "step": 48000
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.663011005531677e-05,
      "loss": 6.2606,
      "step": 48500
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6389183354856117e-05,
      "loss": 6.2315,
      "step": 49000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.614825665439547e-05,
      "loss": 6.2534,
      "step": 49500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.590732995393482e-05,
      "loss": 6.1819,
      "step": 50000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5666403253474163e-05,
      "loss": 6.123,
      "step": 50500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.542547655301351e-05,
      "loss": 6.1913,
      "step": 51000
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5184549852552858e-05,
      "loss": 6.1996,
      "step": 51500
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.494362315209221e-05,
      "loss": 6.175,
      "step": 52000
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4702696451631556e-05,
      "loss": 6.1644,
      "step": 52500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4461769751170904e-05,
      "loss": 6.096,
      "step": 53000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4220843050710254e-05,
      "loss": 6.0954,
      "step": 53500
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3979916350249602e-05,
      "loss": 6.0694,
      "step": 54000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.373898964978895e-05,
      "loss": 6.0671,
      "step": 54500
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3498062949328297e-05,
      "loss": 6.0728,
      "step": 55000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3257136248867644e-05,
      "loss": 6.1125,
      "step": 55500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.3016209548406995e-05,
      "loss": 6.0758,
      "step": 56000
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.2775282847946343e-05,
      "loss": 6.0084,
      "step": 56500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.253435614748569e-05,
      "loss": 6.06,
      "step": 57000
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2293429447025038e-05,
      "loss": 6.0424,
      "step": 57500
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.2052502746564385e-05,
      "loss": 6.1225,
      "step": 58000
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.1811576046103736e-05,
      "loss": 6.0063,
      "step": 58500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.157064934564308e-05,
      "loss": 5.9357,
      "step": 59000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.132972264518243e-05,
      "loss": 5.9604,
      "step": 59500
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.108879594472178e-05,
      "loss": 6.0146,
      "step": 60000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0847869244261126e-05,
      "loss": 5.9849,
      "step": 60500
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.0606942543800474e-05,
      "loss": 6.0048,
      "step": 61000
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.036601584333982e-05,
      "loss": 5.9397,
      "step": 61500
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0125089142879172e-05,
      "loss": 6.0005,
      "step": 62000
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.988416244241852e-05,
      "loss": 5.9041,
      "step": 62500
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.9643235741957867e-05,
      "loss": 5.9352,
      "step": 63000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.9402309041497215e-05,
      "loss": 5.9038,
      "step": 63500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9161382341036562e-05,
      "loss": 5.9472,
      "step": 64000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8920455640575913e-05,
      "loss": 5.9163,
      "step": 64500
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.867952894011526e-05,
      "loss": 5.9089,
      "step": 65000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.843860223965461e-05,
      "loss": 5.9437,
      "step": 65500
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8197675539193956e-05,
      "loss": 5.8682,
      "step": 66000
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7956748838733303e-05,
      "loss": 5.8244,
      "step": 66500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7715822138272654e-05,
      "loss": 5.851,
      "step": 67000
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7474895437812e-05,
      "loss": 5.8414,
      "step": 67500
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.723396873735135e-05,
      "loss": 5.8678,
      "step": 68000
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6993042036890697e-05,
      "loss": 5.8232,
      "step": 68500
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6752115336430048e-05,
      "loss": 5.8244,
      "step": 69000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6511188635969392e-05,
      "loss": 5.8314,
      "step": 69500
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.627026193550874e-05,
      "loss": 5.8577,
      "step": 70000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.602933523504809e-05,
      "loss": 5.7637,
      "step": 70500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.5788408534587438e-05,
      "loss": 5.83,
      "step": 71000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5547481834126785e-05,
      "loss": 5.8128,
      "step": 71500
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5306555133666133e-05,
      "loss": 5.7518,
      "step": 72000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5065628433205484e-05,
      "loss": 5.8433,
      "step": 72500
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.4824701732744831e-05,
      "loss": 5.7624,
      "step": 73000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4583775032284177e-05,
      "loss": 5.7489,
      "step": 73500
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4342848331823528e-05,
      "loss": 5.7542,
      "step": 74000
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4101921631362874e-05,
      "loss": 5.7664,
      "step": 74500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.3860994930902225e-05,
      "loss": 5.7242,
      "step": 75000
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.362006823044157e-05,
      "loss": 5.7448,
      "step": 75500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.3379141529980918e-05,
      "loss": 5.6599,
      "step": 76000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3138214829520267e-05,
      "loss": 5.8041,
      "step": 76500
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.2897288129059615e-05,
      "loss": 5.7124,
      "step": 77000
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2656361428598964e-05,
      "loss": 5.7687,
      "step": 77500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2415434728138312e-05,
      "loss": 5.7568,
      "step": 78000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.217450802767766e-05,
      "loss": 5.729,
      "step": 78500
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.1933581327217008e-05,
      "loss": 5.7368,
      "step": 79000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1692654626756357e-05,
      "loss": 5.7522,
      "step": 79500
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1451727926295705e-05,
      "loss": 5.7453,
      "step": 80000
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1210801225835052e-05,
      "loss": 5.6756,
      "step": 80500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.09698745253744e-05,
      "loss": 5.764,
      "step": 81000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.072894782491375e-05,
      "loss": 5.6947,
      "step": 81500
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0488021124453097e-05,
      "loss": 5.6659,
      "step": 82000
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0247094423992446e-05,
      "loss": 5.7086,
      "step": 82500
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0006167723531793e-05,
      "loss": 5.6405,
      "step": 83000
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.765241023071141e-06,
      "loss": 5.6981,
      "step": 83500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.524314322610489e-06,
      "loss": 5.732,
      "step": 84000
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.283387622149838e-06,
      "loss": 5.6404,
      "step": 84500
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.042460921689185e-06,
      "loss": 5.6796,
      "step": 85000
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.801534221228534e-06,
      "loss": 5.7269,
      "step": 85500
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.560607520767882e-06,
      "loss": 5.6806,
      "step": 86000
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.31968082030723e-06,
      "loss": 5.5852,
      "step": 86500
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.078754119846579e-06,
      "loss": 5.6754,
      "step": 87000
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.837827419385926e-06,
      "loss": 5.6727,
      "step": 87500
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.5969007189252746e-06,
      "loss": 5.7247,
      "step": 88000
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.355974018464623e-06,
      "loss": 5.6207,
      "step": 88500
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.115047318003971e-06,
      "loss": 5.6297,
      "step": 89000
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.874120617543319e-06,
      "loss": 5.6616,
      "step": 89500
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.633193917082667e-06,
      "loss": 5.6567,
      "step": 90000
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.3922672166220155e-06,
      "loss": 5.6692,
      "step": 90500
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.151340516161364e-06,
      "loss": 5.5955,
      "step": 91000
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.910413815700711e-06,
      "loss": 5.6221,
      "step": 91500
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.66948711524006e-06,
      "loss": 5.6195,
      "step": 92000
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.428560414779408e-06,
      "loss": 5.5157,
      "step": 92500
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.187633714318756e-06,
      "loss": 5.5166,
      "step": 93000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.946707013858104e-06,
      "loss": 5.6297,
      "step": 93500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.705780313397452e-06,
      "loss": 5.6392,
      "step": 94000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.4648536129368e-06,
      "loss": 5.6358,
      "step": 94500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.223926912476148e-06,
      "loss": 5.6061,
      "step": 95000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.983000212015497e-06,
      "loss": 5.575,
      "step": 95500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.7420735115548446e-06,
      "loss": 5.6051,
      "step": 96000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.501146811094193e-06,
      "loss": 5.6199,
      "step": 96500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.2602201106335413e-06,
      "loss": 5.6355,
      "step": 97000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.0192934101728892e-06,
      "loss": 5.5511,
      "step": 97500
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.778366709712237e-06,
      "loss": 5.5902,
      "step": 98000
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.5374400092515855e-06,
      "loss": 5.6163,
      "step": 98500
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.2965133087909335e-06,
      "loss": 5.5923,
      "step": 99000
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.0555866083302814e-06,
      "loss": 5.5735,
      "step": 99500
    },
    {
      "epoch": 0.96,
| "learning_rate": 1.81465990786963e-06, | |
| "loss": 5.6391, | |
| "step": 100000 | |
| } | |
| ], | |
| "max_steps": 103766, | |
| "num_train_epochs": 1, | |
| "total_flos": 8240866910684928.0, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |