{
  "best_global_step": 800,
  "best_metric": 0.7418414950370789,
  "best_model_checkpoint": "checkpoints/lora_uci/checkpoint-800",
  "epoch": 0.28444444444444444,
  "eval_steps": 200,
  "global_step": 800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00035555555555555557,
      "grad_norm": 32.15512466430664,
      "learning_rate": 0.0,
      "loss": 4.3308,
      "step": 1
    },
    {
      "epoch": 0.0017777777777777779,
      "grad_norm": 29.14179801940918,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 4.2083,
      "step": 5
    },
    {
      "epoch": 0.0035555555555555557,
      "grad_norm": 22.27640724182129,
      "learning_rate": 3.75e-06,
      "loss": 3.5173,
      "step": 10
    },
    {
      "epoch": 0.005333333333333333,
      "grad_norm": 16.255199432373047,
      "learning_rate": 5.833333333333334e-06,
      "loss": 2.492,
      "step": 15
    },
    {
      "epoch": 0.0071111111111111115,
      "grad_norm": 13.185359954833984,
      "learning_rate": 7.916666666666667e-06,
      "loss": 1.6992,
      "step": 20
    },
    {
      "epoch": 0.008888888888888889,
      "grad_norm": 10.229533195495605,
      "learning_rate": 1e-05,
      "loss": 1.3879,
      "step": 25
    },
    {
      "epoch": 0.010666666666666666,
      "grad_norm": 12.396824836730957,
      "learning_rate": 1.2083333333333333e-05,
      "loss": 1.2205,
      "step": 30
    },
    {
      "epoch": 0.012444444444444444,
      "grad_norm": 34.271759033203125,
      "learning_rate": 1.4166666666666668e-05,
      "loss": 1.12,
      "step": 35
    },
    {
      "epoch": 0.014222222222222223,
      "grad_norm": 16.109539031982422,
      "learning_rate": 1.6250000000000002e-05,
      "loss": 1.1309,
      "step": 40
    },
    {
      "epoch": 0.016,
      "grad_norm": 14.91402816772461,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 1.1121,
      "step": 45
    },
    {
      "epoch": 0.017777777777777778,
      "grad_norm": 11.38852310180664,
      "learning_rate": 2.0416666666666667e-05,
      "loss": 1.0329,
      "step": 50
    },
    {
      "epoch": 0.019555555555555555,
      "grad_norm": 13.237730979919434,
      "learning_rate": 2.25e-05,
      "loss": 1.0185,
      "step": 55
    },
    {
      "epoch": 0.021333333333333333,
      "grad_norm": 29.64104461669922,
      "learning_rate": 2.4583333333333332e-05,
      "loss": 1.012,
      "step": 60
    },
    {
      "epoch": 0.02311111111111111,
      "grad_norm": 9.474141120910645,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.9874,
      "step": 65
    },
    {
      "epoch": 0.024888888888888887,
      "grad_norm": 8.440736770629883,
      "learning_rate": 2.8749999999999997e-05,
      "loss": 0.9604,
      "step": 70
    },
    {
      "epoch": 0.02666666666666667,
      "grad_norm": 6.6439900398254395,
      "learning_rate": 3.0833333333333335e-05,
      "loss": 0.9503,
      "step": 75
    },
    {
      "epoch": 0.028444444444444446,
      "grad_norm": 7.365396499633789,
      "learning_rate": 3.291666666666667e-05,
      "loss": 0.9339,
      "step": 80
    },
    {
      "epoch": 0.030222222222222223,
      "grad_norm": 8.67831802368164,
      "learning_rate": 3.5e-05,
      "loss": 0.919,
      "step": 85
    },
    {
      "epoch": 0.032,
      "grad_norm": 9.373591423034668,
      "learning_rate": 3.708333333333334e-05,
      "loss": 0.9177,
      "step": 90
    },
    {
      "epoch": 0.033777777777777775,
      "grad_norm": 6.998920440673828,
      "learning_rate": 3.9166666666666665e-05,
      "loss": 0.9053,
      "step": 95
    },
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 7.322479248046875,
      "learning_rate": 4.125e-05,
      "loss": 0.9217,
      "step": 100
    },
    {
      "epoch": 0.037333333333333336,
      "grad_norm": 8.45313549041748,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 0.9077,
      "step": 105
    },
    {
      "epoch": 0.03911111111111111,
      "grad_norm": 10.838536262512207,
      "learning_rate": 4.541666666666667e-05,
      "loss": 0.9066,
      "step": 110
    },
    {
      "epoch": 0.04088888888888889,
      "grad_norm": 9.282814979553223,
      "learning_rate": 4.75e-05,
      "loss": 0.8939,
      "step": 115
    },
    {
      "epoch": 0.042666666666666665,
      "grad_norm": 5.256754398345947,
      "learning_rate": 4.958333333333334e-05,
      "loss": 0.8755,
      "step": 120
    },
    {
      "epoch": 0.044444444444444446,
      "grad_norm": 6.6123552322387695,
      "learning_rate": 5.166666666666667e-05,
      "loss": 0.8843,
      "step": 125
    },
    {
      "epoch": 0.04622222222222222,
      "grad_norm": 6.317594528198242,
      "learning_rate": 5.375e-05,
      "loss": 0.8802,
      "step": 130
    },
    {
      "epoch": 0.048,
      "grad_norm": 4.420418739318848,
      "learning_rate": 5.583333333333334e-05,
      "loss": 0.8716,
      "step": 135
    },
    {
      "epoch": 0.049777777777777775,
      "grad_norm": 7.11093282699585,
      "learning_rate": 5.7916666666666674e-05,
      "loss": 0.8944,
      "step": 140
    },
    {
      "epoch": 0.051555555555555556,
      "grad_norm": 8.643278121948242,
      "learning_rate": 6e-05,
      "loss": 0.9049,
      "step": 145
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 5.504462718963623,
      "learning_rate": 6.208333333333334e-05,
      "loss": 0.9215,
      "step": 150
    },
    {
      "epoch": 0.05511111111111111,
      "grad_norm": 4.5625200271606445,
      "learning_rate": 6.416666666666668e-05,
      "loss": 0.8763,
      "step": 155
    },
    {
      "epoch": 0.05688888888888889,
      "grad_norm": 4.5830397605896,
      "learning_rate": 6.625e-05,
      "loss": 0.8967,
      "step": 160
    },
    {
      "epoch": 0.058666666666666666,
      "grad_norm": 5.370687961578369,
      "learning_rate": 6.833333333333333e-05,
      "loss": 0.868,
      "step": 165
    },
    {
      "epoch": 0.060444444444444446,
      "grad_norm": 8.188835144042969,
      "learning_rate": 7.041666666666668e-05,
      "loss": 0.8853,
      "step": 170
    },
    {
      "epoch": 0.06222222222222222,
      "grad_norm": 3.952087163925171,
      "learning_rate": 7.25e-05,
      "loss": 0.8724,
      "step": 175
    },
    {
      "epoch": 0.064,
      "grad_norm": 4.194353103637695,
      "learning_rate": 7.458333333333333e-05,
      "loss": 0.8581,
      "step": 180
    },
    {
      "epoch": 0.06577777777777778,
      "grad_norm": 2.985386610031128,
      "learning_rate": 7.666666666666667e-05,
      "loss": 0.8496,
      "step": 185
    },
    {
      "epoch": 0.06755555555555555,
      "grad_norm": 5.666004657745361,
      "learning_rate": 7.875e-05,
      "loss": 0.8816,
      "step": 190
    },
    {
      "epoch": 0.06933333333333333,
      "grad_norm": 3.95521879196167,
      "learning_rate": 8.083333333333334e-05,
      "loss": 0.8872,
      "step": 195
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 4.558910369873047,
      "learning_rate": 8.291666666666667e-05,
      "loss": 0.8802,
      "step": 200
    },
    {
      "epoch": 0.07111111111111111,
      "eval_loss": 0.8564087748527527,
      "eval_runtime": 155.7786,
      "eval_samples_per_second": 32.097,
      "eval_steps_per_second": 4.012,
      "step": 200
    },
    {
      "epoch": 0.07288888888888889,
      "grad_norm": 2.4701411724090576,
      "learning_rate": 8.5e-05,
      "loss": 0.8383,
      "step": 205
    },
    {
      "epoch": 0.07466666666666667,
      "grad_norm": 4.364571571350098,
      "learning_rate": 8.708333333333334e-05,
      "loss": 0.8763,
      "step": 210
    },
    {
      "epoch": 0.07644444444444444,
      "grad_norm": 4.059802532196045,
      "learning_rate": 8.916666666666667e-05,
      "loss": 0.8928,
      "step": 215
    },
    {
      "epoch": 0.07822222222222222,
      "grad_norm": 7.405764579772949,
      "learning_rate": 9.125e-05,
      "loss": 0.8619,
      "step": 220
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.007632732391357,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.9656,
      "step": 225
    },
    {
      "epoch": 0.08177777777777778,
      "grad_norm": 6.396026611328125,
      "learning_rate": 9.541666666666668e-05,
      "loss": 0.9084,
      "step": 230
    },
    {
      "epoch": 0.08355555555555555,
      "grad_norm": 4.630360126495361,
      "learning_rate": 9.75e-05,
      "loss": 0.8617,
      "step": 235
    },
    {
      "epoch": 0.08533333333333333,
      "grad_norm": 2.987304925918579,
      "learning_rate": 9.958333333333335e-05,
      "loss": 0.8696,
      "step": 240
    },
    {
      "epoch": 0.08711111111111111,
      "grad_norm": 3.981341600418091,
      "learning_rate": 9.999915384288722e-05,
      "loss": 0.8412,
      "step": 245
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 2.754917860031128,
      "learning_rate": 9.999571637870036e-05,
      "loss": 0.8526,
      "step": 250
    },
    {
      "epoch": 0.09066666666666667,
      "grad_norm": 2.6841213703155518,
      "learning_rate": 9.998963490426943e-05,
      "loss": 0.853,
      "step": 255
    },
    {
      "epoch": 0.09244444444444444,
      "grad_norm": 3.0342020988464355,
      "learning_rate": 9.998090974121159e-05,
      "loss": 0.8551,
      "step": 260
    },
    {
      "epoch": 0.09422222222222222,
      "grad_norm": 3.418090343475342,
      "learning_rate": 9.99695413509548e-05,
      "loss": 0.8315,
      "step": 265
    },
    {
      "epoch": 0.096,
      "grad_norm": 2.6049306392669678,
      "learning_rate": 9.995553033471335e-05,
      "loss": 0.8239,
      "step": 270
    },
    {
      "epoch": 0.09777777777777778,
      "grad_norm": 4.258927345275879,
      "learning_rate": 9.993887743345614e-05,
      "loss": 0.84,
      "step": 275
    },
    {
      "epoch": 0.09955555555555555,
      "grad_norm": 3.5856103897094727,
      "learning_rate": 9.991958352786744e-05,
      "loss": 0.8397,
      "step": 280
    },
    {
      "epoch": 0.10133333333333333,
      "grad_norm": 4.18107271194458,
      "learning_rate": 9.989764963830037e-05,
      "loss": 0.8283,
      "step": 285
    },
    {
      "epoch": 0.10311111111111111,
      "grad_norm": 2.9539637565612793,
      "learning_rate": 9.987307692472287e-05,
      "loss": 0.8315,
      "step": 290
    },
    {
      "epoch": 0.10488888888888889,
      "grad_norm": 2.6121134757995605,
      "learning_rate": 9.98458666866564e-05,
      "loss": 0.8203,
      "step": 295
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 3.4740283489227295,
      "learning_rate": 9.98160203631072e-05,
      "loss": 0.83,
      "step": 300
    },
    {
      "epoch": 0.10844444444444444,
      "grad_norm": 3.486816167831421,
      "learning_rate": 9.978353953249022e-05,
      "loss": 0.8269,
      "step": 305
    },
    {
      "epoch": 0.11022222222222222,
      "grad_norm": 2.7455246448516846,
      "learning_rate": 9.974842591254558e-05,
      "loss": 0.8332,
      "step": 310
    },
    {
      "epoch": 0.112,
      "grad_norm": 2.8629767894744873,
      "learning_rate": 9.971068136024781e-05,
      "loss": 0.8305,
      "step": 315
    },
    {
      "epoch": 0.11377777777777778,
      "grad_norm": 2.647754192352295,
      "learning_rate": 9.967030787170757e-05,
      "loss": 0.8213,
      "step": 320
    },
    {
      "epoch": 0.11555555555555555,
      "grad_norm": 2.873353958129883,
      "learning_rate": 9.962730758206611e-05,
      "loss": 0.8269,
      "step": 325
    },
    {
      "epoch": 0.11733333333333333,
      "grad_norm": 1.9501383304595947,
      "learning_rate": 9.95816827653824e-05,
      "loss": 0.8001,
      "step": 330
    },
    {
      "epoch": 0.11911111111111111,
      "grad_norm": 2.3588831424713135,
      "learning_rate": 9.95334358345128e-05,
      "loss": 0.7965,
      "step": 335
    },
    {
      "epoch": 0.12088888888888889,
      "grad_norm": 1.9669915437698364,
      "learning_rate": 9.948256934098352e-05,
      "loss": 0.7949,
      "step": 340
    },
    {
      "epoch": 0.12266666666666666,
      "grad_norm": 2.3287253379821777,
      "learning_rate": 9.942908597485558e-05,
      "loss": 0.8312,
      "step": 345
    },
    {
      "epoch": 0.12444444444444444,
      "grad_norm": 3.263697385787964,
      "learning_rate": 9.93729885645827e-05,
      "loss": 0.8305,
      "step": 350
    },
    {
      "epoch": 0.12622222222222224,
      "grad_norm": 1.87248694896698,
      "learning_rate": 9.931428007686158e-05,
      "loss": 0.8292,
      "step": 355
    },
    {
      "epoch": 0.128,
      "grad_norm": 2.7504541873931885,
      "learning_rate": 9.925296361647504e-05,
      "loss": 0.8285,
      "step": 360
    },
    {
      "epoch": 0.12977777777777777,
      "grad_norm": 2.169858694076538,
      "learning_rate": 9.918904242612795e-05,
      "loss": 0.8166,
      "step": 365
    },
    {
      "epoch": 0.13155555555555556,
      "grad_norm": 2.2024645805358887,
      "learning_rate": 9.912251988627549e-05,
      "loss": 0.7927,
      "step": 370
    },
    {
      "epoch": 0.13333333333333333,
      "grad_norm": 2.103611946105957,
      "learning_rate": 9.905339951494463e-05,
      "loss": 0.8236,
      "step": 375
    },
    {
      "epoch": 0.1351111111111111,
      "grad_norm": 2.265293836593628,
      "learning_rate": 9.898168496754794e-05,
      "loss": 0.7926,
      "step": 380
    },
    {
      "epoch": 0.1368888888888889,
      "grad_norm": 1.8098556995391846,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.812,
      "step": 385
    },
    {
      "epoch": 0.13866666666666666,
      "grad_norm": 1.6579197645187378,
      "learning_rate": 9.88304886519683e-05,
      "loss": 0.7933,
      "step": 390
    },
    {
      "epoch": 0.14044444444444446,
      "grad_norm": 1.664461612701416,
      "learning_rate": 9.875101487976253e-05,
      "loss": 0.798,
      "step": 395
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 1.6052963733673096,
      "learning_rate": 9.866896292302243e-05,
      "loss": 0.7937,
      "step": 400
    },
    {
      "epoch": 0.14222222222222222,
      "eval_loss": 0.791572093963623,
      "eval_runtime": 159.2149,
      "eval_samples_per_second": 31.404,
      "eval_steps_per_second": 3.926,
      "step": 400
    },
    {
      "epoch": 0.144,
      "grad_norm": 2.126084566116333,
      "learning_rate": 9.858433712104403e-05,
      "loss": 0.8188,
      "step": 405
    },
    {
      "epoch": 0.14577777777777778,
      "grad_norm": 3.2941622734069824,
      "learning_rate": 9.849714194924046e-05,
      "loss": 0.8067,
      "step": 410
    },
    {
      "epoch": 0.14755555555555555,
      "grad_norm": 1.658234715461731,
      "learning_rate": 9.84073820189054e-05,
      "loss": 0.7953,
      "step": 415
    },
    {
      "epoch": 0.14933333333333335,
      "grad_norm": 2.6132164001464844,
      "learning_rate": 9.831506207696898e-05,
      "loss": 0.8044,
      "step": 420
    },
    {
      "epoch": 0.1511111111111111,
      "grad_norm": 1.6197243928909302,
      "learning_rate": 9.822018700574695e-05,
      "loss": 0.7818,
      "step": 425
    },
    {
      "epoch": 0.15288888888888888,
      "grad_norm": 2.1293976306915283,
      "learning_rate": 9.812276182268236e-05,
      "loss": 0.7796,
      "step": 430
    },
    {
      "epoch": 0.15466666666666667,
      "grad_norm": 2.590989589691162,
      "learning_rate": 9.802279168008029e-05,
      "loss": 0.7903,
      "step": 435
    },
    {
      "epoch": 0.15644444444444444,
      "grad_norm": 1.674521803855896,
      "learning_rate": 9.792028186483526e-05,
      "loss": 0.7772,
      "step": 440
    },
    {
      "epoch": 0.1582222222222222,
      "grad_norm": 2.3836069107055664,
      "learning_rate": 9.781523779815179e-05,
      "loss": 0.7934,
      "step": 445
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.3944199085235596,
      "learning_rate": 9.770766503525754e-05,
      "loss": 0.7932,
      "step": 450
    },
    {
      "epoch": 0.16177777777777777,
      "grad_norm": 2.112563371658325,
      "learning_rate": 9.759756926510965e-05,
      "loss": 0.7873,
      "step": 455
    },
    {
      "epoch": 0.16355555555555557,
      "grad_norm": 2.041534185409546,
      "learning_rate": 9.748495631009386e-05,
      "loss": 0.796,
      "step": 460
    },
    {
      "epoch": 0.16533333333333333,
      "grad_norm": 1.7045772075653076,
      "learning_rate": 9.736983212571646e-05,
      "loss": 0.7791,
      "step": 465
    },
    {
      "epoch": 0.1671111111111111,
      "grad_norm": 1.5439887046813965,
      "learning_rate": 9.725220280028957e-05,
      "loss": 0.7939,
      "step": 470
    },
    {
      "epoch": 0.1688888888888889,
      "grad_norm": 1.459672451019287,
      "learning_rate": 9.713207455460894e-05,
      "loss": 0.7749,
      "step": 475
    },
    {
      "epoch": 0.17066666666666666,
      "grad_norm": 3.114187240600586,
      "learning_rate": 9.700945374162506e-05,
      "loss": 0.7785,
      "step": 480
    },
    {
      "epoch": 0.17244444444444446,
      "grad_norm": 1.7480342388153076,
      "learning_rate": 9.688434684610726e-05,
      "loss": 0.7653,
      "step": 485
    },
    {
      "epoch": 0.17422222222222222,
      "grad_norm": 1.854999303817749,
      "learning_rate": 9.67567604843006e-05,
      "loss": 0.7878,
      "step": 490
    },
    {
      "epoch": 0.176,
      "grad_norm": 2.006537437438965,
      "learning_rate": 9.662670140357611e-05,
      "loss": 0.7851,
      "step": 495
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 1.9404226541519165,
      "learning_rate": 9.649417648207388e-05,
      "loss": 0.7719,
      "step": 500
    },
    {
      "epoch": 0.17955555555555555,
      "grad_norm": 1.7404245138168335,
      "learning_rate": 9.635919272833938e-05,
      "loss": 0.775,
      "step": 505
    },
    {
      "epoch": 0.18133333333333335,
      "grad_norm": 1.4806632995605469,
      "learning_rate": 9.622175728095271e-05,
      "loss": 0.7822,
      "step": 510
    },
    {
      "epoch": 0.1831111111111111,
      "grad_norm": 1.607060432434082,
      "learning_rate": 9.60818774081512e-05,
      "loss": 0.7822,
      "step": 515
    },
    {
      "epoch": 0.18488888888888888,
      "grad_norm": 1.6430386304855347,
      "learning_rate": 9.593956050744492e-05,
      "loss": 0.7711,
      "step": 520
    },
    {
      "epoch": 0.18666666666666668,
      "grad_norm": 2.3202788829803467,
      "learning_rate": 9.579481410522556e-05,
      "loss": 0.7839,
      "step": 525
    },
    {
      "epoch": 0.18844444444444444,
      "grad_norm": 2.160609722137451,
      "learning_rate": 9.564764585636833e-05,
      "loss": 0.7854,
      "step": 530
    },
    {
      "epoch": 0.1902222222222222,
      "grad_norm": 1.7440357208251953,
      "learning_rate": 9.549806354382717e-05,
      "loss": 0.7806,
      "step": 535
    },
    {
      "epoch": 0.192,
      "grad_norm": 1.8481121063232422,
      "learning_rate": 9.534607507822313e-05,
      "loss": 0.7701,
      "step": 540
    },
    {
      "epoch": 0.19377777777777777,
      "grad_norm": 1.9447892904281616,
      "learning_rate": 9.519168849742604e-05,
      "loss": 0.772,
      "step": 545
    },
    {
      "epoch": 0.19555555555555557,
      "grad_norm": 2.9007174968719482,
      "learning_rate": 9.503491196612939e-05,
      "loss": 0.7486,
      "step": 550
    },
    {
      "epoch": 0.19733333333333333,
      "grad_norm": 1.5981870889663696,
      "learning_rate": 9.487575377541864e-05,
      "loss": 0.7713,
      "step": 555
    },
    {
      "epoch": 0.1991111111111111,
      "grad_norm": 1.6032360792160034,
      "learning_rate": 9.471422234233259e-05,
      "loss": 0.7596,
      "step": 560
    },
    {
      "epoch": 0.2008888888888889,
      "grad_norm": 1.7337145805358887,
      "learning_rate": 9.45503262094184e-05,
      "loss": 0.7725,
      "step": 565
    },
    {
      "epoch": 0.20266666666666666,
      "grad_norm": 1.7922943830490112,
      "learning_rate": 9.438407404427971e-05,
      "loss": 0.7646,
      "step": 570
    },
    {
      "epoch": 0.20444444444444446,
      "grad_norm": 1.2763404846191406,
      "learning_rate": 9.421547463911835e-05,
      "loss": 0.7744,
      "step": 575
    },
    {
      "epoch": 0.20622222222222222,
      "grad_norm": 1.6107685565948486,
      "learning_rate": 9.404453691026929e-05,
      "loss": 0.7854,
      "step": 580
    },
    {
      "epoch": 0.208,
      "grad_norm": 1.531690239906311,
      "learning_rate": 9.38712698977291e-05,
      "loss": 0.7765,
      "step": 585
    },
    {
      "epoch": 0.20977777777777779,
      "grad_norm": 4.303262710571289,
      "learning_rate": 9.369568276467797e-05,
      "loss": 0.7451,
      "step": 590
    },
    {
      "epoch": 0.21155555555555555,
      "grad_norm": 1.4451102018356323,
      "learning_rate": 9.351778479699499e-05,
      "loss": 0.767,
      "step": 595
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 1.9426709413528442,
      "learning_rate": 9.333758540276716e-05,
      "loss": 0.7611,
      "step": 600
    },
    {
      "epoch": 0.21333333333333335,
      "eval_loss": 0.7643172144889832,
      "eval_runtime": 149.0272,
      "eval_samples_per_second": 33.551,
      "eval_steps_per_second": 4.194,
      "step": 600
    },
    {
      "epoch": 0.21511111111111111,
      "grad_norm": 1.740432858467102,
      "learning_rate": 9.315509411179182e-05,
      "loss": 0.763,
      "step": 605
    },
    {
      "epoch": 0.21688888888888888,
      "grad_norm": 2.1427745819091797,
      "learning_rate": 9.297032057507264e-05,
      "loss": 0.7717,
      "step": 610
    },
    {
      "epoch": 0.21866666666666668,
      "grad_norm": 2.2643210887908936,
      "learning_rate": 9.278327456430926e-05,
      "loss": 0.7917,
      "step": 615
    },
    {
      "epoch": 0.22044444444444444,
      "grad_norm": 1.9076204299926758,
      "learning_rate": 9.259396597138052e-05,
      "loss": 0.7637,
      "step": 620
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 1.2893517017364502,
      "learning_rate": 9.24024048078213e-05,
      "loss": 0.7435,
      "step": 625
    },
    {
      "epoch": 0.224,
      "grad_norm": 1.9294421672821045,
      "learning_rate": 9.22086012042931e-05,
      "loss": 0.7446,
      "step": 630
    },
    {
      "epoch": 0.22577777777777777,
      "grad_norm": 1.516155481338501,
      "learning_rate": 9.201256541004829e-05,
      "loss": 0.7608,
      "step": 635
    },
    {
      "epoch": 0.22755555555555557,
      "grad_norm": 1.4733030796051025,
      "learning_rate": 9.181430779238797e-05,
      "loss": 0.7708,
      "step": 640
    },
    {
      "epoch": 0.22933333333333333,
      "grad_norm": 1.7201124429702759,
      "learning_rate": 9.16138388361139e-05,
      "loss": 0.7475,
      "step": 645
    },
    {
      "epoch": 0.2311111111111111,
      "grad_norm": 1.573810338973999,
      "learning_rate": 9.141116914297378e-05,
      "loss": 0.7782,
      "step": 650
    },
    {
      "epoch": 0.2328888888888889,
      "grad_norm": 1.3574259281158447,
      "learning_rate": 9.120630943110077e-05,
      "loss": 0.7406,
      "step": 655
    },
    {
      "epoch": 0.23466666666666666,
      "grad_norm": 1.912653923034668,
      "learning_rate": 9.099927053444662e-05,
      "loss": 0.7462,
      "step": 660
    },
    {
      "epoch": 0.23644444444444446,
      "grad_norm": 2.2716944217681885,
      "learning_rate": 9.079006340220862e-05,
      "loss": 0.7526,
      "step": 665
    },
    {
      "epoch": 0.23822222222222222,
      "grad_norm": 1.625752329826355,
      "learning_rate": 9.057869909825062e-05,
      "loss": 0.762,
      "step": 670
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2941769361495972,
      "learning_rate": 9.0365188800518e-05,
      "loss": 0.7544,
      "step": 675
    },
    {
      "epoch": 0.24177777777777779,
      "grad_norm": 1.6075447797775269,
      "learning_rate": 9.01495438004464e-05,
      "loss": 0.7639,
      "step": 680
    },
    {
      "epoch": 0.24355555555555555,
      "grad_norm": 1.671107292175293,
      "learning_rate": 8.993177550236464e-05,
      "loss": 0.7567,
      "step": 685
    },
    {
      "epoch": 0.24533333333333332,
      "grad_norm": 1.3904489278793335,
      "learning_rate": 8.971189542289162e-05,
      "loss": 0.7633,
      "step": 690
    },
    {
      "epoch": 0.24711111111111111,
      "grad_norm": 1.997207760810852,
      "learning_rate": 8.948991519032716e-05,
      "loss": 0.7403,
      "step": 695
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 1.5448390245437622,
      "learning_rate": 8.926584654403724e-05,
      "loss": 0.7424,
      "step": 700
    },
    {
      "epoch": 0.25066666666666665,
      "grad_norm": 1.760542392730713,
      "learning_rate": 8.903970133383297e-05,
      "loss": 0.7436,
      "step": 705
    },
    {
      "epoch": 0.25244444444444447,
      "grad_norm": 1.6473764181137085,
      "learning_rate": 8.881149151934398e-05,
      "loss": 0.7569,
      "step": 710
    },
    {
      "epoch": 0.25422222222222224,
      "grad_norm": 1.2679284811019897,
      "learning_rate": 8.858122916938601e-05,
      "loss": 0.7556,
      "step": 715
    },
    {
      "epoch": 0.256,
      "grad_norm": 1.3798352479934692,
      "learning_rate": 8.834892646132254e-05,
      "loss": 0.7446,
      "step": 720
    },
    {
      "epoch": 0.2577777777777778,
      "grad_norm": 1.692984700202942,
      "learning_rate": 8.811459568042091e-05,
      "loss": 0.7695,
      "step": 725
    },
    {
      "epoch": 0.25955555555555554,
      "grad_norm": 1.6680200099945068,
      "learning_rate": 8.787824921920249e-05,
      "loss": 0.7462,
      "step": 730
    },
    {
      "epoch": 0.2613333333333333,
      "grad_norm": 1.4423996210098267,
      "learning_rate": 8.763989957678742e-05,
      "loss": 0.7637,
      "step": 735
    },
    {
      "epoch": 0.26311111111111113,
      "grad_norm": 1.6775215864181519,
      "learning_rate": 8.739955935823351e-05,
      "loss": 0.755,
      "step": 740
    },
    {
      "epoch": 0.2648888888888889,
      "grad_norm": 1.6499780416488647,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.7468,
      "step": 745
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 1.3906657695770264,
      "learning_rate": 8.691295813862386e-05,
      "loss": 0.7458,
      "step": 750
    },
    {
      "epoch": 0.26844444444444443,
      "grad_norm": 1.3843157291412354,
      "learning_rate": 8.666672287134494e-05,
      "loss": 0.7461,
      "step": 755
    },
    {
      "epoch": 0.2702222222222222,
      "grad_norm": 1.6778830289840698,
      "learning_rate": 8.641854849412001e-05,
      "loss": 0.7284,
      "step": 760
    },
    {
      "epoch": 0.272,
      "grad_norm": 1.2424935102462769,
      "learning_rate": 8.61684481315854e-05,
      "loss": 0.7774,
      "step": 765
    },
    {
      "epoch": 0.2737777777777778,
      "grad_norm": 1.291231393814087,
      "learning_rate": 8.591643501023265e-05,
      "loss": 0.7428,
      "step": 770
    },
    {
      "epoch": 0.27555555555555555,
      "grad_norm": 1.12603759765625,
      "learning_rate": 8.566252245770909e-05,
      "loss": 0.7377,
      "step": 775
    },
    {
      "epoch": 0.2773333333333333,
      "grad_norm": 1.348775029182434,
      "learning_rate": 8.54067239021129e-05,
      "loss": 0.762,
      "step": 780
    },
    {
      "epoch": 0.2791111111111111,
      "grad_norm": 1.703519582748413,
      "learning_rate": 8.51490528712831e-05,
      "loss": 0.7533,
      "step": 785
    },
    {
      "epoch": 0.2808888888888889,
      "grad_norm": 1.8479610681533813,
      "learning_rate": 8.488952299208401e-05,
      "loss": 0.7535,
      "step": 790
    },
    {
      "epoch": 0.2826666666666667,
      "grad_norm": 1.1511187553405762,
      "learning_rate": 8.462814798968472e-05,
      "loss": 0.7555,
      "step": 795
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 2.5522501468658447,
      "learning_rate": 8.43649416868331e-05,
      "loss": 0.741,
      "step": 800
    },
    {
      "epoch": 0.28444444444444444,
      "eval_loss": 0.7418414950370789,
      "eval_runtime": 169.2643,
      "eval_samples_per_second": 29.54,
      "eval_steps_per_second": 3.692,
      "step": 800
    }
  ],
  "logging_steps": 5,
  "max_steps": 2400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 775521753600000.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}