{
  "best_global_step": 400,
  "best_metric": 0.34772011637687683,
  "best_model_checkpoint": "./luau-model/checkpoint-400",
  "epoch": 2.7777777777777777,
  "eval_steps": 100,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.034722222222222224,
      "grad_norm": 0.19553522765636444,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.5215,
      "step": 5
    },
    {
      "epoch": 0.06944444444444445,
      "grad_norm": 0.18851090967655182,
      "learning_rate": 3.6e-05,
      "loss": 1.4318,
      "step": 10
    },
    {
      "epoch": 0.10416666666666667,
      "grad_norm": 0.2147289365530014,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.4315,
      "step": 15
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 0.22880084812641144,
      "learning_rate": 7.6e-05,
      "loss": 1.3475,
      "step": 20
    },
    {
      "epoch": 0.1736111111111111,
      "grad_norm": 0.27205583453178406,
      "learning_rate": 9.6e-05,
      "loss": 1.3244,
      "step": 25
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 0.2366049736738205,
      "learning_rate": 0.000116,
      "loss": 1.3784,
      "step": 30
    },
    {
      "epoch": 0.24305555555555555,
      "grad_norm": 0.2528875768184662,
      "learning_rate": 0.00013600000000000003,
      "loss": 1.3578,
      "step": 35
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 0.2789267897605896,
      "learning_rate": 0.00015600000000000002,
      "loss": 1.2761,
      "step": 40
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.36334967613220215,
      "learning_rate": 0.00017600000000000002,
      "loss": 1.3073,
      "step": 45
    },
    {
      "epoch": 0.3472222222222222,
      "grad_norm": 0.3194734454154968,
      "learning_rate": 0.000196,
      "loss": 1.2374,
      "step": 50
    },
    {
      "epoch": 0.3819444444444444,
      "grad_norm": 0.4422107934951782,
      "learning_rate": 0.00019790575916230367,
      "loss": 1.2243,
      "step": 55
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.36031872034072876,
      "learning_rate": 0.00019528795811518326,
      "loss": 1.236,
      "step": 60
    },
    {
      "epoch": 0.4513888888888889,
      "grad_norm": 0.40430501103401184,
      "learning_rate": 0.00019267015706806283,
      "loss": 1.1484,
      "step": 65
    },
    {
      "epoch": 0.4861111111111111,
      "grad_norm": 0.39032667875289917,
      "learning_rate": 0.00019005235602094243,
      "loss": 1.1352,
      "step": 70
    },
    {
      "epoch": 0.5208333333333334,
      "grad_norm": 0.3691488206386566,
      "learning_rate": 0.00018743455497382202,
      "loss": 1.0341,
      "step": 75
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.3820245563983917,
      "learning_rate": 0.00018481675392670156,
      "loss": 0.9911,
      "step": 80
    },
    {
      "epoch": 0.5902777777777778,
      "grad_norm": 0.40899762511253357,
      "learning_rate": 0.00018219895287958115,
      "loss": 1.077,
      "step": 85
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.3846423029899597,
      "learning_rate": 0.00017958115183246075,
      "loss": 1.1241,
      "step": 90
    },
    {
      "epoch": 0.6597222222222222,
      "grad_norm": 0.43537914752960205,
      "learning_rate": 0.00017696335078534032,
      "loss": 1.0522,
      "step": 95
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 0.40991654992103577,
      "learning_rate": 0.0001743455497382199,
      "loss": 1.0116,
      "step": 100
    },
    {
      "epoch": 0.6944444444444444,
      "eval_loss": 1.053038239479065,
      "eval_runtime": 19.8507,
      "eval_samples_per_second": 9.622,
      "eval_steps_per_second": 1.612,
      "step": 100
    },
    {
      "epoch": 0.7291666666666666,
      "grad_norm": 0.4987630248069763,
      "learning_rate": 0.00017172774869109948,
      "loss": 0.9179,
      "step": 105
    },
    {
      "epoch": 0.7638888888888888,
      "grad_norm": 0.44468897581100464,
      "learning_rate": 0.00016910994764397907,
      "loss": 1.0694,
      "step": 110
    },
    {
      "epoch": 0.7986111111111112,
      "grad_norm": 0.4741359055042267,
      "learning_rate": 0.00016649214659685867,
      "loss": 1.024,
      "step": 115
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.47767335176467896,
      "learning_rate": 0.0001638743455497382,
      "loss": 0.9636,
      "step": 120
    },
    {
      "epoch": 0.8680555555555556,
      "grad_norm": 0.45077380537986755,
      "learning_rate": 0.0001612565445026178,
      "loss": 1.0373,
      "step": 125
    },
    {
      "epoch": 0.9027777777777778,
      "grad_norm": 0.43459218740463257,
      "learning_rate": 0.0001586387434554974,
      "loss": 0.9578,
      "step": 130
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.5079038739204407,
      "learning_rate": 0.00015602094240837696,
      "loss": 0.8932,
      "step": 135
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 0.48626700043678284,
      "learning_rate": 0.00015340314136125656,
      "loss": 0.9151,
      "step": 140
    },
    {
      "epoch": 1.0069444444444444,
      "grad_norm": 0.5171248316764832,
      "learning_rate": 0.00015078534031413612,
      "loss": 0.9027,
      "step": 145
    },
    {
      "epoch": 1.0416666666666667,
      "grad_norm": 0.6200463175773621,
      "learning_rate": 0.00014816753926701572,
      "loss": 0.6928,
      "step": 150
    },
    {
      "epoch": 1.0763888888888888,
      "grad_norm": 0.48823609948158264,
      "learning_rate": 0.00014554973821989531,
      "loss": 0.7165,
      "step": 155
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 0.5404874086380005,
      "learning_rate": 0.00014293193717277485,
      "loss": 0.6827,
      "step": 160
    },
    {
      "epoch": 1.1458333333333333,
      "grad_norm": 0.46143069863319397,
      "learning_rate": 0.00014031413612565445,
      "loss": 0.7171,
      "step": 165
    },
    {
      "epoch": 1.1805555555555556,
      "grad_norm": 0.5935544371604919,
      "learning_rate": 0.00013769633507853404,
      "loss": 0.7044,
      "step": 170
    },
    {
      "epoch": 1.2152777777777777,
      "grad_norm": 0.5715068578720093,
      "learning_rate": 0.0001350785340314136,
      "loss": 0.6636,
      "step": 175
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.585377037525177,
      "learning_rate": 0.0001324607329842932,
      "loss": 0.679,
      "step": 180
    },
    {
      "epoch": 1.2847222222222223,
      "grad_norm": 0.5465496182441711,
      "learning_rate": 0.00012984293193717277,
      "loss": 0.647,
      "step": 185
    },
    {
      "epoch": 1.3194444444444444,
      "grad_norm": 0.5915805697441101,
      "learning_rate": 0.00012722513089005237,
      "loss": 0.6305,
      "step": 190
    },
    {
      "epoch": 1.3541666666666667,
      "grad_norm": 0.5672898888587952,
      "learning_rate": 0.00012460732984293196,
      "loss": 0.6799,
      "step": 195
    },
    {
      "epoch": 1.3888888888888888,
      "grad_norm": 0.735905110836029,
      "learning_rate": 0.00012198952879581151,
      "loss": 0.6022,
      "step": 200
    },
    {
      "epoch": 1.3888888888888888,
      "eval_loss": 0.7630631327629089,
      "eval_runtime": 19.802,
      "eval_samples_per_second": 9.646,
      "eval_steps_per_second": 1.616,
      "step": 200
    },
    {
      "epoch": 1.4236111111111112,
      "grad_norm": 0.6561821699142456,
      "learning_rate": 0.0001193717277486911,
      "loss": 0.6586,
      "step": 205
    },
    {
      "epoch": 1.4583333333333333,
      "grad_norm": 0.5606387853622437,
      "learning_rate": 0.00011675392670157068,
      "loss": 0.5827,
      "step": 210
    },
    {
      "epoch": 1.4930555555555556,
      "grad_norm": 0.7301760315895081,
      "learning_rate": 0.00011413612565445027,
      "loss": 0.6343,
      "step": 215
    },
    {
      "epoch": 1.5277777777777777,
      "grad_norm": 0.6215537190437317,
      "learning_rate": 0.00011151832460732985,
      "loss": 0.6967,
      "step": 220
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.5844612121582031,
      "learning_rate": 0.00010890052356020943,
      "loss": 0.5831,
      "step": 225
    },
    {
      "epoch": 1.5972222222222223,
      "grad_norm": 0.6531478762626648,
      "learning_rate": 0.00010628272251308901,
      "loss": 0.5586,
      "step": 230
    },
    {
      "epoch": 1.6319444444444444,
      "grad_norm": 0.6617570519447327,
      "learning_rate": 0.0001036649214659686,
      "loss": 0.4622,
      "step": 235
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.5876675844192505,
      "learning_rate": 0.00010104712041884816,
      "loss": 0.4863,
      "step": 240
    },
    {
      "epoch": 1.7013888888888888,
      "grad_norm": 0.6678897142410278,
      "learning_rate": 9.842931937172776e-05,
      "loss": 0.4397,
      "step": 245
    },
    {
      "epoch": 1.7361111111111112,
      "grad_norm": 0.6826710104942322,
      "learning_rate": 9.581151832460732e-05,
      "loss": 0.4996,
      "step": 250
    },
    {
      "epoch": 1.7708333333333335,
      "grad_norm": 0.651406466960907,
      "learning_rate": 9.319371727748692e-05,
      "loss": 0.6108,
      "step": 255
    },
    {
      "epoch": 1.8055555555555556,
      "grad_norm": 0.6858854293823242,
      "learning_rate": 9.05759162303665e-05,
      "loss": 0.4929,
      "step": 260
    },
    {
      "epoch": 1.8402777777777777,
      "grad_norm": 0.6023811101913452,
      "learning_rate": 8.795811518324608e-05,
      "loss": 0.4669,
      "step": 265
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.615983784198761,
      "learning_rate": 8.534031413612566e-05,
      "loss": 0.59,
      "step": 270
    },
    {
      "epoch": 1.9097222222222223,
      "grad_norm": 0.7829121351242065,
      "learning_rate": 8.272251308900524e-05,
      "loss": 0.5846,
      "step": 275
    },
    {
      "epoch": 1.9444444444444444,
      "grad_norm": 0.6411824226379395,
      "learning_rate": 8.010471204188482e-05,
      "loss": 0.413,
      "step": 280
    },
    {
      "epoch": 1.9791666666666665,
      "grad_norm": 0.6091701984405518,
      "learning_rate": 7.74869109947644e-05,
      "loss": 0.4117,
      "step": 285
    },
    {
      "epoch": 2.013888888888889,
      "grad_norm": 0.5939719080924988,
      "learning_rate": 7.486910994764398e-05,
      "loss": 0.3464,
      "step": 290
    },
    {
      "epoch": 2.048611111111111,
      "grad_norm": 0.8333393335342407,
      "learning_rate": 7.225130890052356e-05,
      "loss": 0.2968,
      "step": 295
    },
    {
      "epoch": 2.0833333333333335,
      "grad_norm": 0.5424718260765076,
      "learning_rate": 6.963350785340315e-05,
      "loss": 0.2427,
      "step": 300
    },
    {
      "epoch": 2.0833333333333335,
      "eval_loss": 0.5089412927627563,
      "eval_runtime": 19.8357,
      "eval_samples_per_second": 9.629,
      "eval_steps_per_second": 1.613,
      "step": 300
    },
    {
      "epoch": 2.1180555555555554,
      "grad_norm": 0.6733362078666687,
      "learning_rate": 6.701570680628273e-05,
      "loss": 0.2759,
      "step": 305
    },
    {
      "epoch": 2.1527777777777777,
      "grad_norm": 0.63869708776474,
      "learning_rate": 6.439790575916231e-05,
      "loss": 0.2387,
      "step": 310
    },
    {
      "epoch": 2.1875,
      "grad_norm": 0.5781670808792114,
      "learning_rate": 6.178010471204189e-05,
      "loss": 0.2465,
      "step": 315
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 0.6625104546546936,
      "learning_rate": 5.916230366492147e-05,
      "loss": 0.2452,
      "step": 320
    },
    {
      "epoch": 2.2569444444444446,
      "grad_norm": 0.6529480218887329,
      "learning_rate": 5.654450261780106e-05,
      "loss": 0.1973,
      "step": 325
    },
    {
      "epoch": 2.2916666666666665,
      "grad_norm": 0.5636107325553894,
      "learning_rate": 5.3926701570680624e-05,
      "loss": 0.2354,
      "step": 330
    },
    {
      "epoch": 2.326388888888889,
      "grad_norm": 0.4926435649394989,
      "learning_rate": 5.130890052356021e-05,
      "loss": 0.1707,
      "step": 335
    },
    {
      "epoch": 2.361111111111111,
      "grad_norm": 0.6436705589294434,
      "learning_rate": 4.869109947643979e-05,
      "loss": 0.245,
      "step": 340
    },
    {
      "epoch": 2.3958333333333335,
      "grad_norm": 0.6189023852348328,
      "learning_rate": 4.6073298429319374e-05,
      "loss": 0.2417,
      "step": 345
    },
    {
      "epoch": 2.4305555555555554,
      "grad_norm": 0.6670970320701599,
      "learning_rate": 4.3455497382198955e-05,
      "loss": 0.2047,
      "step": 350
    },
    {
      "epoch": 2.4652777777777777,
      "grad_norm": 0.47659561038017273,
      "learning_rate": 4.0837696335078535e-05,
      "loss": 0.1757,
      "step": 355
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.4434764087200165,
      "learning_rate": 3.8219895287958116e-05,
      "loss": 0.1829,
      "step": 360
    },
    {
      "epoch": 2.5347222222222223,
      "grad_norm": 0.5809481143951416,
      "learning_rate": 3.56020942408377e-05,
      "loss": 0.1696,
      "step": 365
    },
    {
      "epoch": 2.5694444444444446,
      "grad_norm": 0.6467562317848206,
      "learning_rate": 3.298429319371728e-05,
      "loss": 0.1939,
      "step": 370
    },
    {
      "epoch": 2.6041666666666665,
      "grad_norm": 0.8204236030578613,
      "learning_rate": 3.036649214659686e-05,
      "loss": 0.2066,
      "step": 375
    },
    {
      "epoch": 2.638888888888889,
      "grad_norm": 0.6346448659896851,
      "learning_rate": 2.7748691099476443e-05,
      "loss": 0.2022,
      "step": 380
    },
    {
      "epoch": 2.673611111111111,
      "grad_norm": 0.5839046835899353,
      "learning_rate": 2.513089005235602e-05,
      "loss": 0.2746,
      "step": 385
    },
    {
      "epoch": 2.7083333333333335,
      "grad_norm": 0.7604065537452698,
      "learning_rate": 2.25130890052356e-05,
      "loss": 0.206,
      "step": 390
    },
    {
      "epoch": 2.7430555555555554,
      "grad_norm": 0.5631184577941895,
      "learning_rate": 1.9895287958115183e-05,
      "loss": 0.188,
      "step": 395
    },
    {
      "epoch": 2.7777777777777777,
      "grad_norm": 0.5149999260902405,
      "learning_rate": 1.7277486910994763e-05,
      "loss": 0.1844,
      "step": 400
    },
    {
      "epoch": 2.7777777777777777,
      "eval_loss": 0.34772011637687683,
      "eval_runtime": 19.7739,
      "eval_samples_per_second": 9.659,
      "eval_steps_per_second": 1.618,
      "step": 400
    }
  ],
  "logging_steps": 5,
  "max_steps": 432,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.852700659705446e+16,
  "train_batch_size": 6,
  "trial_name": null,
  "trial_params": null
}