{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 44996,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.9444395057338435e-05,
      "loss": 2.1204,
      "step": 500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.888879011467686e-05,
      "loss": 1.8331,
      "step": 1000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8333185172015294e-05,
      "loss": 1.7746,
      "step": 1500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.777758022935373e-05,
      "loss": 1.7646,
      "step": 2000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.722197528669215e-05,
      "loss": 1.7504,
      "step": 2500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.6666370344030586e-05,
      "loss": 1.7338,
      "step": 3000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.611076540136901e-05,
      "loss": 1.713,
      "step": 3500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.555516045870744e-05,
      "loss": 1.6983,
      "step": 4000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.499955551604587e-05,
      "loss": 1.7004,
      "step": 4500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.44439505733843e-05,
      "loss": 1.7119,
      "step": 5000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.388834563072273e-05,
      "loss": 1.6919,
      "step": 5500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.333274068806116e-05,
      "loss": 1.6658,
      "step": 6000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.2777135745399595e-05,
      "loss": 1.6654,
      "step": 6500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.222153080273802e-05,
      "loss": 1.671,
      "step": 7000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.1665925860076454e-05,
      "loss": 1.6673,
      "step": 7500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.1110320917414887e-05,
      "loss": 1.6761,
      "step": 8000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.055471597475331e-05,
      "loss": 1.6663,
      "step": 8500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.9999111032091745e-05,
      "loss": 1.6625,
      "step": 9000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.944350608943017e-05,
      "loss": 1.6562,
      "step": 9500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.8887901146768604e-05,
      "loss": 1.65,
      "step": 10000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.833229620410703e-05,
      "loss": 1.6459,
      "step": 10500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.777669126144546e-05,
      "loss": 1.6298,
      "step": 11000
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.7221086318783896e-05,
      "loss": 1.6273,
      "step": 11500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.666548137612232e-05,
      "loss": 1.6184,
      "step": 12000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.6109876433460755e-05,
      "loss": 1.6359,
      "step": 12500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.555427149079919e-05,
      "loss": 1.6123,
      "step": 13000
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.4998666548137613e-05,
      "loss": 1.6115,
      "step": 13500
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.4443061605476046e-05,
      "loss": 1.5998,
      "step": 14000
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.388745666281448e-05,
      "loss": 1.606,
      "step": 14500
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.3331851720152905e-05,
      "loss": 1.6176,
      "step": 15000
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.277624677749133e-05,
      "loss": 1.6295,
      "step": 15500
    },
    {
      "epoch": 1.42,
      "learning_rate": 3.2220641834829764e-05,
      "loss": 1.599,
      "step": 16000
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.166503689216819e-05,
      "loss": 1.6,
      "step": 16500
    },
    {
      "epoch": 1.51,
      "learning_rate": 3.110943194950662e-05,
      "loss": 1.6144,
      "step": 17000
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.0553827006845055e-05,
      "loss": 1.6121,
      "step": 17500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.9998222064183485e-05,
      "loss": 1.5894,
      "step": 18000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.9442617121521914e-05,
      "loss": 1.6109,
      "step": 18500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.8887012178860344e-05,
      "loss": 1.5859,
      "step": 19000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.8331407236198777e-05,
      "loss": 1.6049,
      "step": 19500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.7775802293537206e-05,
      "loss": 1.5907,
      "step": 20000
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.7220197350875635e-05,
      "loss": 1.6062,
      "step": 20500
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.6664592408214068e-05,
      "loss": 1.5877,
      "step": 21000
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.610898746555249e-05,
      "loss": 1.6094,
      "step": 21500
    },
    {
      "epoch": 1.96,
      "learning_rate": 2.5553382522890924e-05,
      "loss": 1.6086,
      "step": 22000
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.4997777580229356e-05,
      "loss": 1.5772,
      "step": 22500
    },
    {
      "epoch": 2.04,
      "learning_rate": 2.4442172637567782e-05,
      "loss": 1.5625,
      "step": 23000
    },
    {
      "epoch": 2.09,
      "learning_rate": 2.3886567694906215e-05,
      "loss": 1.5736,
      "step": 23500
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.3330962752244645e-05,
      "loss": 1.5649,
      "step": 24000
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.2775357809583074e-05,
      "loss": 1.5764,
      "step": 24500
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.2219752866921507e-05,
      "loss": 1.5899,
      "step": 25000
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.1664147924259936e-05,
      "loss": 1.5876,
      "step": 25500
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.1108542981598366e-05,
      "loss": 1.5827,
      "step": 26000
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.0552938038936795e-05,
      "loss": 1.5525,
      "step": 26500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.9997333096275224e-05,
      "loss": 1.5801,
      "step": 27000
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.9441728153613657e-05,
      "loss": 1.5754,
      "step": 27500
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.8886123210952087e-05,
      "loss": 1.5766,
      "step": 28000
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.8330518268290516e-05,
      "loss": 1.5756,
      "step": 28500
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.7774913325628945e-05,
      "loss": 1.5625,
      "step": 29000
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.7219308382967375e-05,
      "loss": 1.5679,
      "step": 29500
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.6663703440305804e-05,
      "loss": 1.5466,
      "step": 30000
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.6108098497644237e-05,
      "loss": 1.5764,
      "step": 30500
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.5552493554982667e-05,
      "loss": 1.5482,
      "step": 31000
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.4996888612321098e-05,
      "loss": 1.5606,
      "step": 31500
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.4441283669659525e-05,
      "loss": 1.5573,
      "step": 32000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.3885678726997955e-05,
      "loss": 1.5609,
      "step": 32500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.3330073784336386e-05,
      "loss": 1.5708,
      "step": 33000
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.2774468841674817e-05,
      "loss": 1.5692,
      "step": 33500
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.2218863899013246e-05,
      "loss": 1.5581,
      "step": 34000
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.1663258956351676e-05,
      "loss": 1.5494,
      "step": 34500
    },
    {
      "epoch": 3.11,
      "learning_rate": 1.1107654013690107e-05,
      "loss": 1.5708,
      "step": 35000
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.0552049071028536e-05,
      "loss": 1.5547,
      "step": 35500
    },
    {
      "epoch": 3.2,
      "learning_rate": 9.996444128366966e-06,
      "loss": 1.5562,
      "step": 36000
    },
    {
      "epoch": 3.24,
      "learning_rate": 9.440839185705397e-06,
      "loss": 1.5494,
      "step": 36500
    },
    {
      "epoch": 3.29,
      "learning_rate": 8.885234243043826e-06,
      "loss": 1.5419,
      "step": 37000
    },
    {
      "epoch": 3.33,
      "learning_rate": 8.329629300382257e-06,
      "loss": 1.589,
      "step": 37500
    },
    {
      "epoch": 3.38,
      "learning_rate": 7.774024357720687e-06,
      "loss": 1.5522,
      "step": 38000
    },
    {
      "epoch": 3.42,
      "learning_rate": 7.218419415059116e-06,
      "loss": 1.5628,
      "step": 38500
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.662814472397546e-06,
      "loss": 1.5303,
      "step": 39000
    },
    {
      "epoch": 3.51,
      "learning_rate": 6.1072095297359775e-06,
      "loss": 1.5304,
      "step": 39500
    },
    {
      "epoch": 3.56,
      "learning_rate": 5.551604587074407e-06,
      "loss": 1.5604,
      "step": 40000
    },
    {
      "epoch": 3.6,
      "learning_rate": 4.995999644412837e-06,
      "loss": 1.5528,
      "step": 40500
    },
    {
      "epoch": 3.64,
      "learning_rate": 4.4403947017512674e-06,
      "loss": 1.5454,
      "step": 41000
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.884789759089697e-06,
      "loss": 1.5386,
      "step": 41500
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.329184816428127e-06,
      "loss": 1.5465,
      "step": 42000
    },
    {
      "epoch": 3.78,
      "learning_rate": 2.773579873766557e-06,
      "loss": 1.5603,
      "step": 42500
    },
    {
      "epoch": 3.82,
      "learning_rate": 2.217974931104987e-06,
      "loss": 1.549,
      "step": 43000
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.662369988443417e-06,
      "loss": 1.5602,
      "step": 43500
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.1067650457818473e-06,
      "loss": 1.5377,
      "step": 44000
    },
    {
      "epoch": 3.96,
      "learning_rate": 5.511601031202774e-07,
      "loss": 1.5505,
      "step": 44500
    },
    {
      "epoch": 4.0,
      "step": 44996,
      "total_flos": 2.071270394312233e+17,
      "train_loss": 1.610106024723899,
      "train_runtime": 44159.403,
      "train_samples_per_second": 6.113,
      "train_steps_per_second": 1.019
    }
  ],
  "max_steps": 44996,
  "num_train_epochs": 4,
  "total_flos": 2.071270394312233e+17,
  "trial_name": null,
  "trial_params": null
}