{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9999770162495116,
  "global_step": 21754,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.885538291808403e-05,
      "loss": 3.5522,
      "step": 500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.77061689804174e-05,
      "loss": 2.3372,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.655695504275076e-05,
      "loss": 2.0678,
      "step": 1500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.540774110508412e-05,
      "loss": 1.9329,
      "step": 2000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.425852716741749e-05,
      "loss": 1.8415,
      "step": 2500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.3109313229750856e-05,
      "loss": 1.7635,
      "step": 3000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.196009929208422e-05,
      "loss": 1.7149,
      "step": 3500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.0810885354417584e-05,
      "loss": 1.6758,
      "step": 4000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.9663969844626276e-05,
      "loss": 1.6338,
      "step": 4500
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8514755906959643e-05,
      "loss": 1.6054,
      "step": 5000
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.7365541969293004e-05,
      "loss": 1.5783,
      "step": 5500
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.6216328031626365e-05,
      "loss": 1.5492,
      "step": 6000
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.5069412521835064e-05,
      "loss": 1.515,
      "step": 6500
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.392019858416843e-05,
      "loss": 1.4988,
      "step": 7000
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.27709846465018e-05,
      "loss": 1.4807,
      "step": 7500
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.162177070883516e-05,
      "loss": 1.4654,
      "step": 8000
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.047255677116852e-05,
      "loss": 1.446,
      "step": 8500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9323342833501886e-05,
      "loss": 1.4331,
      "step": 9000
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.817412889583525e-05,
      "loss": 1.4133,
      "step": 9500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.702491495816861e-05,
      "loss": 1.4061,
      "step": 10000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.587570102050198e-05,
      "loss": 1.3986,
      "step": 10500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4726487082835342e-05,
      "loss": 1.3898,
      "step": 11000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3577273145168706e-05,
      "loss": 1.3683,
      "step": 11500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.24303576353774e-05,
      "loss": 1.3489,
      "step": 12000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.128114369771077e-05,
      "loss": 1.3377,
      "step": 12500
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.013192976004413e-05,
      "loss": 1.3306,
      "step": 13000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.898501425025283e-05,
      "loss": 1.3215,
      "step": 13500
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7835800312586192e-05,
      "loss": 1.3189,
      "step": 14000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6686586374919556e-05,
      "loss": 1.3091,
      "step": 14500
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.553737243725292e-05,
      "loss": 1.3026,
      "step": 15000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4388158499586286e-05,
      "loss": 1.2931,
      "step": 15500
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.324124298979498e-05,
      "loss": 1.2871,
      "step": 16000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2092029052128345e-05,
      "loss": 1.2694,
      "step": 16500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.094281511446171e-05,
      "loss": 1.2785,
      "step": 17000
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.793601176795073e-06,
      "loss": 1.2747,
      "step": 17500
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.644387239128435e-06,
      "loss": 1.2625,
      "step": 18000
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.495173301461801e-06,
      "loss": 1.2545,
      "step": 18500
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.345959363795165e-06,
      "loss": 1.2436,
      "step": 19000
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.196745426128529e-06,
      "loss": 1.2379,
      "step": 19500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.047531488461892e-06,
      "loss": 1.2485,
      "step": 20000
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.900615978670589e-06,
      "loss": 1.2311,
      "step": 20500
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.7514020410039533e-06,
      "loss": 1.2309,
      "step": 21000
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.021881033373173e-07,
      "loss": 1.2309,
      "step": 21500
    },
    {
      "epoch": 1.0,
      "step": 21754,
      "total_flos": 1.3729453149277133e+17,
      "train_loss": 1.50102462691398,
      "train_runtime": 6413.5937,
      "train_samples_per_second": 217.08,
      "train_steps_per_second": 3.392
    }
  ],
  "max_steps": 21754,
  "num_train_epochs": 1,
  "total_flos": 1.3729453149277133e+17,
  "trial_name": null,
  "trial_params": null
}