{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 6158,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.9675219227021763e-05,
      "loss": 2.0573,
      "step": 100
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.935043845404352e-05,
      "loss": 0.2273,
      "step": 200
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.902565768106528e-05,
      "loss": 0.1299,
      "step": 300
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.870087690808704e-05,
      "loss": 0.0909,
      "step": 400
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.8376096135108803e-05,
      "loss": 0.0606,
      "step": 500
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.8051315362130565e-05,
      "loss": 0.053,
      "step": 600
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.7726534589152323e-05,
      "loss": 0.0426,
      "step": 700
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.740175381617408e-05,
      "loss": 0.0228,
      "step": 800
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.7076973043195843e-05,
      "loss": 0.0266,
      "step": 900
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.6752192270217605e-05,
      "loss": 0.033,
      "step": 1000
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.6427411497239367e-05,
      "loss": 0.0226,
      "step": 1100
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.6102630724261125e-05,
      "loss": 0.03,
      "step": 1200
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.5777849951282887e-05,
      "loss": 0.0236,
      "step": 1300
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.5453069178304645e-05,
      "loss": 0.0328,
      "step": 1400
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.5128288405326405e-05,
      "loss": 0.0103,
      "step": 1500
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.4803507632348167e-05,
      "loss": 0.0135,
      "step": 1600
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.4478726859369926e-05,
      "loss": 0.0058,
      "step": 1700
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.4153946086391688e-05,
      "loss": 0.0212,
      "step": 1800
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.3829165313413446e-05,
      "loss": 0.0179,
      "step": 1900
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.3504384540435206e-05,
      "loss": 0.0216,
      "step": 2000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.3179603767456968e-05,
      "loss": 0.0249,
      "step": 2100
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.2854822994478728e-05,
      "loss": 0.0455,
      "step": 2200
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.253004222150049e-05,
      "loss": 0.0123,
      "step": 2300
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.2205261448522248e-05,
      "loss": 0.0148,
      "step": 2400
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.1880480675544008e-05,
      "loss": 0.0255,
      "step": 2500
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.155569990256577e-05,
      "loss": 0.0055,
      "step": 2600
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.123091912958753e-05,
      "loss": 0.0061,
      "step": 2700
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.090613835660929e-05,
      "loss": 0.0229,
      "step": 2800
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.058135758363105e-05,
      "loss": 0.007,
      "step": 2900
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.025657681065281e-05,
      "loss": 0.0144,
      "step": 3000
    },
    {
      "epoch": 1.01,
      "learning_rate": 9.93179603767457e-06,
      "loss": 0.007,
      "step": 3100
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.607015264696331e-06,
      "loss": 0.0056,
      "step": 3200
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.282234491718091e-06,
      "loss": 0.0063,
      "step": 3300
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.957453718739851e-06,
      "loss": 0.0193,
      "step": 3400
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.632672945761611e-06,
      "loss": 0.0115,
      "step": 3500
    },
    {
      "epoch": 1.17,
      "learning_rate": 8.307892172783371e-06,
      "loss": 0.0035,
      "step": 3600
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.983111399805133e-06,
      "loss": 0.009,
      "step": 3700
    },
    {
      "epoch": 1.23,
      "learning_rate": 7.658330626826891e-06,
      "loss": 0.0122,
      "step": 3800
    },
    {
      "epoch": 1.27,
      "learning_rate": 7.333549853848653e-06,
      "loss": 0.001,
      "step": 3900
    },
    {
      "epoch": 1.3,
      "learning_rate": 7.008769080870414e-06,
      "loss": 0.0003,
      "step": 4000
    },
    {
      "epoch": 1.33,
      "learning_rate": 6.683988307892173e-06,
      "loss": 0.0114,
      "step": 4100
    },
    {
      "epoch": 1.36,
      "learning_rate": 6.359207534913934e-06,
      "loss": 0.0036,
      "step": 4200
    },
    {
      "epoch": 1.4,
      "learning_rate": 6.034426761935695e-06,
      "loss": 0.0109,
      "step": 4300
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.709645988957454e-06,
      "loss": 0.0006,
      "step": 4400
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.384865215979215e-06,
      "loss": 0.005,
      "step": 4500
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.0600844430009746e-06,
      "loss": 0.0046,
      "step": 4600
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.735303670022735e-06,
      "loss": 0.0064,
      "step": 4700
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.410522897044495e-06,
      "loss": 0.0063,
      "step": 4800
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.085742124066255e-06,
      "loss": 0.0004,
      "step": 4900
    },
    {
      "epoch": 1.62,
      "learning_rate": 3.760961351088016e-06,
      "loss": 0.0106,
      "step": 5000
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.436180578109776e-06,
      "loss": 0.004,
      "step": 5100
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.111399805131536e-06,
      "loss": 0.0053,
      "step": 5200
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.7866190321532966e-06,
      "loss": 0.0096,
      "step": 5300
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.461838259175057e-06,
      "loss": 0.0009,
      "step": 5400
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.1370574861968174e-06,
      "loss": 0.0068,
      "step": 5500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.8122767132185776e-06,
      "loss": 0.0054,
      "step": 5600
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.487495940240338e-06,
      "loss": 0.0005,
      "step": 5700
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.1627151672620982e-06,
      "loss": 0.0012,
      "step": 5800
    },
    {
      "epoch": 1.92,
      "learning_rate": 8.379343942838584e-07,
      "loss": 0.0093,
      "step": 5900
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.131536213056187e-07,
      "loss": 0.0036,
      "step": 6000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.8837284832737905e-07,
      "loss": 0.0005,
      "step": 6100
    }
  ],
  "max_steps": 6158,
  "num_train_epochs": 2,
  "total_flos": 2.1662462190359347e+19,
  "trial_name": null,
  "trial_params": null
}