{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.581551618814905,
  "global_step": 15000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 1.0070493454179256e-06,
      "loss": 0.7017,
      "step": 200
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.0140986908358513e-06,
      "loss": 0.6899,
      "step": 400
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.0211480362537765e-06,
      "loss": 0.5926,
      "step": 600
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.028197381671703e-06,
      "loss": 0.4967,
      "step": 800
    },
    {
      "epoch": 0.31,
      "learning_rate": 5.035246727089627e-06,
      "loss": 0.4748,
      "step": 1000
    },
    {
      "epoch": 0.31,
      "eval_accuracy": 0.8350723045945451,
      "eval_loss": 0.3857437074184418,
      "eval_runtime": 24.3432,
      "eval_samples_per_second": 224.416,
      "eval_steps_per_second": 7.025,
      "step": 1000
    },
    {
      "epoch": 0.37,
      "learning_rate": 6.042296072507553e-06,
      "loss": 0.4313,
      "step": 1200
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.0493454179254795e-06,
      "loss": 0.4277,
      "step": 1400
    },
    {
      "epoch": 0.49,
      "learning_rate": 8.056394763343405e-06,
      "loss": 0.4182,
      "step": 1600
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.06344410876133e-06,
      "loss": 0.3942,
      "step": 1800
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.995447746634584e-06,
      "loss": 0.3548,
      "step": 2000
    },
    {
      "epoch": 0.61,
      "eval_accuracy": 0.8813838550247117,
      "eval_loss": 0.3002248704433441,
      "eval_runtime": 23.5094,
      "eval_samples_per_second": 232.375,
      "eval_steps_per_second": 7.274,
      "step": 2000
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.930415555700073e-06,
      "loss": 0.3842,
      "step": 2200
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.86538336476556e-06,
      "loss": 0.3613,
      "step": 2400
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.800351173831047e-06,
      "loss": 0.357,
      "step": 2600
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.735318982896535e-06,
      "loss": 0.3459,
      "step": 2800
    },
    {
      "epoch": 0.92,
      "learning_rate": 9.670286791962022e-06,
      "loss": 0.3244,
      "step": 3000
    },
    {
      "epoch": 0.92,
      "eval_accuracy": 0.8949295259015193,
      "eval_loss": 0.2588460445404053,
      "eval_runtime": 29.4325,
      "eval_samples_per_second": 185.611,
      "eval_steps_per_second": 5.81,
      "step": 3000
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.60525460102751e-06,
      "loss": 0.3215,
      "step": 3200
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.540222410092996e-06,
      "loss": 0.3189,
      "step": 3400
    },
    {
      "epoch": 1.1,
      "learning_rate": 9.475190219158484e-06,
      "loss": 0.3007,
      "step": 3600
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.410158028223972e-06,
      "loss": 0.3032,
      "step": 3800
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.34512583728946e-06,
      "loss": 0.2853,
      "step": 4000
    },
    {
      "epoch": 1.22,
      "eval_accuracy": 0.8989566172432729,
      "eval_loss": 0.2614916265010834,
      "eval_runtime": 26.1105,
      "eval_samples_per_second": 209.226,
      "eval_steps_per_second": 6.549,
      "step": 4000
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.280093646354947e-06,
      "loss": 0.2773,
      "step": 4200
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.215061455420435e-06,
      "loss": 0.277,
      "step": 4400
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.15002926448592e-06,
      "loss": 0.2913,
      "step": 4600
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.084997073551408e-06,
      "loss": 0.2767,
      "step": 4800
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.019964882616896e-06,
      "loss": 0.277,
      "step": 5000
    },
    {
      "epoch": 1.53,
      "eval_accuracy": 0.9011532125205931,
      "eval_loss": 0.24724067747592926,
      "eval_runtime": 24.2716,
      "eval_samples_per_second": 225.078,
      "eval_steps_per_second": 7.045,
      "step": 5000
    },
    {
      "epoch": 1.59,
      "learning_rate": 8.954932691682384e-06,
      "loss": 0.2723,
      "step": 5200
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.889900500747872e-06,
      "loss": 0.2767,
      "step": 5400
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.824868309813358e-06,
      "loss": 0.2747,
      "step": 5600
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.759836118878845e-06,
      "loss": 0.264,
      "step": 5800
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.694803927944333e-06,
      "loss": 0.2604,
      "step": 6000
    },
    {
      "epoch": 1.83,
      "eval_accuracy": 0.9104887424492037,
      "eval_loss": 0.22219303250312805,
      "eval_runtime": 23.7933,
      "eval_samples_per_second": 229.602,
      "eval_steps_per_second": 7.187,
      "step": 6000
    },
    {
      "epoch": 1.89,
      "learning_rate": 8.62977173700982e-06,
      "loss": 0.2717,
      "step": 6200
    },
    {
      "epoch": 1.95,
      "learning_rate": 8.564739546075308e-06,
      "loss": 0.2733,
      "step": 6400
    },
    {
      "epoch": 2.02,
      "learning_rate": 8.499707355140794e-06,
      "loss": 0.245,
      "step": 6600
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.434675164206282e-06,
      "loss": 0.2251,
      "step": 6800
    },
    {
      "epoch": 2.14,
      "learning_rate": 8.36964297327177e-06,
      "loss": 0.229,
      "step": 7000
    },
    {
      "epoch": 2.14,
      "eval_accuracy": 0.9139666849716274,
      "eval_loss": 0.21798129379749298,
      "eval_runtime": 24.6881,
      "eval_samples_per_second": 221.28,
      "eval_steps_per_second": 6.926,
      "step": 7000
    },
    {
      "epoch": 2.2,
      "learning_rate": 8.304610782337258e-06,
      "loss": 0.2268,
      "step": 7200
    },
    {
      "epoch": 2.26,
      "learning_rate": 8.239578591402745e-06,
      "loss": 0.2317,
      "step": 7400
    },
    {
      "epoch": 2.32,
      "learning_rate": 8.174546400468233e-06,
      "loss": 0.2306,
      "step": 7600
    },
    {
      "epoch": 2.38,
      "learning_rate": 8.109514209533719e-06,
      "loss": 0.236,
      "step": 7800
    },
    {
      "epoch": 2.44,
      "learning_rate": 8.044482018599207e-06,
      "loss": 0.2257,
      "step": 8000
    },
    {
      "epoch": 2.44,
      "eval_accuracy": 0.918542925132711,
      "eval_loss": 0.2288324385881424,
      "eval_runtime": 24.6216,
      "eval_samples_per_second": 221.878,
      "eval_steps_per_second": 6.945,
      "step": 8000
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.979449827664695e-06,
      "loss": 0.2265,
      "step": 8200
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.914417636730182e-06,
      "loss": 0.2137,
      "step": 8400
    },
    {
      "epoch": 2.63,
      "learning_rate": 7.84938544579567e-06,
      "loss": 0.2331,
      "step": 8600
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.784353254861156e-06,
      "loss": 0.2212,
      "step": 8800
    },
    {
      "epoch": 2.75,
      "learning_rate": 7.719321063926645e-06,
      "loss": 0.2292,
      "step": 9000
    },
    {
      "epoch": 2.75,
      "eval_accuracy": 0.919092073952041,
      "eval_loss": 0.2137669026851654,
      "eval_runtime": 25.2669,
      "eval_samples_per_second": 216.212,
      "eval_steps_per_second": 6.768,
      "step": 9000
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.654288872992131e-06,
      "loss": 0.2285,
      "step": 9200
    },
    {
      "epoch": 2.87,
      "learning_rate": 7.58925668205762e-06,
      "loss": 0.2292,
      "step": 9400
    },
    {
      "epoch": 2.93,
      "learning_rate": 7.524224491123107e-06,
      "loss": 0.2153,
      "step": 9600
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.459192300188594e-06,
      "loss": 0.2208,
      "step": 9800
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.394160109254081e-06,
      "loss": 0.1947,
      "step": 10000
    },
    {
      "epoch": 3.05,
      "eval_accuracy": 0.914332784184514,
      "eval_loss": 0.23720914125442505,
      "eval_runtime": 24.8628,
      "eval_samples_per_second": 219.726,
      "eval_steps_per_second": 6.878,
      "step": 10000
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.329127918319568e-06,
      "loss": 0.1857,
      "step": 10200
    },
    {
      "epoch": 3.18,
      "learning_rate": 7.264095727385057e-06,
      "loss": 0.202,
      "step": 10400
    },
    {
      "epoch": 3.24,
      "learning_rate": 7.199063536450544e-06,
      "loss": 0.1804,
      "step": 10600
    },
    {
      "epoch": 3.3,
      "learning_rate": 7.134031345516031e-06,
      "loss": 0.1893,
      "step": 10800
    },
    {
      "epoch": 3.36,
      "learning_rate": 7.068999154581518e-06,
      "loss": 0.1946,
      "step": 11000
    },
    {
      "epoch": 3.36,
      "eval_accuracy": 0.924400512538898,
      "eval_loss": 0.21092592179775238,
      "eval_runtime": 23.8519,
      "eval_samples_per_second": 229.038,
      "eval_steps_per_second": 7.169,
      "step": 11000
    },
    {
      "epoch": 3.42,
      "learning_rate": 7.003966963647005e-06,
      "loss": 0.1889,
      "step": 11200
    },
    {
      "epoch": 3.48,
      "learning_rate": 6.938934772712494e-06,
      "loss": 0.1834,
      "step": 11400
    },
    {
      "epoch": 3.54,
      "learning_rate": 6.873902581777981e-06,
      "loss": 0.1887,
      "step": 11600
    },
    {
      "epoch": 3.6,
      "learning_rate": 6.808870390843468e-06,
      "loss": 0.1865,
      "step": 11800
    },
    {
      "epoch": 3.67,
      "learning_rate": 6.743838199908955e-06,
      "loss": 0.1975,
      "step": 12000
    },
    {
      "epoch": 3.67,
      "eval_accuracy": 0.9178107267069375,
      "eval_loss": 0.21930640935897827,
      "eval_runtime": 24.5268,
      "eval_samples_per_second": 222.736,
      "eval_steps_per_second": 6.972,
      "step": 12000
    },
    {
      "epoch": 3.73,
      "learning_rate": 6.678806008974443e-06,
      "loss": 0.1905,
      "step": 12200
    },
    {
      "epoch": 3.79,
      "learning_rate": 6.613773818039931e-06,
      "loss": 0.1925,
      "step": 12400
    },
    {
      "epoch": 3.85,
      "learning_rate": 6.5487416271054175e-06,
      "loss": 0.1962,
      "step": 12600
    },
    {
      "epoch": 3.91,
      "learning_rate": 6.483709436170905e-06,
      "loss": 0.1821,
      "step": 12800
    },
    {
      "epoch": 3.97,
      "learning_rate": 6.418677245236392e-06,
      "loss": 0.1832,
      "step": 13000
    },
    {
      "epoch": 3.97,
      "eval_accuracy": 0.9207395204100312,
      "eval_loss": 0.22072818875312805,
      "eval_runtime": 25.907,
      "eval_samples_per_second": 210.87,
      "eval_steps_per_second": 6.601,
      "step": 13000
    },
    {
      "epoch": 4.03,
      "learning_rate": 6.35364505430188e-06,
      "loss": 0.1752,
      "step": 13200
    },
    {
      "epoch": 4.09,
      "learning_rate": 6.2886128633673675e-06,
      "loss": 0.1565,
      "step": 13400
    },
    {
      "epoch": 4.15,
      "learning_rate": 6.223580672432855e-06,
      "loss": 0.1647,
      "step": 13600
    },
    {
      "epoch": 4.22,
      "learning_rate": 6.158548481498342e-06,
      "loss": 0.1488,
      "step": 13800
    },
    {
      "epoch": 4.28,
      "learning_rate": 6.09351629056383e-06,
      "loss": 0.1639,
      "step": 14000
    },
    {
      "epoch": 4.28,
      "eval_accuracy": 0.9207395204100312,
      "eval_loss": 0.2469523698091507,
      "eval_runtime": 24.3069,
      "eval_samples_per_second": 224.751,
      "eval_steps_per_second": 7.035,
      "step": 14000
    },
    {
      "epoch": 4.34,
      "learning_rate": 6.028484099629317e-06,
      "loss": 0.1652,
      "step": 14200
    },
    {
      "epoch": 4.4,
      "learning_rate": 5.9634519086948035e-06,
      "loss": 0.1675,
      "step": 14400
    },
    {
      "epoch": 4.46,
      "learning_rate": 5.898419717760292e-06,
      "loss": 0.1517,
      "step": 14600
    },
    {
      "epoch": 4.52,
      "learning_rate": 5.833387526825779e-06,
      "loss": 0.1543,
      "step": 14800
    },
    {
      "epoch": 4.58,
      "learning_rate": 5.768355335891267e-06,
      "loss": 0.1717,
      "step": 15000
    },
    {
      "epoch": 4.58,
      "eval_accuracy": 0.9231191652937946,
      "eval_loss": 0.23586037755012512,
      "eval_runtime": 24.1493,
      "eval_samples_per_second": 226.218,
      "eval_steps_per_second": 7.081,
      "step": 15000
    }
  ],
  "max_steps": 32740,
  "num_train_epochs": 10,
  "total_flos": 3.6794803116288e+16,
  "trial_name": null,
  "trial_params": null
}