{
  "best_metric": 1.9183450937271118,
  "best_model_checkpoint": "models/GPT2_more_function_53/checkpoint-155680",
  "epoch": 10.0,
  "global_step": 155680,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.06, "learning_rate": 1e-05, "loss": 6.0974, "step": 1000},
    {"epoch": 0.13, "learning_rate": 2e-05, "loss": 3.7504, "step": 2000},
    {"epoch": 0.19, "learning_rate": 3e-05, "loss": 3.1766, "step": 3000},
    {"epoch": 0.26, "learning_rate": 4e-05, "loss": 2.8933, "step": 4000},
    {"epoch": 0.32, "learning_rate": 5e-05, "loss": 2.7463, "step": 5000},
    {"epoch": 0.39, "learning_rate": 6e-05, "loss": 2.6511, "step": 6000},
    {"epoch": 0.45, "learning_rate": 7e-05, "loss": 2.5785, "step": 7000},
    {"epoch": 0.51, "learning_rate": 8e-05, "loss": 2.5222, "step": 8000},
    {"epoch": 0.58, "learning_rate": 9e-05, "loss": 2.4721, "step": 9000},
    {"epoch": 0.64, "learning_rate": 0.0001, "loss": 2.4349, "step": 10000},
    {"epoch": 0.71, "learning_rate": 9.931425041186162e-05, "loss": 2.4, "step": 11000},
    {"epoch": 0.77, "learning_rate": 9.862781438769907e-05, "loss": 2.3671, "step": 12000},
    {"epoch": 0.84, "learning_rate": 9.794206479956069e-05, "loss": 2.3407, "step": 13000},
    {"epoch": 0.9, "learning_rate": 9.725562877539814e-05, "loss": 2.3166, "step": 14000},
    {"epoch": 0.96, "learning_rate": 9.656987918725975e-05, "loss": 2.297, "step": 15000},
    {"epoch": 1.0, "eval_accuracy": 0.5112671312299146, "eval_loss": 2.240675687789917, "eval_runtime": 4.688, "eval_samples_per_second": 608.363, "eval_steps_per_second": 4.906, "step": 15568},
    {"epoch": 1.03, "learning_rate": 9.58834431630972e-05, "loss": 2.2735, "step": 16000},
    {"epoch": 1.09, "learning_rate": 9.519769357495882e-05, "loss": 2.2539, "step": 17000},
    {"epoch": 1.16, "learning_rate": 9.451125755079627e-05, "loss": 2.2432, "step": 18000},
    {"epoch": 1.22, "learning_rate": 9.382550796265789e-05, "loss": 2.231, "step": 19000},
    {"epoch": 1.28, "learning_rate": 9.313907193849534e-05, "loss": 2.2216, "step": 20000},
    {"epoch": 1.35, "learning_rate": 9.245332235035696e-05, "loss": 2.2111, "step": 21000},
    {"epoch": 1.41, "learning_rate": 9.17668863261944e-05, "loss": 2.205, "step": 22000},
    {"epoch": 1.48, "learning_rate": 9.108113673805602e-05, "loss": 2.1938, "step": 23000},
    {"epoch": 1.54, "learning_rate": 9.039470071389346e-05, "loss": 2.1852, "step": 24000},
    {"epoch": 1.61, "learning_rate": 8.970895112575508e-05, "loss": 2.1784, "step": 25000},
    {"epoch": 1.67, "learning_rate": 8.902251510159254e-05, "loss": 2.1713, "step": 26000},
    {"epoch": 1.73, "learning_rate": 8.833676551345414e-05, "loss": 2.1637, "step": 27000},
    {"epoch": 1.8, "learning_rate": 8.765032948929161e-05, "loss": 2.1584, "step": 28000},
    {"epoch": 1.86, "learning_rate": 8.696457990115322e-05, "loss": 2.1517, "step": 29000},
    {"epoch": 1.93, "learning_rate": 8.627814387699067e-05, "loss": 2.1458, "step": 30000},
    {"epoch": 1.99, "learning_rate": 8.559239428885229e-05, "loss": 2.1414, "step": 31000},
    {"epoch": 2.0, "eval_accuracy": 0.5348781349736612, "eval_loss": 2.101914167404175, "eval_runtime": 4.8416, "eval_samples_per_second": 589.065, "eval_steps_per_second": 4.751, "step": 31136},
    {"epoch": 2.06, "learning_rate": 8.490595826468973e-05, "loss": 2.1209, "step": 32000},
    {"epoch": 2.12, "learning_rate": 8.422020867655134e-05, "loss": 2.1174, "step": 33000},
    {"epoch": 2.18, "learning_rate": 8.35337726523888e-05, "loss": 2.1125, "step": 34000},
    {"epoch": 2.25, "learning_rate": 8.284733662822626e-05, "loss": 2.1084, "step": 35000},
    {"epoch": 2.31, "learning_rate": 8.216158704008787e-05, "loss": 2.1072, "step": 36000},
    {"epoch": 2.38, "learning_rate": 8.147515101592533e-05, "loss": 2.1029, "step": 37000},
    {"epoch": 2.44, "learning_rate": 8.078940142778694e-05, "loss": 2.1, "step": 38000},
    {"epoch": 2.51, "learning_rate": 8.010296540362439e-05, "loss": 2.097, "step": 39000},
    {"epoch": 2.57, "learning_rate": 7.941652937946183e-05, "loss": 2.0943, "step": 40000},
    {"epoch": 2.63, "learning_rate": 7.873077979132345e-05, "loss": 2.0907, "step": 41000},
    {"epoch": 2.7, "learning_rate": 7.80443437671609e-05, "loss": 2.0887, "step": 42000},
    {"epoch": 2.76, "learning_rate": 7.735859417902251e-05, "loss": 2.0848, "step": 43000},
    {"epoch": 2.83, "learning_rate": 7.667215815485998e-05, "loss": 2.0833, "step": 44000},
    {"epoch": 2.89, "learning_rate": 7.598640856672159e-05, "loss": 2.0794, "step": 45000},
    {"epoch": 2.95, "learning_rate": 7.529997254255904e-05, "loss": 2.0769, "step": 46000},
    {"epoch": 3.0, "eval_accuracy": 0.5440276750118718, "eval_loss": 2.0458028316497803, "eval_runtime": 4.8031, "eval_samples_per_second": 593.781, "eval_steps_per_second": 4.789, "step": 46704},
    {"epoch": 3.02, "learning_rate": 7.461422295442066e-05, "loss": 2.0689, "step": 47000},
    {"epoch": 3.08, "learning_rate": 7.39277869302581e-05, "loss": 2.0524, "step": 48000},
    {"epoch": 3.15, "learning_rate": 7.324203734211971e-05, "loss": 2.0519, "step": 49000},
    {"epoch": 3.21, "learning_rate": 7.255560131795716e-05, "loss": 2.0508, "step": 50000},
    {"epoch": 3.28, "learning_rate": 7.186985172981878e-05, "loss": 2.0502, "step": 51000},
    {"epoch": 3.34, "learning_rate": 7.118341570565623e-05, "loss": 2.0495, "step": 52000},
    {"epoch": 3.4, "learning_rate": 7.049766611751785e-05, "loss": 2.0476, "step": 53000},
    {"epoch": 3.47, "learning_rate": 6.981123009335531e-05, "loss": 2.0457, "step": 54000},
    {"epoch": 3.53, "learning_rate": 6.912548050521693e-05, "loss": 2.0455, "step": 55000},
    {"epoch": 3.6, "learning_rate": 6.843904448105436e-05, "loss": 2.0432, "step": 56000},
    {"epoch": 3.66, "learning_rate": 6.775329489291598e-05, "loss": 2.0418, "step": 57000},
    {"epoch": 3.73, "learning_rate": 6.706685886875343e-05, "loss": 2.0401, "step": 58000},
    {"epoch": 3.79, "learning_rate": 6.638042284459088e-05, "loss": 2.0394, "step": 59000},
    {"epoch": 3.85, "learning_rate": 6.56946732564525e-05, "loss": 2.0376, "step": 60000},
    {"epoch": 3.92, "learning_rate": 6.500823723228996e-05, "loss": 2.0354, "step": 61000},
    {"epoch": 3.98, "learning_rate": 6.432248764415157e-05, "loss": 2.0352, "step": 62000},
    {"epoch": 4.0, "eval_accuracy": 0.5512446024892049, "eval_loss": 2.008540391921997, "eval_runtime": 4.7884, "eval_samples_per_second": 595.602, "eval_steps_per_second": 4.803, "step": 62272},
    {"epoch": 4.05, "learning_rate": 6.363605161998903e-05, "loss": 2.0153, "step": 63000},
    {"epoch": 4.11, "learning_rate": 6.295030203185063e-05, "loss": 2.0116, "step": 64000},
    {"epoch": 4.18, "learning_rate": 6.226386600768808e-05, "loss": 2.0113, "step": 65000},
    {"epoch": 4.24, "learning_rate": 6.15781164195497e-05, "loss": 2.0106, "step": 66000},
    {"epoch": 4.3, "learning_rate": 6.089168039538715e-05, "loss": 2.0116, "step": 67000},
    {"epoch": 4.37, "learning_rate": 6.0205930807248766e-05, "loss": 2.0118, "step": 68000},
    {"epoch": 4.43, "learning_rate": 5.951949478308622e-05, "loss": 2.0089, "step": 69000},
    {"epoch": 4.5, "learning_rate": 5.883374519494783e-05, "loss": 2.0095, "step": 70000},
    {"epoch": 4.56, "learning_rate": 5.814730917078528e-05, "loss": 2.0088, "step": 71000},
    {"epoch": 4.62, "learning_rate": 5.746155958264689e-05, "loss": 2.0074, "step": 72000},
    {"epoch": 4.69, "learning_rate": 5.677512355848436e-05, "loss": 2.007, "step": 73000},
    {"epoch": 4.75, "learning_rate": 5.60886875343218e-05, "loss": 2.0058, "step": 74000},
    {"epoch": 4.82, "learning_rate": 5.540293794618342e-05, "loss": 2.0056, "step": 75000},
    {"epoch": 4.88, "learning_rate": 5.471650192202087e-05, "loss": 2.005, "step": 76000},
    {"epoch": 4.95, "learning_rate": 5.4030752333882485e-05, "loss": 2.0034, "step": 77000},
    {"epoch": 5.0, "eval_accuracy": 0.5555073936234829, "eval_loss": 1.9856412410736084, "eval_runtime": 4.7852, "eval_samples_per_second": 595.998, "eval_steps_per_second": 4.806, "step": 77840},
    {"epoch": 5.01, "learning_rate": 5.3344316309719936e-05, "loss": 1.9962, "step": 78000},
    {"epoch": 5.07, "learning_rate": 5.2658566721581545e-05, "loss": 1.9772, "step": 79000},
    {"epoch": 5.14, "learning_rate": 5.197213069741901e-05, "loss": 1.9792, "step": 80000},
    {"epoch": 5.2, "learning_rate": 5.128638110928061e-05, "loss": 1.981, "step": 81000},
    {"epoch": 5.27, "learning_rate": 5.059994508511807e-05, "loss": 1.9816, "step": 82000},
    {"epoch": 5.33, "learning_rate": 4.9914195496979685e-05, "loss": 1.9803, "step": 83000},
    {"epoch": 5.4, "learning_rate": 4.9227759472817136e-05, "loss": 1.9806, "step": 84000},
    {"epoch": 5.46, "learning_rate": 4.854200988467875e-05, "loss": 1.9807, "step": 85000},
    {"epoch": 5.52, "learning_rate": 4.78555738605162e-05, "loss": 1.9806, "step": 86000},
    {"epoch": 5.59, "learning_rate": 4.716982427237782e-05, "loss": 1.9804, "step": 87000},
    {"epoch": 5.65, "learning_rate": 4.648338824821527e-05, "loss": 1.9793, "step": 88000},
    {"epoch": 5.72, "learning_rate": 4.579763866007688e-05, "loss": 1.9783, "step": 89000},
    {"epoch": 5.78, "learning_rate": 4.511120263591434e-05, "loss": 1.9783, "step": 90000},
    {"epoch": 5.85, "learning_rate": 4.442476661175179e-05, "loss": 1.9776, "step": 91000},
    {"epoch": 5.91, "learning_rate": 4.37390170236134e-05, "loss": 1.9764, "step": 92000},
    {"epoch": 5.97, "learning_rate": 4.3052580999450855e-05, "loss": 1.9762, "step": 93000},
    {"epoch": 6.0, "eval_accuracy": 0.5583510949630595, "eval_loss": 1.965383768081665, "eval_runtime": 4.7681, "eval_samples_per_second": 598.141, "eval_steps_per_second": 4.824, "step": 93408},
    {"epoch": 6.04, "learning_rate": 4.236683141131247e-05, "loss": 1.9612, "step": 94000},
    {"epoch": 6.1, "learning_rate": 4.1680395387149915e-05, "loss": 1.9518, "step": 95000},
    {"epoch": 6.17, "learning_rate": 4.099464579901153e-05, "loss": 1.9535, "step": 96000},
    {"epoch": 6.23, "learning_rate": 4.030820977484899e-05, "loss": 1.9532, "step": 97000},
    {"epoch": 6.29, "learning_rate": 3.9622460186710605e-05, "loss": 1.9556, "step": 98000},
    {"epoch": 6.36, "learning_rate": 3.893602416254805e-05, "loss": 1.9562, "step": 99000},
    {"epoch": 6.42, "learning_rate": 3.8250274574409665e-05, "loss": 1.955, "step": 100000},
    {"epoch": 6.49, "learning_rate": 3.756383855024712e-05, "loss": 1.9569, "step": 101000},
    {"epoch": 6.55, "learning_rate": 3.687808896210873e-05, "loss": 1.9532, "step": 102000},
    {"epoch": 6.62, "learning_rate": 3.619165293794618e-05, "loss": 1.9554, "step": 103000},
    {"epoch": 6.68, "learning_rate": 3.55059033498078e-05, "loss": 1.9542, "step": 104000},
    {"epoch": 6.74, "learning_rate": 3.481946732564525e-05, "loss": 1.954, "step": 105000},
    {"epoch": 6.81, "learning_rate": 3.4133717737506865e-05, "loss": 1.9531, "step": 106000},
    {"epoch": 6.87, "learning_rate": 3.3447281713344316e-05, "loss": 1.9527, "step": 107000},
    {"epoch": 6.94, "learning_rate": 3.276153212520593e-05, "loss": 1.9536, "step": 108000},
    {"epoch": 7.0, "eval_accuracy": 0.5617856235712472, "eval_loss": 1.9492745399475098, "eval_runtime": 4.7695, "eval_samples_per_second": 597.965, "eval_steps_per_second": 4.822, "step": 108976},
    {"epoch": 7.0, "learning_rate": 3.207509610104338e-05, "loss": 1.9502, "step": 109000},
    {"epoch": 7.07, "learning_rate": 3.138866007688084e-05, "loss": 1.9267, "step": 110000},
    {"epoch": 7.13, "learning_rate": 3.070291048874245e-05, "loss": 1.931, "step": 111000},
    {"epoch": 7.19, "learning_rate": 3.00164744645799e-05, "loss": 1.9306, "step": 112000},
    {"epoch": 7.26, "learning_rate": 2.9330724876441517e-05, "loss": 1.9327, "step": 113000},
    {"epoch": 7.32, "learning_rate": 2.8644288852278968e-05, "loss": 1.9317, "step": 114000},
    {"epoch": 7.39, "learning_rate": 2.795853926414058e-05, "loss": 1.9319, "step": 115000},
    {"epoch": 7.45, "learning_rate": 2.7272103239978035e-05, "loss": 1.9317, "step": 116000},
    {"epoch": 7.52, "learning_rate": 2.658635365183965e-05, "loss": 1.9319, "step": 117000},
    {"epoch": 7.58, "learning_rate": 2.5899917627677102e-05, "loss": 1.9321, "step": 118000},
    {"epoch": 7.64, "learning_rate": 2.5214168039538714e-05, "loss": 1.9316, "step": 119000},
    {"epoch": 7.71, "learning_rate": 2.452773201537617e-05, "loss": 1.9311, "step": 120000},
    {"epoch": 7.77, "learning_rate": 2.384198242723778e-05, "loss": 1.9314, "step": 121000},
    {"epoch": 7.84, "learning_rate": 2.3155546403075236e-05, "loss": 1.9297, "step": 122000},
    {"epoch": 7.9, "learning_rate": 2.2469110378912686e-05, "loss": 1.9302, "step": 123000},
    {"epoch": 7.97, "learning_rate": 2.1783360790774302e-05, "loss": 1.9281, "step": 124000},
    {"epoch": 8.0, "eval_accuracy": 0.5641958675221699, "eval_loss": 1.9358315467834473, "eval_runtime": 4.7877, "eval_samples_per_second": 595.693, "eval_steps_per_second": 4.804, "step": 124544},
    {"epoch": 8.03, "learning_rate": 2.1096924766611753e-05, "loss": 1.9201, "step": 125000},
    {"epoch": 8.09, "learning_rate": 2.041117517847337e-05, "loss": 1.9086, "step": 126000},
    {"epoch": 8.16, "learning_rate": 1.972473915431082e-05, "loss": 1.9107, "step": 127000},
    {"epoch": 8.22, "learning_rate": 1.903830313014827e-05, "loss": 1.9099, "step": 128000},
    {"epoch": 8.29, "learning_rate": 1.8352553542009884e-05, "loss": 1.9112, "step": 129000},
    {"epoch": 8.35, "learning_rate": 1.7666117517847338e-05, "loss": 1.9099, "step": 130000},
    {"epoch": 8.41, "learning_rate": 1.697968149368479e-05, "loss": 1.9092, "step": 131000},
    {"epoch": 8.48, "learning_rate": 1.62939319055464e-05, "loss": 1.9101, "step": 132000},
    {"epoch": 8.54, "learning_rate": 1.5607495881383856e-05, "loss": 1.9104, "step": 133000},
    {"epoch": 8.61, "learning_rate": 1.492174629324547e-05, "loss": 1.909, "step": 134000},
    {"epoch": 8.67, "learning_rate": 1.4235310269082921e-05, "loss": 1.9087, "step": 135000},
    {"epoch": 8.74, "learning_rate": 1.3549560680944537e-05, "loss": 1.9079, "step": 136000},
    {"epoch": 8.8, "learning_rate": 1.2863124656781988e-05, "loss": 1.9074, "step": 137000},
    {"epoch": 8.86, "learning_rate": 1.217668863261944e-05, "loss": 1.9087, "step": 138000},
    {"epoch": 8.93, "learning_rate": 1.1490939044481055e-05, "loss": 1.9076, "step": 139000},
    {"epoch": 8.99, "learning_rate": 1.0804503020318507e-05, "loss": 1.9065, "step": 140000},
    {"epoch": 9.0, "eval_accuracy": 0.5664294154675266, "eval_loss": 1.9251179695129395, "eval_runtime": 4.7856, "eval_samples_per_second": 595.949, "eval_steps_per_second": 4.806, "step": 140112},
    {"epoch": 9.06, "learning_rate": 1.0118753432180122e-05, "loss": 1.8912, "step": 141000},
    {"epoch": 9.12, "learning_rate": 9.432317408017572e-06, "loss": 1.8903, "step": 142000},
    {"epoch": 9.19, "learning_rate": 8.745881383855025e-06, "loss": 1.8892, "step": 143000},
    {"epoch": 9.25, "learning_rate": 8.059445359692478e-06, "loss": 1.8894, "step": 144000},
    {"epoch": 9.31, "learning_rate": 7.373695771554091e-06, "loss": 1.8906, "step": 145000},
    {"epoch": 9.38, "learning_rate": 6.6879461834157054e-06, "loss": 1.8913, "step": 146000},
    {"epoch": 9.44, "learning_rate": 6.001510159253158e-06, "loss": 1.8893, "step": 147000},
    {"epoch": 9.51, "learning_rate": 5.31507413509061e-06, "loss": 1.889, "step": 148000},
    {"epoch": 9.57, "learning_rate": 4.628638110928061e-06, "loss": 1.8894, "step": 149000},
    {"epoch": 9.64, "learning_rate": 3.942888522789677e-06, "loss": 1.8889, "step": 150000},
    {"epoch": 9.7, "learning_rate": 3.256452498627128e-06, "loss": 1.8899, "step": 151000},
    {"epoch": 9.76, "learning_rate": 2.5707029104887426e-06, "loss": 1.8881, "step": 152000},
    {"epoch": 9.83, "learning_rate": 1.8842668863261946e-06, "loss": 1.8868, "step": 153000},
    {"epoch": 9.89, "learning_rate": 1.1985172981878091e-06, "loss": 1.8879, "step": 154000},
    {"epoch": 9.96, "learning_rate": 5.120812740252608e-07, "loss": 1.8871, "step": 155000},
    {"epoch": 10.0, "eval_accuracy": 0.5679064836390543, "eval_loss": 1.9183450937271118, "eval_runtime": 4.7588, "eval_samples_per_second": 599.309, "eval_steps_per_second": 4.833, "step": 155680},
    {"epoch": 10.0, "step": 155680, "total_flos": 1.30166022537216e+18, "train_loss": 2.0850467458413293, "train_runtime": 67879.3856, "train_samples_per_second": 293.558, "train_steps_per_second": 2.293}
  ],
  "max_steps": 155680,
  "num_train_epochs": 10,
  "total_flos": 1.30166022537216e+18,
  "trial_name": null,
  "trial_params": null
}