{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 16.0,
  "global_step": 637840,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.9984322087043773e-05,
      "loss": 2.8529,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.996864417408755e-05,
      "loss": 2.3821,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.995296626113132e-05,
      "loss": 2.282,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9937288348175092e-05,
      "loss": 2.1631,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9921610435218864e-05,
      "loss": 2.093,
      "step": 2500
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.990593252226264e-05,
      "loss": 2.09,
      "step": 3000
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.989025460930641e-05,
      "loss": 2.0326,
      "step": 3500
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9874576696350182e-05,
      "loss": 2.0064,
      "step": 4000
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9858898783393958e-05,
      "loss": 2.0513,
      "step": 4500
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.984322087043773e-05,
      "loss": 1.955,
      "step": 5000
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.98275429574815e-05,
      "loss": 1.9279,
      "step": 5500
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9811865044525273e-05,
      "loss": 1.8879,
      "step": 6000
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9796187131569048e-05,
      "loss": 1.9094,
      "step": 6500
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.978050921861282e-05,
      "loss": 1.8483,
      "step": 7000
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.976483130565659e-05,
      "loss": 1.7955,
      "step": 7500
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9749153392700363e-05,
      "loss": 1.7358,
      "step": 8000
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.973347547974414e-05,
      "loss": 1.7735,
      "step": 8500
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.971779756678791e-05,
      "loss": 1.7997,
      "step": 9000
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9702119653831685e-05,
      "loss": 1.6992,
      "step": 9500
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9686441740875457e-05,
      "loss": 1.7818,
      "step": 10000
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.967076382791923e-05,
      "loss": 1.7817,
      "step": 10500
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9655085914963e-05,
      "loss": 1.7968,
      "step": 11000
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9639408002006776e-05,
      "loss": 1.7211,
      "step": 11500
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9623730089050548e-05,
      "loss": 1.7302,
      "step": 12000
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.960805217609432e-05,
      "loss": 1.8177,
      "step": 12500
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.959237426313809e-05,
      "loss": 1.6991,
      "step": 13000
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9576696350181866e-05,
      "loss": 1.7491,
      "step": 13500
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9561018437225638e-05,
      "loss": 1.7191,
      "step": 14000
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9545340524269413e-05,
      "loss": 1.7411,
      "step": 14500
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.952966261131318e-05,
      "loss": 1.6523,
      "step": 15000
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9513984698356957e-05,
      "loss": 1.6879,
      "step": 15500
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.949830678540073e-05,
      "loss": 1.6819,
      "step": 16000
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9482628872444504e-05,
      "loss": 1.7571,
      "step": 16500
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9466950959488275e-05,
      "loss": 1.6614,
      "step": 17000
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9451273046532047e-05,
      "loss": 1.6589,
      "step": 17500
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.943559513357582e-05,
      "loss": 1.6081,
      "step": 18000
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9419917220619594e-05,
      "loss": 1.5725,
      "step": 18500
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9404239307663366e-05,
      "loss": 1.6776,
      "step": 19000
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9388561394707138e-05,
      "loss": 1.6364,
      "step": 19500
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.937288348175091e-05,
      "loss": 1.6534,
      "step": 20000
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9357205568794684e-05,
      "loss": 1.5984,
      "step": 20500
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.9341527655838456e-05,
      "loss": 1.6573,
      "step": 21000
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.932584974288223e-05,
      "loss": 1.734,
      "step": 21500
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.9310171829926e-05,
      "loss": 1.603,
      "step": 22000
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.9294493916969775e-05,
      "loss": 1.6416,
      "step": 22500
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.9278816004013547e-05,
      "loss": 1.5898,
      "step": 23000
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.9263138091057322e-05,
      "loss": 1.6236,
      "step": 23500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.924746017810109e-05,
      "loss": 1.5564,
      "step": 24000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.9231782265144865e-05,
      "loss": 1.5903,
      "step": 24500
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9216104352188637e-05,
      "loss": 1.6229,
      "step": 25000
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.9200426439232412e-05,
      "loss": 1.5936,
      "step": 25500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.9184748526276184e-05,
      "loss": 1.6015,
      "step": 26000
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.9169070613319956e-05,
      "loss": 1.5906,
      "step": 26500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.9153392700363727e-05,
      "loss": 1.6663,
      "step": 27000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.9137714787407503e-05,
      "loss": 1.5678,
      "step": 27500
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.9122036874451274e-05,
      "loss": 1.621,
      "step": 28000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.910635896149505e-05,
      "loss": 1.5988,
      "step": 28500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.9090681048538818e-05,
      "loss": 1.6418,
      "step": 29000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.9075003135582593e-05,
      "loss": 1.5442,
      "step": 29500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.9059325222626365e-05,
      "loss": 1.6137,
      "step": 30000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.904364730967014e-05,
      "loss": 1.5937,
      "step": 30500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.902796939671391e-05,
      "loss": 1.551,
      "step": 31000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.9012291483757683e-05,
      "loss": 1.5752,
      "step": 31500
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.8996613570801455e-05,
      "loss": 1.6542,
      "step": 32000
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.898093565784523e-05,
      "loss": 1.5128,
      "step": 32500
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.8965257744889002e-05,
      "loss": 1.5094,
      "step": 33000
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.8949579831932774e-05,
      "loss": 1.5501,
      "step": 33500
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.8933901918976546e-05,
      "loss": 1.5973,
      "step": 34000
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.891822400602032e-05,
      "loss": 1.534,
      "step": 34500
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.8902546093064093e-05,
      "loss": 1.5083,
      "step": 35000
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.8886868180107864e-05,
      "loss": 1.612,
      "step": 35500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.887119026715164e-05,
      "loss": 1.5596,
      "step": 36000
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.885551235419541e-05,
      "loss": 1.5497,
      "step": 36500
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.8839834441239183e-05,
      "loss": 1.6015,
      "step": 37000
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.8824156528282958e-05,
      "loss": 1.5206,
      "step": 37500
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.880847861532673e-05,
      "loss": 1.5523,
      "step": 38000
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.87928007023705e-05,
      "loss": 1.5119,
      "step": 38500
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.8777122789414273e-05,
      "loss": 1.5777,
      "step": 39000
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.876144487645805e-05,
      "loss": 1.5688,
      "step": 39500
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.4448622465133667,
      "eval_runtime": 128.1797,
      "eval_samples_per_second": 276.456,
      "eval_steps_per_second": 34.561,
      "step": 39865
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.874576696350182e-05,
      "loss": 1.522,
      "step": 40000
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.8730089050545592e-05,
      "loss": 1.5006,
      "step": 40500
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.8714411137589367e-05,
      "loss": 1.5478,
      "step": 41000
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.869873322463314e-05,
      "loss": 1.4551,
      "step": 41500
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.868305531167691e-05,
      "loss": 1.5168,
      "step": 42000
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.8667377398720682e-05,
      "loss": 1.4736,
      "step": 42500
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.8651699485764458e-05,
      "loss": 1.4973,
      "step": 43000
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.863602157280823e-05,
      "loss": 1.4207,
      "step": 43500
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.8620343659852e-05,
      "loss": 1.4869,
      "step": 44000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.8604665746895773e-05,
      "loss": 1.4581,
      "step": 44500
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.8588987833939548e-05,
      "loss": 1.4674,
      "step": 45000
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.857330992098332e-05,
      "loss": 1.4434,
      "step": 45500
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.8557632008027095e-05,
      "loss": 1.4838,
      "step": 46000
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.8541954095070867e-05,
      "loss": 1.4814,
      "step": 46500
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.852627618211464e-05,
      "loss": 1.4879,
      "step": 47000
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.851059826915841e-05,
      "loss": 1.4201,
      "step": 47500
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.8494920356202185e-05,
      "loss": 1.5135,
      "step": 48000
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.8479242443245957e-05,
      "loss": 1.4917,
      "step": 48500
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.846356453028973e-05,
      "loss": 1.4462,
      "step": 49000
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.84478866173335e-05,
      "loss": 1.456,
      "step": 49500
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.8432208704377276e-05,
      "loss": 1.4534,
      "step": 50000
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.8416530791421048e-05,
      "loss": 1.4424,
      "step": 50500
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.8400852878464823e-05,
      "loss": 1.4568,
      "step": 51000
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.838517496550859e-05,
      "loss": 1.4868,
      "step": 51500
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.8369497052552366e-05,
      "loss": 1.5445,
      "step": 52000
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.8353819139596138e-05,
      "loss": 1.4009,
      "step": 52500
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.8338141226639913e-05,
      "loss": 1.4949,
      "step": 53000
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.8322463313683685e-05,
      "loss": 1.4977,
      "step": 53500
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.8306785400727457e-05,
      "loss": 1.4251,
      "step": 54000
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.829110748777123e-05,
      "loss": 1.4671,
      "step": 54500
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.8275429574815004e-05,
      "loss": 1.4663,
      "step": 55000
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.8259751661858775e-05,
      "loss": 1.474,
      "step": 55500
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.8244073748902547e-05,
      "loss": 1.4399,
      "step": 56000
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.822839583594632e-05,
      "loss": 1.4721,
      "step": 56500
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.8212717922990094e-05,
      "loss": 1.4915,
      "step": 57000
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.8197040010033866e-05,
      "loss": 1.4602,
      "step": 57500
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.818136209707764e-05,
      "loss": 1.4274,
      "step": 58000
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.816568418412141e-05,
      "loss": 1.4175,
      "step": 58500
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.8150006271165184e-05,
      "loss": 1.4513,
      "step": 59000
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.8134328358208956e-05,
      "loss": 1.4499,
      "step": 59500
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.811865044525273e-05,
      "loss": 1.4535,
      "step": 60000
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.81029725322965e-05,
      "loss": 1.4235,
      "step": 60500
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.8087294619340275e-05,
      "loss": 1.4936,
      "step": 61000
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.8071616706384047e-05,
      "loss": 1.4303,
      "step": 61500
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.8055938793427822e-05,
      "loss": 1.4397,
      "step": 62000
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.8040260880471593e-05,
      "loss": 1.4463,
      "step": 62500
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.8024582967515365e-05,
      "loss": 1.4258,
      "step": 63000
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.8008905054559137e-05,
      "loss": 1.462,
      "step": 63500
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.7993227141602912e-05,
      "loss": 1.4396,
      "step": 64000
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.7977549228646684e-05,
      "loss": 1.4277,
      "step": 64500
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.796187131569046e-05,
      "loss": 1.4342,
      "step": 65000
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.7946193402734227e-05,
      "loss": 1.4458,
      "step": 65500
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.7930515489778003e-05,
      "loss": 1.4409,
      "step": 66000
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.7914837576821774e-05,
      "loss": 1.4699,
      "step": 66500
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.789915966386555e-05,
      "loss": 1.4831,
      "step": 67000
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.7883481750909318e-05,
      "loss": 1.3923,
      "step": 67500
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.7867803837953093e-05,
      "loss": 1.4465,
      "step": 68000
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.7852125924996865e-05,
      "loss": 1.4096,
      "step": 68500
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.783644801204064e-05,
      "loss": 1.4535,
      "step": 69000
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.782077009908441e-05,
      "loss": 1.4147,
      "step": 69500
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.7805092186128183e-05,
      "loss": 1.3796,
      "step": 70000
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.7789414273171955e-05,
      "loss": 1.3972,
      "step": 70500
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.777373636021573e-05,
      "loss": 1.4675,
      "step": 71000
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.7758058447259502e-05,
      "loss": 1.46,
      "step": 71500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.7742380534303274e-05,
      "loss": 1.4559,
      "step": 72000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.7726702621347046e-05,
      "loss": 1.4631,
      "step": 72500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.771102470839082e-05,
      "loss": 1.408,
      "step": 73000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.7695346795434593e-05,
      "loss": 1.4728,
      "step": 73500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.7679668882478368e-05,
      "loss": 1.4401,
      "step": 74000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.766399096952214e-05,
      "loss": 1.4551,
      "step": 74500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.764831305656591e-05,
      "loss": 1.4492,
      "step": 75000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.7632635143609683e-05,
      "loss": 1.4464,
      "step": 75500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.7616957230653458e-05,
      "loss": 1.4745,
      "step": 76000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.760127931769723e-05,
      "loss": 1.411,
      "step": 76500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7585601404741e-05,
      "loss": 1.3978,
      "step": 77000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7569923491784773e-05,
      "loss": 1.4891,
      "step": 77500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.755424557882855e-05,
      "loss": 1.4735,
      "step": 78000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.753856766587232e-05,
      "loss": 1.487,
      "step": 78500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7522889752916092e-05,
      "loss": 1.4213,
      "step": 79000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.7507211839959867e-05,
      "loss": 1.4332,
      "step": 79500
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.3822311162948608,
      "eval_runtime": 127.8424,
      "eval_samples_per_second": 277.185,
      "eval_steps_per_second": 34.652,
      "step": 79730
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.749153392700364e-05,
      "loss": 1.3539,
      "step": 80000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.747585601404741e-05,
      "loss": 1.3973,
      "step": 80500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.7460178101091186e-05,
      "loss": 1.3728,
      "step": 81000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.7444500188134958e-05,
      "loss": 1.3788,
      "step": 81500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.742882227517873e-05,
      "loss": 1.4604,
      "step": 82000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.74131443622225e-05,
      "loss": 1.3327,
      "step": 82500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.7397466449266276e-05,
      "loss": 1.3296,
      "step": 83000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.7381788536310048e-05,
      "loss": 1.3417,
      "step": 83500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.736611062335382e-05,
      "loss": 1.4489,
      "step": 84000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.7350432710397595e-05,
      "loss": 1.3476,
      "step": 84500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.7334754797441367e-05,
      "loss": 1.422,
      "step": 85000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.731907688448514e-05,
      "loss": 1.3592,
      "step": 85500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.730339897152891e-05,
      "loss": 1.41,
      "step": 86000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.7287721058572685e-05,
      "loss": 1.3894,
      "step": 86500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.7272043145616457e-05,
      "loss": 1.3512,
      "step": 87000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.725636523266023e-05,
      "loss": 1.3567,
      "step": 87500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.7240687319704e-05,
      "loss": 1.4404,
      "step": 88000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.7225009406747776e-05,
      "loss": 1.3535,
      "step": 88500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.7209331493791548e-05,
      "loss": 1.4144,
      "step": 89000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.7193653580835323e-05,
      "loss": 1.432,
      "step": 89500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.7177975667879094e-05,
      "loss": 1.3745,
      "step": 90000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.7162297754922866e-05,
      "loss": 1.375,
      "step": 90500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.7146619841966638e-05,
      "loss": 1.3853,
      "step": 91000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.7130941929010413e-05,
      "loss": 1.3474,
      "step": 91500
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.7115264016054185e-05,
      "loss": 1.3424,
      "step": 92000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.7099586103097957e-05,
      "loss": 1.3599,
      "step": 92500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.708390819014173e-05,
      "loss": 1.4519,
      "step": 93000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.7068230277185504e-05,
      "loss": 1.4344,
      "step": 93500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.7052552364229275e-05,
      "loss": 1.3346,
      "step": 94000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.703687445127305e-05,
      "loss": 1.3485,
      "step": 94500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.702119653831682e-05,
      "loss": 1.3506,
      "step": 95000
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.7005518625360594e-05,
      "loss": 1.3044,
      "step": 95500
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.6989840712404366e-05,
      "loss": 1.3287,
      "step": 96000
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.697416279944814e-05,
      "loss": 1.362,
      "step": 96500
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.695848488649191e-05,
      "loss": 1.378,
      "step": 97000
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.6942806973535684e-05,
      "loss": 1.4017,
      "step": 97500
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6927129060579456e-05,
      "loss": 1.3816,
      "step": 98000
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.691145114762323e-05,
      "loss": 1.3901,
      "step": 98500
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.6895773234667003e-05,
      "loss": 1.3633,
      "step": 99000
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.6880095321710775e-05,
      "loss": 1.3504,
      "step": 99500
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.6864417408754547e-05,
      "loss": 1.336,
      "step": 100000
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.6848739495798322e-05,
      "loss": 1.4251,
      "step": 100500
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.6833061582842093e-05,
      "loss": 1.3247,
      "step": 101000
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.681738366988587e-05,
      "loss": 1.381,
      "step": 101500
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.6801705756929637e-05,
      "loss": 1.3824,
      "step": 102000
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.6786027843973412e-05,
      "loss": 1.4192,
      "step": 102500
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.6770349931017184e-05,
      "loss": 1.355,
      "step": 103000
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.675467201806096e-05,
      "loss": 1.3567,
      "step": 103500
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.6738994105104727e-05,
      "loss": 1.3636,
      "step": 104000
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.6723316192148503e-05,
      "loss": 1.3647,
      "step": 104500
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.6707638279192274e-05,
      "loss": 1.3875,
      "step": 105000
    },
    {
      "epoch": 2.65,
      "learning_rate": 1.669196036623605e-05,
      "loss": 1.3918,
      "step": 105500
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.667628245327982e-05,
      "loss": 1.3435,
      "step": 106000
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.6660604540323593e-05,
      "loss": 1.3854,
      "step": 106500
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.6644926627367365e-05,
      "loss": 1.3943,
      "step": 107000
    },
    {
      "epoch": 2.7,
      "learning_rate": 1.662924871441114e-05,
      "loss": 1.375,
      "step": 107500
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.661357080145491e-05,
      "loss": 1.3497,
      "step": 108000
    },
    {
      "epoch": 2.72,
      "learning_rate": 1.6597892888498683e-05,
      "loss": 1.3838,
      "step": 108500
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.6582214975542455e-05,
      "loss": 1.3096,
      "step": 109000
    },
    {
      "epoch": 2.75,
      "learning_rate": 1.656653706258623e-05,
      "loss": 1.3499,
      "step": 109500
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.6550859149630002e-05,
      "loss": 1.3998,
      "step": 110000
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.6535181236673777e-05,
      "loss": 1.3906,
      "step": 110500
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.6519503323717546e-05,
      "loss": 1.2859,
      "step": 111000
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.650382541076132e-05,
      "loss": 1.3169,
      "step": 111500
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.6488147497805093e-05,
      "loss": 1.3822,
      "step": 112000
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.6472469584848868e-05,
      "loss": 1.4047,
      "step": 112500
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.645679167189264e-05,
      "loss": 1.3801,
      "step": 113000
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.644111375893641e-05,
      "loss": 1.3764,
      "step": 113500
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.6425435845980183e-05,
      "loss": 1.33,
      "step": 114000
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.6409757933023958e-05,
      "loss": 1.3235,
      "step": 114500
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.639408002006773e-05,
      "loss": 1.3344,
      "step": 115000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.63784021071115e-05,
      "loss": 1.3674,
      "step": 115500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.6362724194155273e-05,
      "loss": 1.3511,
      "step": 116000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.634704628119905e-05,
      "loss": 1.3671,
      "step": 116500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.633136836824282e-05,
      "loss": 1.3888,
      "step": 117000
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.6315690455286595e-05,
      "loss": 1.3589,
      "step": 117500
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.6300012542330367e-05,
      "loss": 1.3533,
      "step": 118000
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.628433462937414e-05,
      "loss": 1.2987,
      "step": 118500
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.626865671641791e-05,
      "loss": 1.3448,
      "step": 119000
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.6252978803461686e-05,
      "loss": 1.277,
      "step": 119500
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.338686227798462,
      "eval_runtime": 128.3357,
      "eval_samples_per_second": 276.12,
      "eval_steps_per_second": 34.519,
      "step": 119595
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.6237300890505458e-05,
      "loss": 1.3447,
      "step": 120000
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.622162297754923e-05,
      "loss": 1.2503,
      "step": 120500
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.6205945064593e-05,
      "loss": 1.2342,
      "step": 121000
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.6190267151636776e-05,
      "loss": 1.3394,
      "step": 121500
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.6174589238680548e-05,
      "loss": 1.2895,
      "step": 122000
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.615891132572432e-05,
      "loss": 1.4003,
      "step": 122500
    },
    {
      "epoch": 3.09,
      "learning_rate": 1.6143233412768095e-05,
      "loss": 1.3084,
      "step": 123000
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.6127555499811867e-05,
      "loss": 1.2751,
      "step": 123500
    },
    {
      "epoch": 3.11,
      "learning_rate": 1.611187758685564e-05,
      "loss": 1.3414,
      "step": 124000
    },
    {
      "epoch": 3.12,
      "learning_rate": 1.609619967389941e-05,
      "loss": 1.3392,
      "step": 124500
    },
    {
      "epoch": 3.14,
      "learning_rate": 1.6080521760943185e-05,
      "loss": 1.3551,
      "step": 125000
    },
    {
      "epoch": 3.15,
      "learning_rate": 1.6064843847986957e-05,
      "loss": 1.2876,
      "step": 125500
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.604916593503073e-05,
      "loss": 1.3126,
      "step": 126000
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.6033488022074504e-05,
      "loss": 1.2531,
      "step": 126500
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.6017810109118276e-05,
      "loss": 1.3762,
      "step": 127000
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.6002132196162048e-05,
      "loss": 1.3384,
      "step": 127500
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.5986454283205823e-05,
      "loss": 1.3136,
      "step": 128000
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.5970776370249594e-05,
      "loss": 1.3164,
      "step": 128500
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.5955098457293366e-05,
      "loss": 1.2976,
      "step": 129000
    },
    {
      "epoch": 3.25,
      "learning_rate": 1.5939420544337138e-05,
      "loss": 1.3044,
      "step": 129500
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.5923742631380913e-05,
      "loss": 1.2693,
      "step": 130000
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.5908064718424685e-05,
      "loss": 1.2781,
      "step": 130500
    },
    {
      "epoch": 3.29,
      "learning_rate": 1.5892386805468457e-05,
      "loss": 1.2856,
      "step": 131000
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.587670889251223e-05,
      "loss": 1.2876,
      "step": 131500
    },
    {
      "epoch": 3.31,
      "learning_rate": 1.5861030979556004e-05,
      "loss": 1.3101,
      "step": 132000
    },
    {
      "epoch": 3.32,
      "learning_rate": 1.5845353066599775e-05,
      "loss": 1.3436,
      "step": 132500
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.582967515364355e-05,
      "loss": 1.295,
      "step": 133000
    },
    {
      "epoch": 3.35,
      "learning_rate": 1.581399724068732e-05,
      "loss": 1.3083,
      "step": 133500
    },
    {
      "epoch": 3.36,
      "learning_rate": 1.5798319327731094e-05,
      "loss": 1.3935,
      "step": 134000
    },
    {
      "epoch": 3.37,
      "learning_rate": 1.5782641414774866e-05,
      "loss": 1.3764,
      "step": 134500
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.576696350181864e-05,
      "loss": 1.2953,
      "step": 135000
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.5751285588862413e-05,
      "loss": 1.3373,
      "step": 135500
    },
    {
      "epoch": 3.41,
      "learning_rate": 1.5735607675906184e-05,
      "loss": 1.367,
      "step": 136000
    },
    {
      "epoch": 3.42,
      "learning_rate": 1.5719929762949956e-05,
      "loss": 1.3062,
      "step": 136500
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.570425184999373e-05,
      "loss": 1.276,
      "step": 137000
    },
    {
      "epoch": 3.45,
      "learning_rate": 1.5688573937037503e-05,
      "loss": 1.339,
      "step": 137500
    },
    {
      "epoch": 3.46,
      "learning_rate": 1.5672896024081278e-05,
      "loss": 1.3375,
      "step": 138000
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.5657218111125047e-05,
      "loss": 1.3677,
      "step": 138500
    },
    {
      "epoch": 3.49,
      "learning_rate": 1.5641540198168822e-05,
      "loss": 1.2983,
      "step": 139000
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.5625862285212593e-05,
      "loss": 1.3559,
      "step": 139500
    },
    {
      "epoch": 3.51,
      "learning_rate": 1.561018437225637e-05,
      "loss": 1.3124,
      "step": 140000
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.5594506459300137e-05,
      "loss": 1.373,
      "step": 140500
    },
    {
      "epoch": 3.54,
      "learning_rate": 1.5578828546343912e-05,
      "loss": 1.2698,
      "step": 141000
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.5563150633387684e-05,
      "loss": 1.2757,
      "step": 141500
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.554747272043146e-05,
      "loss": 1.3387,
      "step": 142000
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.553179480747523e-05,
      "loss": 1.3115,
      "step": 142500
    },
    {
      "epoch": 3.59,
      "learning_rate": 1.5516116894519003e-05,
      "loss": 1.3385,
      "step": 143000
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.5500438981562774e-05,
      "loss": 1.3294,
      "step": 143500
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.548476106860655e-05,
      "loss": 1.3153,
      "step": 144000
    },
    {
      "epoch": 3.62,
      "learning_rate": 1.546908315565032e-05,
      "loss": 1.2989,
      "step": 144500
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.5453405242694093e-05,
      "loss": 1.2992,
      "step": 145000
    },
    {
      "epoch": 3.65,
      "learning_rate": 1.5437727329737865e-05,
      "loss": 1.3188,
      "step": 145500
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.542204941678164e-05,
      "loss": 1.3685,
      "step": 146000
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.540637150382541e-05,
      "loss": 1.3352,
      "step": 146500
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.5390693590869187e-05,
      "loss": 1.3314,
      "step": 147000
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.5375015677912955e-05,
      "loss": 1.3009,
      "step": 147500
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.535933776495673e-05,
      "loss": 1.2473,
      "step": 148000
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.5343659852000502e-05,
      "loss": 1.3388,
      "step": 148500
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.5327981939044277e-05,
      "loss": 1.3062,
      "step": 149000
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.531230402608805e-05,
      "loss": 1.3164,
      "step": 149500
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.529662611313182e-05,
      "loss": 1.2968,
      "step": 150000
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.5280948200175593e-05,
      "loss": 1.2951,
      "step": 150500
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.5265270287219368e-05,
      "loss": 1.3349,
      "step": 151000
    },
    {
      "epoch": 3.8,
      "learning_rate": 1.524959237426314e-05,
      "loss": 1.3453,
      "step": 151500
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.5233914461306911e-05,
      "loss": 1.3503,
      "step": 152000
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.5218236548350685e-05,
      "loss": 1.2881,
      "step": 152500
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.5202558635394458e-05,
      "loss": 1.265,
      "step": 153000
    },
    {
      "epoch": 3.85,
      "learning_rate": 1.5186880722438232e-05,
      "loss": 1.2804,
      "step": 153500
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.5171202809482003e-05,
      "loss": 1.3297,
      "step": 154000
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.5155524896525775e-05,
      "loss": 1.2666,
      "step": 154500
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.5139846983569548e-05,
      "loss": 1.2965,
      "step": 155000
    },
    {
      "epoch": 3.9,
      "learning_rate": 1.5124169070613322e-05,
      "loss": 1.3152,
      "step": 155500
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.5108491157657095e-05,
      "loss": 1.3266,
      "step": 156000
    },
    {
      "epoch": 3.93,
      "learning_rate": 1.5092813244700865e-05,
      "loss": 1.3224,
      "step": 156500
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.5077135331744639e-05,
      "loss": 1.3181,
      "step": 157000
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.5061457418788412e-05,
      "loss": 1.2806,
      "step": 157500
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.5045779505832186e-05,
      "loss": 1.3513,
      "step": 158000
    },
    {
      "epoch": 3.98,
      "learning_rate": 1.503010159287596e-05,
      "loss": 1.3295,
      "step": 158500
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.501442367991973e-05,
      "loss": 1.3483,
      "step": 159000
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.283326506614685,
      "eval_runtime": 128.3046,
      "eval_samples_per_second": 276.187,
      "eval_steps_per_second": 34.527,
      "step": 159460
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.4998745766963503e-05,
      "loss": 1.2967,
      "step": 159500
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.4983067854007276e-05,
      "loss": 1.3119,
      "step": 160000
    },
    {
      "epoch": 4.03,
      "learning_rate": 1.496738994105105e-05,
      "loss": 1.2953,
      "step": 160500
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.495171202809482e-05,
      "loss": 1.2804,
      "step": 161000
    },
    {
      "epoch": 4.05,
      "learning_rate": 1.4936034115138593e-05,
      "loss": 1.2275,
      "step": 161500
    },
    {
      "epoch": 4.06,
      "learning_rate": 1.4920356202182367e-05,
      "loss": 1.2728,
      "step": 162000
    },
    {
      "epoch": 4.08,
      "learning_rate": 1.490467828922614e-05,
      "loss": 1.2205,
      "step": 162500
    },
    {
      "epoch": 4.09,
      "learning_rate": 1.4889000376269914e-05,
      "loss": 1.2487,
      "step": 163000
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.4873322463313684e-05,
      "loss": 1.2796,
      "step": 163500
    },
    {
      "epoch": 4.11,
      "learning_rate": 1.4857644550357457e-05,
      "loss": 1.2288,
      "step": 164000
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.484196663740123e-05,
      "loss": 1.3309,
      "step": 164500
    },
    {
      "epoch": 4.14,
      "learning_rate": 1.4826288724445004e-05,
      "loss": 1.2506,
      "step": 165000
    },
    {
      "epoch": 4.15,
      "learning_rate": 1.4810610811488774e-05,
      "loss": 1.2754,
      "step": 165500
    },
    {
      "epoch": 4.16,
      "learning_rate": 1.4794932898532548e-05,
      "loss": 1.2578,
      "step": 166000
    },
    {
      "epoch": 4.18,
      "learning_rate": 1.4779254985576321e-05,
      "loss": 1.2982,
      "step": 166500
    },
    {
      "epoch": 4.19,
      "learning_rate": 1.4763577072620094e-05,
      "loss": 1.2595,
      "step": 167000
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.4747899159663868e-05,
      "loss": 1.2515,
      "step": 167500
    },
    {
      "epoch": 4.21,
      "learning_rate": 1.4732221246707638e-05,
      "loss": 1.2115,
      "step": 168000
    },
    {
      "epoch": 4.23,
      "learning_rate": 1.4716543333751411e-05,
      "loss": 1.2686,
      "step": 168500
    },
    {
      "epoch": 4.24,
      "learning_rate": 1.4700865420795185e-05,
      "loss": 1.3288,
      "step": 169000
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.4685187507838958e-05,
      "loss": 1.2665,
      "step": 169500
    },
    {
      "epoch": 4.26,
      "learning_rate": 1.4669509594882732e-05,
      "loss": 1.2856,
      "step": 170000
    },
    {
      "epoch": 4.28,
      "learning_rate": 1.4653831681926502e-05,
      "loss": 1.216,
      "step": 170500
    },
    {
      "epoch": 4.29,
      "learning_rate": 1.4638153768970275e-05,
      "loss": 1.2685,
      "step": 171000
    },
    {
      "epoch": 4.3,
      "learning_rate": 1.4622475856014049e-05,
      "loss": 1.2446,
      "step": 171500
    },
    {
      "epoch": 4.31,
      "learning_rate": 1.4606797943057822e-05,
      "loss": 1.2396,
      "step": 172000
    },
    {
      "epoch": 4.33,
      "learning_rate": 1.4591120030101594e-05,
      "loss": 1.3141,
      "step": 172500
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.4575442117145366e-05,
      "loss": 1.2931,
      "step": 173000
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.455976420418914e-05,
      "loss": 1.2463,
      "step": 173500
    },
    {
      "epoch": 4.36,
      "learning_rate": 1.4544086291232913e-05,
      "loss": 1.2987,
      "step": 174000
    },
    {
      "epoch": 4.38,
      "learning_rate": 1.4528408378276686e-05,
      "loss": 1.2794,
      "step": 174500
    },
    {
      "epoch": 4.39,
      "learning_rate": 1.4512730465320458e-05,
      "loss": 1.3229,
      "step": 175000
    },
    {
      "epoch": 4.4,
      "learning_rate": 1.449705255236423e-05,
      "loss": 1.3247,
      "step": 175500
    },
    {
      "epoch": 4.41,
      "learning_rate": 1.4481374639408003e-05,
      "loss": 1.2707,
      "step": 176000
    },
    {
      "epoch": 4.43,
      "learning_rate": 1.4465696726451776e-05,
      "loss": 1.3131,
      "step": 176500
    },
    {
      "epoch": 4.44,
      "learning_rate": 1.4450018813495548e-05,
      "loss": 1.2587,
      "step": 177000
    },
    {
      "epoch": 4.45,
      "learning_rate": 1.4434340900539322e-05,
      "loss": 1.2548,
      "step": 177500
    },
    {
      "epoch": 4.47,
      "learning_rate": 1.4418662987583093e-05,
      "loss": 1.2543,
      "step": 178000
    },
    {
      "epoch": 4.48,
      "learning_rate": 1.4402985074626867e-05,
      "loss": 1.2267,
      "step": 178500
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.438730716167064e-05,
      "loss": 1.2485,
      "step": 179000
    },
    {
      "epoch": 4.5,
      "learning_rate": 1.4371629248714412e-05,
      "loss": 1.2508,
      "step": 179500
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.4355951335758186e-05,
      "loss": 1.2642,
      "step": 180000
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.4340273422801957e-05,
      "loss": 1.2739,
      "step": 180500
    },
    {
      "epoch": 4.54,
      "learning_rate": 1.432459550984573e-05,
      "loss": 1.2474,
      "step": 181000
    },
    {
      "epoch": 4.55,
      "learning_rate": 1.4308917596889503e-05,
      "loss": 1.3511,
      "step": 181500
    },
    {
      "epoch": 4.57,
      "learning_rate": 1.4293239683933276e-05,
      "loss": 1.2219,
      "step": 182000
    },
    {
      "epoch": 4.58,
      "learning_rate": 1.427756177097705e-05,
      "loss": 1.2916,
      "step": 182500
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.4261883858020821e-05,
      "loss": 1.2512,
      "step": 183000
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.4246205945064595e-05,
      "loss": 1.2476,
      "step": 183500
    },
    {
      "epoch": 4.62,
      "learning_rate": 1.4230528032108366e-05,
      "loss": 1.2676,
      "step": 184000
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.421485011915214e-05,
      "loss": 1.2402,
      "step": 184500
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.4199172206195913e-05,
      "loss": 1.2829,
      "step": 185000
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.4183494293239685e-05,
      "loss": 1.2755,
      "step": 185500
    },
    {
      "epoch": 4.67,
      "learning_rate": 1.4167816380283457e-05,
      "loss": 1.2779,
      "step": 186000
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.415213846732723e-05,
      "loss": 1.2341,
      "step": 186500
    },
    {
      "epoch": 4.69,
      "learning_rate": 1.4136460554371004e-05,
      "loss": 1.2414,
      "step": 187000
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.4120782641414777e-05,
      "loss": 1.2824,
      "step": 187500
    },
    {
      "epoch": 4.72,
      "learning_rate": 1.4105104728458549e-05,
      "loss": 1.2999,
      "step": 188000
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.408942681550232e-05,
      "loss": 1.2051,
      "step": 188500
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.4073748902546094e-05,
      "loss": 1.2794,
      "step": 189000
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.4058070989589868e-05,
      "loss": 1.2951,
      "step": 189500
    },
    {
      "epoch": 4.77,
      "learning_rate": 1.4042393076633641e-05,
      "loss": 1.261,
      "step": 190000
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.4026715163677413e-05,
      "loss": 1.2629,
      "step": 190500
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.4011037250721185e-05,
      "loss": 1.2497,
      "step": 191000
    },
    {
      "epoch": 4.8,
      "learning_rate": 1.3995359337764958e-05,
      "loss": 1.2706,
      "step": 191500
    },
    {
      "epoch": 4.82,
      "learning_rate": 1.3979681424808732e-05,
      "loss": 1.343,
      "step": 192000
    },
    {
      "epoch": 4.83,
      "learning_rate": 1.3964003511852505e-05,
      "loss": 1.2364,
      "step": 192500
    },
    {
      "epoch": 4.84,
      "learning_rate": 1.3948325598896275e-05,
      "loss": 1.3043,
      "step": 193000
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.3932647685940048e-05,
      "loss": 1.3173,
      "step": 193500
    },
    {
      "epoch": 4.87,
      "learning_rate": 1.3916969772983822e-05,
      "loss": 1.2572,
      "step": 194000
    },
    {
      "epoch": 4.88,
      "learning_rate": 1.3901291860027595e-05,
      "loss": 1.2196,
      "step": 194500
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.3885613947071369e-05,
      "loss": 1.2879,
      "step": 195000
    },
    {
      "epoch": 4.9,
      "learning_rate": 1.3869936034115139e-05,
      "loss": 1.2211,
      "step": 195500
    },
    {
      "epoch": 4.92,
      "learning_rate": 1.3854258121158912e-05,
      "loss": 1.2857,
      "step": 196000
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.3838580208202686e-05,
      "loss": 1.2577,
      "step": 196500
    },
    {
      "epoch": 4.94,
      "learning_rate": 1.382290229524646e-05,
      "loss": 1.2711,
      "step": 197000
    },
    {
      "epoch": 4.95,
      "learning_rate": 1.380722438229023e-05,
      "loss": 1.2587,
      "step": 197500
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.3791546469334003e-05,
      "loss": 1.242,
      "step": 198000
    },
    {
      "epoch": 4.98,
      "learning_rate": 1.3775868556377776e-05,
      "loss": 1.28,
      "step": 198500
    },
    {
      "epoch": 4.99,
      "learning_rate": 1.376019064342155e-05,
      "loss": 1.2675,
      "step": 199000
    },
    {
      "epoch": 5.0,
      "eval_loss": NaN,
      "eval_runtime": 128.227,
      "eval_samples_per_second": 276.354,
      "eval_steps_per_second": 34.548,
      "step": 199325
    },
    {
      "epoch": 5.0,
      "learning_rate": 1.3744512730465323e-05,
      "loss": 1.2284,
      "step": 199500
    },
    {
      "epoch": 5.02,
      "learning_rate": 1.3728834817509093e-05,
      "loss": 1.2201,
      "step": 200000
    },
    {
      "epoch": 5.03,
      "learning_rate": 1.3713156904552867e-05,
      "loss": 1.2137,
      "step": 200500
    },
    {
      "epoch": 5.04,
      "learning_rate": 1.369747899159664e-05,
      "loss": 1.2371,
      "step": 201000
    },
    {
      "epoch": 5.05,
      "learning_rate": 1.3681801078640414e-05,
      "loss": 1.2993,
      "step": 201500
    },
    {
      "epoch": 5.07,
      "learning_rate": 1.3666123165684184e-05,
      "loss": 1.2606,
      "step": 202000
    },
    {
      "epoch": 5.08,
      "learning_rate": 1.3650445252727957e-05,
      "loss": 1.27,
      "step": 202500
    },
    {
      "epoch": 5.09,
      "learning_rate": 1.363476733977173e-05,
      "loss": 1.2601,
      "step": 203000
    },
    {
      "epoch": 5.1,
      "learning_rate": 1.3619089426815504e-05,
      "loss": 1.2119,
      "step": 203500
    },
    {
      "epoch": 5.12,
      "learning_rate": 1.3603411513859277e-05,
      "loss": 1.2293,
      "step": 204000
    },
    {
      "epoch": 5.13,
      "learning_rate": 1.3587733600903048e-05,
      "loss": 1.1723,
      "step": 204500
    },
    {
      "epoch": 5.14,
      "learning_rate": 1.3572055687946821e-05,
      "loss": 1.2246,
      "step": 205000
    },
    {
      "epoch": 5.15,
      "learning_rate": 1.3556377774990594e-05,
      "loss": 1.1749,
      "step": 205500
    },
    {
      "epoch": 5.17,
      "learning_rate": 1.3540699862034368e-05,
      "loss": 1.2344,
      "step": 206000
    },
    {
      "epoch": 5.18,
      "learning_rate": 1.3525021949078141e-05,
      "loss": 1.2247,
      "step": 206500
    },
    {
      "epoch": 5.19,
      "learning_rate": 1.3509344036121911e-05,
      "loss": 1.2209,
      "step": 207000
    },
    {
      "epoch": 5.21,
      "learning_rate": 1.3493666123165685e-05,
      "loss": 1.252,
      "step": 207500
    },
    {
      "epoch": 5.22,
      "learning_rate": 1.3477988210209458e-05,
      "loss": 1.1713,
      "step": 208000
    },
    {
      "epoch": 5.23,
      "learning_rate": 1.3462310297253232e-05,
      "loss": 1.2387,
      "step": 208500
    },
    {
      "epoch": 5.24,
      "learning_rate": 1.3446632384297002e-05,
      "loss": 1.2507,
      "step": 209000
    },
    {
      "epoch": 5.26,
      "learning_rate": 1.3430954471340775e-05,
      "loss": 1.2971,
      "step": 209500
    },
    {
      "epoch": 5.27,
      "learning_rate": 1.3415276558384549e-05,
      "loss": 1.2168,
      "step": 210000
    },
    {
      "epoch": 5.28,
      "learning_rate": 1.3399598645428322e-05,
      "loss": 1.2556,
      "step": 210500
    },
    {
      "epoch": 5.29,
      "learning_rate": 1.3383920732472096e-05,
      "loss": 1.2302,
      "step": 211000
    },
    {
      "epoch": 5.31,
      "learning_rate": 1.3368242819515866e-05,
      "loss": 1.261,
      "step": 211500
    },
    {
      "epoch": 5.32,
      "learning_rate": 1.3352564906559639e-05,
      "loss": 1.1894,
      "step": 212000
    },
    {
      "epoch": 5.33,
      "learning_rate": 1.3336886993603413e-05,
      "loss": 1.2134,
      "step": 212500
    },
    {
      "epoch": 5.34,
      "learning_rate": 1.3321209080647186e-05,
      "loss": 1.273,
      "step": 213000
    },
    {
      "epoch": 5.36,
      "learning_rate": 1.3305531167690958e-05,
      "loss": 1.3024,
      "step": 213500
    },
    {
      "epoch": 5.37,
      "learning_rate": 1.328985325473473e-05,
      "loss": 1.2547,
      "step": 214000
    },
    {
      "epoch": 5.38,
      "learning_rate": 1.3274175341778503e-05,
      "loss": 1.2567,
      "step": 214500
    },
    {
      "epoch": 5.39,
      "learning_rate": 1.3258497428822276e-05,
      "loss": 1.2505,
      "step": 215000
    },
    {
      "epoch": 5.41,
      "learning_rate": 1.324281951586605e-05,
      "loss": 1.3005,
      "step": 215500
    },
    {
      "epoch": 5.42,
      "learning_rate": 1.3227141602909822e-05,
      "loss": 1.2019,
      "step": 216000
    },
    {
      "epoch": 5.43,
      "learning_rate": 1.3211463689953593e-05,
      "loss": 1.228,
      "step": 216500
    },
    {
      "epoch": 5.44,
      "learning_rate": 1.3195785776997367e-05,
      "loss": 1.2223,
      "step": 217000
    },
    {
      "epoch": 5.46,
      "learning_rate": 1.318010786404114e-05,
      "loss": 1.2821,
      "step": 217500
    },
    {
      "epoch": 5.47,
      "learning_rate": 1.3164429951084912e-05,
      "loss": 1.2201,
      "step": 218000
    },
    {
      "epoch": 5.48,
      "learning_rate": 1.3148752038128686e-05,
      "loss": 1.2231,
      "step": 218500
    },
    {
      "epoch": 5.49,
      "learning_rate": 1.3133074125172457e-05,
      "loss": 1.2294,
      "step": 219000
    },
    {
      "epoch": 5.51,
      "learning_rate": 1.311739621221623e-05,
      "loss": 1.2198,
      "step": 219500
    },
    {
      "epoch": 5.52,
      "learning_rate": 1.3101718299260004e-05,
      "loss": 1.2677,
      "step": 220000
    },
    {
      "epoch": 5.53,
      "learning_rate": 1.3086040386303776e-05,
      "loss": 1.218,
      "step": 220500
    },
    {
      "epoch": 5.54,
      "learning_rate": 1.307036247334755e-05,
      "loss": 1.2484,
      "step": 221000
    },
    {
      "epoch": 5.56,
      "learning_rate": 1.3054684560391321e-05,
      "loss": 1.1905,
      "step": 221500
    },
    {
      "epoch": 5.57,
      "learning_rate": 1.3039006647435095e-05,
      "loss": 1.2688,
      "step": 222000
    },
    {
      "epoch": 5.58,
      "learning_rate": 1.3023328734478866e-05,
      "loss": 1.248,
      "step": 222500
    },
    {
      "epoch": 5.59,
      "learning_rate": 1.300765082152264e-05,
      "loss": 1.2758,
      "step": 223000
    },
    {
      "epoch": 5.61,
      "learning_rate": 1.2991972908566413e-05,
      "loss": 1.2298,
      "step": 223500
    },
    {
      "epoch": 5.62,
      "learning_rate": 1.2976294995610185e-05,
      "loss": 1.2808,
      "step": 224000
    },
    {
      "epoch": 5.63,
      "learning_rate": 1.2960617082653959e-05,
      "loss": 1.2008,
      "step": 224500
    },
    {
      "epoch": 5.64,
      "learning_rate": 1.294493916969773e-05,
      "loss": 1.2582,
      "step": 225000
    },
    {
      "epoch": 5.66,
      "learning_rate": 1.2929261256741504e-05,
      "loss": 1.209,
      "step": 225500
    },
    {
      "epoch": 5.67,
      "learning_rate": 1.2913583343785277e-05,
      "loss": 1.2933,
      "step": 226000
    },
    {
      "epoch": 5.68,
      "learning_rate": 1.2897905430829049e-05,
      "loss": 1.2576,
      "step": 226500
    },
    {
      "epoch": 5.69,
      "learning_rate": 1.2882227517872822e-05,
      "loss": 1.2619,
      "step": 227000
    },
    {
      "epoch": 5.71,
      "learning_rate": 1.2866549604916594e-05,
      "loss": 1.1988,
      "step": 227500
    },
    {
      "epoch": 5.72,
      "learning_rate": 1.2850871691960368e-05,
      "loss": 1.2358,
      "step": 228000
    },
    {
      "epoch": 5.73,
      "learning_rate": 1.2835193779004141e-05,
      "loss": 1.2377,
      "step": 228500
    },
    {
      "epoch": 5.74,
      "learning_rate": 1.2819515866047913e-05,
      "loss": 1.2357,
      "step": 229000
    },
    {
      "epoch": 5.76,
      "learning_rate": 1.2803837953091685e-05,
      "loss": 1.2496,
      "step": 229500
    },
    {
      "epoch": 5.77,
      "learning_rate": 1.2788160040135458e-05,
      "loss": 1.2092,
      "step": 230000
    },
    {
      "epoch": 5.78,
      "learning_rate": 1.2772482127179232e-05,
      "loss": 1.267,
      "step": 230500
    },
    {
      "epoch": 5.79,
      "learning_rate": 1.2756804214223005e-05,
      "loss": 1.2355,
      "step": 231000
    },
    {
      "epoch": 5.81,
      "learning_rate": 1.2741126301266777e-05,
      "loss": 1.2225,
      "step": 231500
    },
    {
      "epoch": 5.82,
      "learning_rate": 1.2725448388310548e-05,
      "loss": 1.2578,
      "step": 232000
    },
    {
      "epoch": 5.83,
      "learning_rate": 1.2709770475354322e-05,
      "loss": 1.2468,
      "step": 232500
    },
    {
      "epoch": 5.84,
      "learning_rate": 1.2694092562398095e-05,
      "loss": 1.2151,
      "step": 233000
| }, | |
| { | |
| "epoch": 5.86, | |
| "learning_rate": 1.2678414649441869e-05, | |
| "loss": 1.1998, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 1.2662736736485639e-05, | |
| "loss": 1.2087, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 5.88, | |
| "learning_rate": 1.2647058823529412e-05, | |
| "loss": 1.2726, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 1.2631380910573186e-05, | |
| "loss": 1.2146, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 1.261570299761696e-05, | |
| "loss": 1.2373, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 1.2600025084660733e-05, | |
| "loss": 1.264, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "learning_rate": 1.2584347171704503e-05, | |
| "loss": 1.2405, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 5.95, | |
| "learning_rate": 1.2568669258748276e-05, | |
| "loss": 1.2153, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 1.255299134579205e-05, | |
| "loss": 1.2191, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 5.97, | |
| "learning_rate": 1.2537313432835823e-05, | |
| "loss": 1.3123, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 1.2521635519879593e-05, | |
| "loss": 1.2528, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 1.2505957606923367e-05, | |
| "loss": 1.2381, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_loss": 1.254626989364624, | |
| "eval_runtime": 127.3598, | |
| "eval_samples_per_second": 278.235, | |
| "eval_steps_per_second": 34.783, | |
| "step": 239190 | |
| }, | |
| { | |
| "epoch": 6.01, | |
| "learning_rate": 1.249027969396714e-05, | |
| "loss": 1.2663, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "learning_rate": 1.2474601781010914e-05, | |
| "loss": 1.2548, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 6.03, | |
| "learning_rate": 1.2458923868054687e-05, | |
| "loss": 1.2001, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "learning_rate": 1.2443245955098457e-05, | |
| "loss": 1.2247, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 6.06, | |
| "learning_rate": 1.242756804214223e-05, | |
| "loss": 1.2295, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "learning_rate": 1.2411890129186004e-05, | |
| "loss": 1.1823, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "learning_rate": 1.2396212216229777e-05, | |
| "loss": 1.1923, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 6.1, | |
| "learning_rate": 1.2380534303273551e-05, | |
| "loss": 1.1905, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "learning_rate": 1.2364856390317321e-05, | |
| "loss": 1.1896, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 6.12, | |
| "learning_rate": 1.2349178477361094e-05, | |
| "loss": 1.1912, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "learning_rate": 1.2333500564404868e-05, | |
| "loss": 1.235, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "learning_rate": 1.2317822651448641e-05, | |
| "loss": 1.185, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "learning_rate": 1.2302144738492411e-05, | |
| "loss": 1.2781, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "learning_rate": 1.2286466825536185e-05, | |
| "loss": 1.2342, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 6.18, | |
| "learning_rate": 1.2270788912579958e-05, | |
| "loss": 1.2257, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "learning_rate": 1.2255110999623732e-05, | |
| "loss": 1.2575, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 6.21, | |
| "learning_rate": 1.2239433086667505e-05, | |
| "loss": 1.2028, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 6.22, | |
| "learning_rate": 1.2223755173711275e-05, | |
| "loss": 1.2228, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "learning_rate": 1.2208077260755049e-05, | |
| "loss": 1.1886, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 6.25, | |
| "learning_rate": 1.2192399347798822e-05, | |
| "loss": 1.1669, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 1.2176721434842596e-05, | |
| "loss": 1.1943, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 6.27, | |
| "learning_rate": 1.2161043521886366e-05, | |
| "loss": 1.2077, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 1.2145365608930139e-05, | |
| "loss": 1.2227, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 1.2129687695973913e-05, | |
| "loss": 1.2227, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 6.31, | |
| "learning_rate": 1.2114009783017686e-05, | |
| "loss": 1.1362, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "learning_rate": 1.209833187006146e-05, | |
| "loss": 1.2037, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 6.33, | |
| "learning_rate": 1.208265395710523e-05, | |
| "loss": 1.2609, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "learning_rate": 1.2066976044149003e-05, | |
| "loss": 1.1977, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 6.36, | |
| "learning_rate": 1.2051298131192776e-05, | |
| "loss": 1.1819, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "learning_rate": 1.203562021823655e-05, | |
| "loss": 1.1479, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 6.38, | |
| "learning_rate": 1.2019942305280322e-05, | |
| "loss": 1.256, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "learning_rate": 1.2004264392324093e-05, | |
| "loss": 1.1129, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 6.41, | |
| "learning_rate": 1.1988586479367867e-05, | |
| "loss": 1.2096, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "learning_rate": 1.197290856641164e-05, | |
| "loss": 1.2323, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 1.1957230653455414e-05, | |
| "loss": 1.2027, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "learning_rate": 1.1941552740499186e-05, | |
| "loss": 1.2175, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 6.46, | |
| "learning_rate": 1.1925874827542957e-05, | |
| "loss": 1.193, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 6.47, | |
| "learning_rate": 1.191019691458673e-05, | |
| "loss": 1.1808, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "learning_rate": 1.1894519001630504e-05, | |
| "loss": 1.1994, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 6.5, | |
| "learning_rate": 1.1878841088674276e-05, | |
| "loss": 1.1686, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "learning_rate": 1.186316317571805e-05, | |
| "loss": 1.1599, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 6.52, | |
| "learning_rate": 1.1847485262761821e-05, | |
| "loss": 1.2177, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 6.53, | |
| "learning_rate": 1.1831807349805595e-05, | |
| "loss": 1.2177, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "learning_rate": 1.1816129436849368e-05, | |
| "loss": 1.1605, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 6.56, | |
| "learning_rate": 1.180045152389314e-05, | |
| "loss": 1.2487, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 6.57, | |
| "learning_rate": 1.1784773610936913e-05, | |
| "loss": 1.2641, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.1769095697980685e-05, | |
| "loss": 1.1597, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.1753417785024459e-05, | |
| "loss": 1.1846, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 6.61, | |
| "learning_rate": 1.1737739872068232e-05, | |
| "loss": 1.2127, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "learning_rate": 1.1722061959112004e-05, | |
| "loss": 1.1935, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 6.63, | |
| "learning_rate": 1.1706384046155777e-05, | |
| "loss": 1.2079, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 6.65, | |
| "learning_rate": 1.1690706133199549e-05, | |
| "loss": 1.2393, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "learning_rate": 1.1675028220243322e-05, | |
| "loss": 1.2295, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "learning_rate": 1.1659350307287094e-05, | |
| "loss": 1.2593, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "learning_rate": 1.1643672394330868e-05, | |
| "loss": 1.2002, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 6.7, | |
| "learning_rate": 1.1627994481374641e-05, | |
| "loss": 1.1717, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 6.71, | |
| "learning_rate": 1.1612316568418413e-05, | |
| "loss": 1.2141, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "learning_rate": 1.1596638655462186e-05, | |
| "loss": 1.171, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "learning_rate": 1.1580960742505958e-05, | |
| "loss": 1.1938, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 6.75, | |
| "learning_rate": 1.1565282829549731e-05, | |
| "loss": 1.21, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 6.76, | |
| "learning_rate": 1.1549604916593505e-05, | |
| "loss": 1.2357, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "learning_rate": 1.1533927003637277e-05, | |
| "loss": 1.1811, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "learning_rate": 1.1518249090681048e-05, | |
| "loss": 1.2406, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 6.8, | |
| "learning_rate": 1.1502571177724822e-05, | |
| "loss": 1.2038, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.1486893264768595e-05, | |
| "loss": 1.204, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 6.82, | |
| "learning_rate": 1.1471215351812369e-05, | |
| "loss": 1.207, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "learning_rate": 1.145553743885614e-05, | |
| "loss": 1.1973, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "learning_rate": 1.1439859525899912e-05, | |
| "loss": 1.2101, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 6.86, | |
| "learning_rate": 1.1424181612943686e-05, | |
| "loss": 1.1494, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "learning_rate": 1.140850369998746e-05, | |
| "loss": 1.2214, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 6.89, | |
| "learning_rate": 1.1392825787031233e-05, | |
| "loss": 1.1631, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 6.9, | |
| "learning_rate": 1.1377147874075003e-05, | |
| "loss": 1.2273, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 6.91, | |
| "learning_rate": 1.1361469961118776e-05, | |
| "loss": 1.238, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "learning_rate": 1.134579204816255e-05, | |
| "loss": 1.2387, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.1330114135206323e-05, | |
| "loss": 1.2499, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 6.95, | |
| "learning_rate": 1.1314436222250097e-05, | |
| "loss": 1.18, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "learning_rate": 1.1298758309293867e-05, | |
| "loss": 1.2259, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "learning_rate": 1.128308039633764e-05, | |
| "loss": 1.2246, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.1267402483381414e-05, | |
| "loss": 1.1923, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "learning_rate": 1.1251724570425187e-05, | |
| "loss": 1.1841, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "eval_loss": 1.2204227447509766, | |
| "eval_runtime": 127.6995, | |
| "eval_samples_per_second": 277.495, | |
| "eval_steps_per_second": 34.691, | |
| "step": 279055 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "learning_rate": 1.123604665746896e-05, | |
| "loss": 1.1389, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 1.122036874451273e-05, | |
| "loss": 1.1898, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "learning_rate": 1.1204690831556504e-05, | |
| "loss": 1.1146, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "learning_rate": 1.1189012918600277e-05, | |
| "loss": 1.1572, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.1173335005644051e-05, | |
| "loss": 1.1553, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 7.07, | |
| "learning_rate": 1.1157657092687821e-05, | |
| "loss": 1.2099, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.1141979179731594e-05, | |
| "loss": 1.1562, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 7.1, | |
| "learning_rate": 1.1126301266775368e-05, | |
| "loss": 1.1754, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "learning_rate": 1.1110623353819141e-05, | |
| "loss": 1.2016, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "learning_rate": 1.1094945440862915e-05, | |
| "loss": 1.177, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "learning_rate": 1.1079267527906685e-05, | |
| "loss": 1.2008, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.1063589614950458e-05, | |
| "loss": 1.2134, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 7.16, | |
| "learning_rate": 1.1047911701994232e-05, | |
| "loss": 1.1965, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.1032233789038005e-05, | |
| "loss": 1.1614, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 7.19, | |
| "learning_rate": 1.1016555876081775e-05, | |
| "loss": 1.2116, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.1000877963125549e-05, | |
| "loss": 1.1696, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "learning_rate": 1.0985200050169322e-05, | |
| "loss": 1.192, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 7.22, | |
| "learning_rate": 1.0969522137213096e-05, | |
| "loss": 1.1865, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.0953844224256869e-05, | |
| "loss": 1.1716, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "learning_rate": 1.0938166311300639e-05, | |
| "loss": 1.1395, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "learning_rate": 1.0922488398344413e-05, | |
| "loss": 1.1886, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "learning_rate": 1.0906810485388186e-05, | |
| "loss": 1.1592, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 1.089113257243196e-05, | |
| "loss": 1.1792, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "learning_rate": 1.0875454659475731e-05, | |
| "loss": 1.2241, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 1.0859776746519503e-05, | |
| "loss": 1.2109, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "learning_rate": 1.0844098833563276e-05, | |
| "loss": 1.1347, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 1.082842092060705e-05, | |
| "loss": 1.1454, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 7.35, | |
| "learning_rate": 1.0812743007650823e-05, | |
| "loss": 1.1888, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.0797065094694595e-05, | |
| "loss": 1.1764, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "learning_rate": 1.0781387181738367e-05, | |
| "loss": 1.2194, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "learning_rate": 1.076570926878214e-05, | |
| "loss": 1.1219, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "learning_rate": 1.0750031355825914e-05, | |
| "loss": 1.2311, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "learning_rate": 1.0734353442869687e-05, | |
| "loss": 1.2233, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 1.0718675529913459e-05, | |
| "loss": 1.2083, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "learning_rate": 1.070299761695723e-05, | |
| "loss": 1.1935, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 1.0687319704001004e-05, | |
| "loss": 1.1264, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 1.0671641791044778e-05, | |
| "loss": 1.1662, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "learning_rate": 1.065596387808855e-05, | |
| "loss": 1.159, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 7.49, | |
| "learning_rate": 1.0640285965132323e-05, | |
| "loss": 1.1865, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 1.0624608052176095e-05, | |
| "loss": 1.2219, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "learning_rate": 1.0608930139219868e-05, | |
| "loss": 1.1702, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "learning_rate": 1.0593252226263642e-05, | |
| "loss": 1.1198, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 1.0577574313307413e-05, | |
| "loss": 1.1777, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 7.55, | |
| "learning_rate": 1.0561896400351187e-05, | |
| "loss": 1.1786, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 1.0546218487394959e-05, | |
| "loss": 1.2348, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 1.0530540574438732e-05, | |
| "loss": 1.2165, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 1.0514862661482504e-05, | |
| "loss": 1.2002, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "learning_rate": 1.0499184748526277e-05, | |
| "loss": 1.1752, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 1.0483506835570049e-05, | |
| "loss": 1.1828, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 7.63, | |
| "learning_rate": 1.0467828922613822e-05, | |
| "loss": 1.1647, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "learning_rate": 1.0452151009657596e-05, | |
| "loss": 1.1328, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 1.0436473096701368e-05, | |
| "loss": 1.1921, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "learning_rate": 1.0420795183745141e-05, | |
| "loss": 1.1367, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 1.0405117270788913e-05, | |
| "loss": 1.2171, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 7.69, | |
| "learning_rate": 1.0389439357832686e-05, | |
| "loss": 1.1139, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 1.0373761444876458e-05, | |
| "loss": 1.1636, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 1.0358083531920231e-05, | |
| "loss": 1.1886, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 1.0342405618964005e-05, | |
| "loss": 1.1854, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "learning_rate": 1.0326727706007777e-05, | |
| "loss": 1.1162, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 1.031104979305155e-05, | |
| "loss": 1.1896, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "learning_rate": 1.0295371880095322e-05, | |
| "loss": 1.181, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 7.78, | |
| "learning_rate": 1.0279693967139095e-05, | |
| "loss": 1.2068, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 1.0264016054182869e-05, | |
| "loss": 1.1469, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "learning_rate": 1.024833814122664e-05, | |
| "loss": 1.1962, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 7.81, | |
| "learning_rate": 1.0232660228270412e-05, | |
| "loss": 1.2163, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 7.83, | |
| "learning_rate": 1.0216982315314186e-05, | |
| "loss": 1.1355, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 1.020130440235796e-05, | |
| "loss": 1.177, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "learning_rate": 1.0185626489401733e-05, | |
| "loss": 1.1534, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 1.0169948576445504e-05, | |
| "loss": 1.152, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 7.88, | |
| "learning_rate": 1.0154270663489276e-05, | |
| "loss": 1.1655, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 1.013859275053305e-05, | |
| "loss": 1.2176, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 1.0122914837576823e-05, | |
| "loss": 1.1287, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 7.91, | |
| "learning_rate": 1.0107236924620597e-05, | |
| "loss": 1.1938, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 1.0091559011664368e-05, | |
| "loss": 1.1679, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 1.007588109870814e-05, | |
| "loss": 1.2031, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 1.0060203185751914e-05, | |
| "loss": 1.1444, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 7.96, | |
| "learning_rate": 1.0044525272795687e-05, | |
| "loss": 1.1229, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 1.002884735983946e-05, | |
| "loss": 1.1438, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 7.99, | |
| "learning_rate": 1.001316944688323e-05, | |
| "loss": 1.1608, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "eval_loss": 1.2094546556472778, | |
| "eval_runtime": 127.5825, | |
| "eval_samples_per_second": 277.75, | |
| "eval_steps_per_second": 34.723, | |
| "step": 318920 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 9.997491533927004e-06, | |
| "loss": 1.1374, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "learning_rate": 9.981813620970777e-06, | |
| "loss": 1.1764, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 9.966135708014551e-06, | |
| "loss": 1.1906, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 9.950457795058323e-06, | |
| "loss": 1.1977, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "learning_rate": 9.934779882102096e-06, | |
| "loss": 1.1281, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 9.919101969145868e-06, | |
| "loss": 1.119, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 9.903424056189641e-06, | |
| "loss": 1.1616, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 9.887746143233413e-06, | |
| "loss": 1.1467, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "learning_rate": 9.872068230277187e-06, | |
| "loss": 1.1215, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "learning_rate": 9.856390317320958e-06, | |
| "loss": 1.1575, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "learning_rate": 9.840712404364732e-06, | |
| "loss": 1.139, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "learning_rate": 9.825034491408505e-06, | |
| "loss": 1.1644, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 9.809356578452277e-06, | |
| "loss": 1.1423, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 8.17, | |
| "learning_rate": 9.79367866549605e-06, | |
| "loss": 1.1717, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 9.778000752539822e-06, | |
| "loss": 1.1742, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "learning_rate": 9.762322839583596e-06, | |
| "loss": 1.1472, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "learning_rate": 9.746644926627367e-06, | |
| "loss": 1.1373, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 8.22, | |
| "learning_rate": 9.73096701367114e-06, | |
| "loss": 1.1726, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 9.715289100714914e-06, | |
| "loss": 1.0968, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "learning_rate": 9.699611187758686e-06, | |
| "loss": 1.1652, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 9.68393327480246e-06, | |
| "loss": 1.1436, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 8.27, | |
| "learning_rate": 9.668255361846231e-06, | |
| "loss": 1.079, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 9.652577448890005e-06, | |
| "loss": 1.1103, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "learning_rate": 9.636899535933776e-06, | |
| "loss": 1.1668, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 9.62122162297755e-06, | |
| "loss": 1.1915, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "learning_rate": 9.605543710021322e-06, | |
| "loss": 1.1656, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 9.589865797065095e-06, | |
| "loss": 1.1738, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 9.574187884108869e-06, | |
| "loss": 1.1476, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 8.35, | |
| "learning_rate": 9.55850997115264e-06, | |
| "loss": 1.1956, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 9.542832058196414e-06, | |
| "loss": 1.0962, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "learning_rate": 9.527154145240186e-06, | |
| "loss": 1.1355, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 9.511476232283959e-06, | |
| "loss": 1.1325, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 9.49579831932773e-06, | |
| "loss": 1.1264, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 9.480120406371504e-06, | |
| "loss": 1.1675, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 9.464442493415278e-06, | |
| "loss": 1.2012, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 9.44876458045905e-06, | |
| "loss": 1.1367, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "learning_rate": 9.433086667502823e-06, | |
| "loss": 1.1672, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "learning_rate": 9.417408754546595e-06, | |
| "loss": 1.2382, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 9.401730841590368e-06, | |
| "loss": 1.0987, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 9.38605292863414e-06, | |
| "loss": 1.1812, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "learning_rate": 9.370375015677913e-06, | |
| "loss": 1.1735, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 9.354697102721687e-06, | |
| "loss": 1.239, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "learning_rate": 9.339019189765458e-06, | |
| "loss": 1.0647, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "learning_rate": 9.323341276809232e-06, | |
| "loss": 1.116, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 9.307663363853004e-06, | |
| "loss": 1.1161, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 9.291985450896777e-06, | |
| "loss": 1.1656, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "learning_rate": 9.27630753794055e-06, | |
| "loss": 1.0908, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 9.260629624984322e-06, | |
| "loss": 1.1451, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 9.244951712028096e-06, | |
| "loss": 1.1246, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 9.229273799071868e-06, | |
| "loss": 1.2057, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "learning_rate": 9.213595886115641e-06, | |
| "loss": 1.1393, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 9.197917973159414e-06, | |
| "loss": 1.1479, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 9.182240060203186e-06, | |
| "loss": 1.1118, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 9.16656214724696e-06, | |
| "loss": 1.1616, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 9.150884234290731e-06, | |
| "loss": 1.135, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "learning_rate": 9.135206321334505e-06, | |
| "loss": 1.1225, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 9.119528408378278e-06, | |
| "loss": 1.1603, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 9.10385049542205e-06, | |
| "loss": 1.1703, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "learning_rate": 9.088172582465824e-06, | |
| "loss": 1.2262, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 9.072494669509595e-06, | |
| "loss": 1.06, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 9.056816756553369e-06, | |
| "loss": 1.1594, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "learning_rate": 9.041138843597142e-06, | |
| "loss": 1.2018, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 9.025460930640914e-06, | |
| "loss": 1.1478, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "learning_rate": 9.009783017684687e-06, | |
| "loss": 1.145, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 8.99410510472846e-06, | |
| "loss": 1.1299, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "learning_rate": 8.978427191772233e-06, | |
| "loss": 1.1853, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 8.962749278816004e-06, | |
| "loss": 1.1524, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "learning_rate": 8.947071365859778e-06, | |
| "loss": 1.2222, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "learning_rate": 8.931393452903551e-06, | |
| "loss": 1.1312, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 8.915715539947323e-06, | |
| "loss": 1.171, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "learning_rate": 8.900037626991097e-06, | |
| "loss": 1.1357, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 8.884359714034868e-06, | |
| "loss": 1.1231, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 8.868681801078642e-06, | |
| "loss": 1.1771, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "learning_rate": 8.853003888122414e-06, | |
| "loss": 1.1985, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "learning_rate": 8.837325975166187e-06, | |
| "loss": 1.1563, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 8.94, | |
| "learning_rate": 8.82164806220996e-06, | |
| "loss": 1.1187, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "learning_rate": 8.805970149253732e-06, | |
| "loss": 1.1113, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 8.97, | |
| "learning_rate": 8.790292236297506e-06, | |
| "loss": 1.1305, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "learning_rate": 8.774614323341277e-06, | |
| "loss": 1.1053, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 8.99, | |
| "learning_rate": 8.758936410385051e-06, | |
| "loss": 1.2045, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "eval_loss": 1.181683897972107, | |
| "eval_runtime": 127.1715, | |
| "eval_samples_per_second": 278.647, | |
| "eval_steps_per_second": 34.835, | |
| "step": 358785 | |
| }, | |
| { | |
| "epoch": 9.01, | |
| "learning_rate": 8.743258497428823e-06, | |
| "loss": 1.1085, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 8.727580584472596e-06, | |
| "loss": 1.0804, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 9.03, | |
| "learning_rate": 8.711902671516368e-06, | |
| "loss": 1.1311, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 8.696224758560141e-06, | |
| "loss": 1.159, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 8.680546845603915e-06, | |
| "loss": 1.1409, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 9.07, | |
| "learning_rate": 8.664868932647686e-06, | |
| "loss": 1.1436, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "learning_rate": 8.64919101969146e-06, | |
| "loss": 1.1535, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "learning_rate": 8.633513106735232e-06, | |
| "loss": 1.0645, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "learning_rate": 8.617835193779005e-06, | |
| "loss": 1.1221, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "learning_rate": 8.602157280822777e-06, | |
| "loss": 1.1127, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 9.13, | |
| "learning_rate": 8.58647936786655e-06, | |
| "loss": 1.1139, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 9.14, | |
| "learning_rate": 8.570801454910324e-06, | |
| "loss": 1.1128, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 9.16, | |
| "learning_rate": 8.555123541954096e-06, | |
| "loss": 1.1384, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 8.539445628997869e-06, | |
| "loss": 1.0713, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 9.18, | |
| "learning_rate": 8.52376771604164e-06, | |
| "loss": 1.1902, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 8.508089803085414e-06, | |
| "loss": 1.1502, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 9.21, | |
| "learning_rate": 8.492411890129186e-06, | |
| "loss": 1.136, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 9.22, | |
| "learning_rate": 8.47673397717296e-06, | |
| "loss": 1.0885, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "learning_rate": 8.461056064216731e-06, | |
| "loss": 1.1144, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 9.24, | |
| "learning_rate": 8.445378151260505e-06, | |
| "loss": 1.1733, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "learning_rate": 8.429700238304278e-06, | |
| "loss": 1.1403, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 9.27, | |
| "learning_rate": 8.41402232534805e-06, | |
| "loss": 1.145, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "learning_rate": 8.398344412391823e-06, | |
| "loss": 1.0408, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 9.29, | |
| "learning_rate": 8.382666499435595e-06, | |
| "loss": 1.1128, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "learning_rate": 8.366988586479369e-06, | |
| "loss": 1.1214, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "learning_rate": 8.35131067352314e-06, | |
| "loss": 1.1047, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 9.33, | |
| "learning_rate": 8.335632760566914e-06, | |
| "loss": 1.1368, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 8.319954847610687e-06, | |
| "loss": 1.1855, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "learning_rate": 8.304276934654459e-06, | |
| "loss": 1.1038, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "learning_rate": 8.288599021698232e-06, | |
| "loss": 1.172, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 8.272921108742004e-06, | |
| "loss": 1.1118, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 9.39, | |
| "learning_rate": 8.257243195785778e-06, | |
| "loss": 1.0809, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 9.41, | |
| "learning_rate": 8.24156528282955e-06, | |
| "loss": 1.1414, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 9.42, | |
| "learning_rate": 8.225887369873323e-06, | |
| "loss": 1.1448, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "learning_rate": 8.210209456917095e-06, | |
| "loss": 1.1143, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "learning_rate": 8.194531543960868e-06, | |
| "loss": 1.1027, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "learning_rate": 8.178853631004642e-06, | |
| "loss": 1.1401, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "learning_rate": 8.163175718048413e-06, | |
| "loss": 1.131, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 9.48, | |
| "learning_rate": 8.147497805092187e-06, | |
| "loss": 1.1049, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "learning_rate": 8.131819892135958e-06, | |
| "loss": 1.1312, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "learning_rate": 8.116141979179732e-06, | |
| "loss": 1.1163, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 9.52, | |
| "learning_rate": 8.100464066223505e-06, | |
| "loss": 1.105, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 8.084786153267277e-06, | |
| "loss": 1.1223, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 9.54, | |
| "learning_rate": 8.06910824031105e-06, | |
| "loss": 1.159, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "learning_rate": 8.053430327354822e-06, | |
| "loss": 1.0807, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 9.57, | |
| "learning_rate": 8.037752414398596e-06, | |
| "loss": 1.1768, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 9.58, | |
| "learning_rate": 8.02207450144237e-06, | |
| "loss": 1.1399, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 8.006396588486141e-06, | |
| "loss": 1.1426, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "learning_rate": 7.990718675529914e-06, | |
| "loss": 1.066, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "learning_rate": 7.975040762573686e-06, | |
| "loss": 1.1044, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 9.63, | |
| "learning_rate": 7.95936284961746e-06, | |
| "loss": 1.1479, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "learning_rate": 7.943684936661233e-06, | |
| "loss": 1.0977, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "learning_rate": 7.928007023705005e-06, | |
| "loss": 1.1236, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 9.67, | |
| "learning_rate": 7.912329110748778e-06, | |
| "loss": 1.152, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 7.89665119779255e-06, | |
| "loss": 1.1036, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "learning_rate": 7.880973284836324e-06, | |
| "loss": 1.1227, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 9.71, | |
| "learning_rate": 7.865295371880095e-06, | |
| "loss": 1.0814, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "learning_rate": 7.849617458923869e-06, | |
| "loss": 1.158, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 9.73, | |
| "learning_rate": 7.833939545967642e-06, | |
| "loss": 1.1237, | |
| "step": 388000 | |
| }, | |
| { | |
| "epoch": 9.75, | |
| "learning_rate": 7.818261633011414e-06, | |
| "loss": 1.173, | |
| "step": 388500 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "learning_rate": 7.802583720055187e-06, | |
| "loss": 1.1304, | |
| "step": 389000 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "learning_rate": 7.78690580709896e-06, | |
| "loss": 1.0871, | |
| "step": 389500 | |
| }, | |
| { | |
| "epoch": 9.78, | |
| "learning_rate": 7.771227894142733e-06, | |
| "loss": 1.1715, | |
| "step": 390000 | |
| }, | |
| { | |
| "epoch": 9.8, | |
| "learning_rate": 7.755549981186506e-06, | |
| "loss": 1.132, | |
| "step": 390500 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 7.739872068230278e-06, | |
| "loss": 1.1601, | |
| "step": 391000 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "learning_rate": 7.724194155274051e-06, | |
| "loss": 1.1632, | |
| "step": 391500 | |
| }, | |
| { | |
| "epoch": 9.83, | |
| "learning_rate": 7.708516242317823e-06, | |
| "loss": 1.1204, | |
| "step": 392000 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 7.692838329361597e-06, | |
| "loss": 1.0789, | |
| "step": 392500 | |
| }, | |
| { | |
| "epoch": 9.86, | |
| "learning_rate": 7.67716041640537e-06, | |
| "loss": 1.1613, | |
| "step": 393000 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 7.661482503449142e-06, | |
| "loss": 1.1628, | |
| "step": 393500 | |
| }, | |
| { | |
| "epoch": 9.88, | |
| "learning_rate": 7.645804590492915e-06, | |
| "loss": 1.0951, | |
| "step": 394000 | |
| }, | |
| { | |
| "epoch": 9.9, | |
| "learning_rate": 7.630126677536687e-06, | |
| "loss": 1.1478, | |
| "step": 394500 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "learning_rate": 7.6144487645804596e-06, | |
| "loss": 1.1075, | |
| "step": 395000 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "learning_rate": 7.598770851624232e-06, | |
| "loss": 1.1255, | |
| "step": 395500 | |
| }, | |
| { | |
| "epoch": 9.93, | |
| "learning_rate": 7.583092938668006e-06, | |
| "loss": 1.0771, | |
| "step": 396000 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "learning_rate": 7.567415025711777e-06, | |
| "loss": 1.1575, | |
| "step": 396500 | |
| }, | |
| { | |
| "epoch": 9.96, | |
| "learning_rate": 7.551737112755551e-06, | |
| "loss": 1.1018, | |
| "step": 397000 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "learning_rate": 7.5360591997993235e-06, | |
| "loss": 1.1161, | |
| "step": 397500 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 7.520381286843096e-06, | |
| "loss": 1.1226, | |
| "step": 398000 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "learning_rate": 7.5047033738868695e-06, | |
| "loss": 1.1401, | |
| "step": 398500 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "eval_loss": 1.1810191869735718, | |
| "eval_runtime": 128.7944, | |
| "eval_samples_per_second": 275.136, | |
| "eval_steps_per_second": 34.396, | |
| "step": 398650 | |
| }, | |
| { | |
| "epoch": 10.01, | |
| "learning_rate": 7.489025460930641e-06, | |
| "loss": 1.0806, | |
| "step": 399000 | |
| }, | |
| { | |
| "epoch": 10.02, | |
| "learning_rate": 7.473347547974415e-06, | |
| "loss": 1.1156, | |
| "step": 399500 | |
| }, | |
| { | |
| "epoch": 10.03, | |
| "learning_rate": 7.4576696350181865e-06, | |
| "loss": 1.1052, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 10.05, | |
| "learning_rate": 7.44199172206196e-06, | |
| "loss": 1.0788, | |
| "step": 400500 | |
| }, | |
| { | |
| "epoch": 10.06, | |
| "learning_rate": 7.4263138091057325e-06, | |
| "loss": 1.0814, | |
| "step": 401000 | |
| }, | |
| { | |
| "epoch": 10.07, | |
| "learning_rate": 7.410635896149505e-06, | |
| "loss": 1.0861, | |
| "step": 401500 | |
| }, | |
| { | |
| "epoch": 10.08, | |
| "learning_rate": 7.394957983193279e-06, | |
| "loss": 1.0579, | |
| "step": 402000 | |
| }, | |
| { | |
| "epoch": 10.1, | |
| "learning_rate": 7.37928007023705e-06, | |
| "loss": 1.0808, | |
| "step": 402500 | |
| }, | |
| { | |
| "epoch": 10.11, | |
| "learning_rate": 7.363602157280824e-06, | |
| "loss": 1.0664, | |
| "step": 403000 | |
| }, | |
| { | |
| "epoch": 10.12, | |
| "learning_rate": 7.3479242443245956e-06, | |
| "loss": 1.1089, | |
| "step": 403500 | |
| }, | |
| { | |
| "epoch": 10.13, | |
| "learning_rate": 7.332246331368369e-06, | |
| "loss": 1.041, | |
| "step": 404000 | |
| }, | |
| { | |
| "epoch": 10.15, | |
| "learning_rate": 7.316568418412141e-06, | |
| "loss": 1.1319, | |
| "step": 404500 | |
| }, | |
| { | |
| "epoch": 10.16, | |
| "learning_rate": 7.300890505455914e-06, | |
| "loss": 1.0706, | |
| "step": 405000 | |
| }, | |
| { | |
| "epoch": 10.17, | |
| "learning_rate": 7.285212592499688e-06, | |
| "loss": 1.1228, | |
| "step": 405500 | |
| }, | |
| { | |
| "epoch": 10.18, | |
| "learning_rate": 7.2695346795434594e-06, | |
| "loss": 1.0994, | |
| "step": 406000 | |
| }, | |
| { | |
| "epoch": 10.2, | |
| "learning_rate": 7.253856766587233e-06, | |
| "loss": 1.1089, | |
| "step": 406500 | |
| }, | |
| { | |
| "epoch": 10.21, | |
| "learning_rate": 7.238178853631005e-06, | |
| "loss": 1.0791, | |
| "step": 407000 | |
| }, | |
| { | |
| "epoch": 10.22, | |
| "learning_rate": 7.222500940674778e-06, | |
| "loss": 1.0507, | |
| "step": 407500 | |
| }, | |
| { | |
| "epoch": 10.23, | |
| "learning_rate": 7.20682302771855e-06, | |
| "loss": 1.0938, | |
| "step": 408000 | |
| }, | |
| { | |
| "epoch": 10.25, | |
| "learning_rate": 7.191145114762323e-06, | |
| "loss": 1.1371, | |
| "step": 408500 | |
| }, | |
| { | |
| "epoch": 10.26, | |
| "learning_rate": 7.175467201806097e-06, | |
| "loss": 1.1441, | |
| "step": 409000 | |
| }, | |
| { | |
| "epoch": 10.27, | |
| "learning_rate": 7.1597892888498685e-06, | |
| "loss": 1.1253, | |
| "step": 409500 | |
| }, | |
| { | |
| "epoch": 10.28, | |
| "learning_rate": 7.144111375893642e-06, | |
| "loss": 1.0862, | |
| "step": 410000 | |
| }, | |
| { | |
| "epoch": 10.3, | |
| "learning_rate": 7.128433462937414e-06, | |
| "loss": 1.0671, | |
| "step": 410500 | |
| }, | |
| { | |
| "epoch": 10.31, | |
| "learning_rate": 7.112755549981187e-06, | |
| "loss": 1.1025, | |
| "step": 411000 | |
| }, | |
| { | |
| "epoch": 10.32, | |
| "learning_rate": 7.09707763702496e-06, | |
| "loss": 1.112, | |
| "step": 411500 | |
| }, | |
| { | |
| "epoch": 10.33, | |
| "learning_rate": 7.081399724068732e-06, | |
| "loss": 1.1957, | |
| "step": 412000 | |
| }, | |
| { | |
| "epoch": 10.35, | |
| "learning_rate": 7.065721811112505e-06, | |
| "loss": 1.1165, | |
| "step": 412500 | |
| }, | |
| { | |
| "epoch": 10.36, | |
| "learning_rate": 7.050043898156278e-06, | |
| "loss": 1.0835, | |
| "step": 413000 | |
| }, | |
| { | |
| "epoch": 10.37, | |
| "learning_rate": 7.034365985200051e-06, | |
| "loss": 1.0938, | |
| "step": 413500 | |
| }, | |
| { | |
| "epoch": 10.39, | |
| "learning_rate": 7.018688072243824e-06, | |
| "loss": 1.1519, | |
| "step": 414000 | |
| }, | |
| { | |
| "epoch": 10.4, | |
| "learning_rate": 7.003010159287596e-06, | |
| "loss": 1.1585, | |
| "step": 414500 | |
| }, | |
| { | |
| "epoch": 10.41, | |
| "learning_rate": 6.987332246331369e-06, | |
| "loss": 1.1207, | |
| "step": 415000 | |
| }, | |
| { | |
| "epoch": 10.42, | |
| "learning_rate": 6.9716543333751415e-06, | |
| "loss": 1.031, | |
| "step": 415500 | |
| }, | |
| { | |
| "epoch": 10.44, | |
| "learning_rate": 6.955976420418914e-06, | |
| "loss": 1.1214, | |
| "step": 416000 | |
| }, | |
| { | |
| "epoch": 10.45, | |
| "learning_rate": 6.9402985074626876e-06, | |
| "loss": 1.0939, | |
| "step": 416500 | |
| }, | |
| { | |
| "epoch": 10.46, | |
| "learning_rate": 6.92462059450646e-06, | |
| "loss": 1.1134, | |
| "step": 417000 | |
| }, | |
| { | |
| "epoch": 10.47, | |
| "learning_rate": 6.908942681550233e-06, | |
| "loss": 1.1446, | |
| "step": 417500 | |
| }, | |
| { | |
| "epoch": 10.49, | |
| "learning_rate": 6.893264768594005e-06, | |
| "loss": 1.0843, | |
| "step": 418000 | |
| }, | |
| { | |
| "epoch": 10.5, | |
| "learning_rate": 6.877586855637778e-06, | |
| "loss": 1.1155, | |
| "step": 418500 | |
| }, | |
| { | |
| "epoch": 10.51, | |
| "learning_rate": 6.8619089426815514e-06, | |
| "loss": 1.0844, | |
| "step": 419000 | |
| }, | |
| { | |
| "epoch": 10.52, | |
| "learning_rate": 6.846231029725323e-06, | |
| "loss": 1.1019, | |
| "step": 419500 | |
| }, | |
| { | |
| "epoch": 10.54, | |
| "learning_rate": 6.830553116769097e-06, | |
| "loss": 1.1204, | |
| "step": 420000 | |
| }, | |
| { | |
| "epoch": 10.55, | |
| "learning_rate": 6.814875203812868e-06, | |
| "loss": 1.1081, | |
| "step": 420500 | |
| }, | |
| { | |
| "epoch": 10.56, | |
| "learning_rate": 6.799197290856642e-06, | |
| "loss": 1.1415, | |
| "step": 421000 | |
| }, | |
| { | |
| "epoch": 10.57, | |
| "learning_rate": 6.783519377900415e-06, | |
| "loss": 1.0147, | |
| "step": 421500 | |
| }, | |
| { | |
| "epoch": 10.59, | |
| "learning_rate": 6.767841464944187e-06, | |
| "loss": 1.1238, | |
| "step": 422000 | |
| }, | |
| { | |
| "epoch": 10.6, | |
| "learning_rate": 6.7521635519879605e-06, | |
| "loss": 1.0905, | |
| "step": 422500 | |
| }, | |
| { | |
| "epoch": 10.61, | |
| "learning_rate": 6.736485639031732e-06, | |
| "loss": 1.0703, | |
| "step": 423000 | |
| }, | |
| { | |
| "epoch": 10.62, | |
| "learning_rate": 6.720807726075506e-06, | |
| "loss": 1.1379, | |
| "step": 423500 | |
| }, | |
| { | |
| "epoch": 10.64, | |
| "learning_rate": 6.7051298131192775e-06, | |
| "loss": 1.0753, | |
| "step": 424000 | |
| }, | |
| { | |
| "epoch": 10.65, | |
| "learning_rate": 6.689451900163051e-06, | |
| "loss": 1.0929, | |
| "step": 424500 | |
| }, | |
| { | |
| "epoch": 10.66, | |
| "learning_rate": 6.673773987206824e-06, | |
| "loss": 1.1311, | |
| "step": 425000 | |
| }, | |
| { | |
| "epoch": 10.67, | |
| "learning_rate": 6.658096074250596e-06, | |
| "loss": 1.1616, | |
| "step": 425500 | |
| }, | |
| { | |
| "epoch": 10.69, | |
| "learning_rate": 6.64241816129437e-06, | |
| "loss": 1.0553, | |
| "step": 426000 | |
| }, | |
| { | |
| "epoch": 10.7, | |
| "learning_rate": 6.626740248338141e-06, | |
| "loss": 1.126, | |
| "step": 426500 | |
| }, | |
| { | |
| "epoch": 10.71, | |
| "learning_rate": 6.611062335381915e-06, | |
| "loss": 1.1379, | |
| "step": 427000 | |
| }, | |
| { | |
| "epoch": 10.72, | |
| "learning_rate": 6.595384422425687e-06, | |
| "loss": 1.1454, | |
| "step": 427500 | |
| }, | |
| { | |
| "epoch": 10.74, | |
| "learning_rate": 6.57970650946946e-06, | |
| "loss": 1.135, | |
| "step": 428000 | |
| }, | |
| { | |
| "epoch": 10.75, | |
| "learning_rate": 6.564028596513232e-06, | |
| "loss": 1.1442, | |
| "step": 428500 | |
| }, | |
| { | |
| "epoch": 10.76, | |
| "learning_rate": 6.548350683557005e-06, | |
| "loss": 1.051, | |
| "step": 429000 | |
| }, | |
| { | |
| "epoch": 10.77, | |
| "learning_rate": 6.532672770600779e-06, | |
| "loss": 1.0752, | |
| "step": 429500 | |
| }, | |
| { | |
| "epoch": 10.79, | |
| "learning_rate": 6.5169948576445505e-06, | |
| "loss": 1.117, | |
| "step": 430000 | |
| }, | |
| { | |
| "epoch": 10.8, | |
| "learning_rate": 6.501316944688324e-06, | |
| "loss": 1.1297, | |
| "step": 430500 | |
| }, | |
| { | |
| "epoch": 10.81, | |
| "learning_rate": 6.485639031732096e-06, | |
| "loss": 1.0782, | |
| "step": 431000 | |
| }, | |
| { | |
| "epoch": 10.82, | |
| "learning_rate": 6.469961118775869e-06, | |
| "loss": 1.0986, | |
| "step": 431500 | |
| }, | |
| { | |
| "epoch": 10.84, | |
| "learning_rate": 6.454283205819642e-06, | |
| "loss": 1.1466, | |
| "step": 432000 | |
| }, | |
| { | |
| "epoch": 10.85, | |
| "learning_rate": 6.438605292863414e-06, | |
| "loss": 1.0771, | |
| "step": 432500 | |
| }, | |
| { | |
| "epoch": 10.86, | |
| "learning_rate": 6.422927379907187e-06, | |
| "loss": 1.0713, | |
| "step": 433000 | |
| }, | |
| { | |
| "epoch": 10.87, | |
| "learning_rate": 6.4072494669509596e-06, | |
| "loss": 1.0749, | |
| "step": 433500 | |
| }, | |
| { | |
| "epoch": 10.89, | |
| "learning_rate": 6.391571553994733e-06, | |
| "loss": 1.1106, | |
| "step": 434000 | |
| }, | |
| { | |
| "epoch": 10.9, | |
| "learning_rate": 6.375893641038506e-06, | |
| "loss": 1.0764, | |
| "step": 434500 | |
| }, | |
| { | |
| "epoch": 10.91, | |
| "learning_rate": 6.360215728082278e-06, | |
| "loss": 1.1313, | |
| "step": 435000 | |
| }, | |
| { | |
| "epoch": 10.92, | |
| "learning_rate": 6.344537815126051e-06, | |
| "loss": 1.0613, | |
| "step": 435500 | |
| }, | |
| { | |
| "epoch": 10.94, | |
| "learning_rate": 6.3288599021698234e-06, | |
| "loss": 1.0666, | |
| "step": 436000 | |
| }, | |
| { | |
| "epoch": 10.95, | |
| "learning_rate": 6.313181989213596e-06, | |
| "loss": 1.1066, | |
| "step": 436500 | |
| }, | |
| { | |
| "epoch": 10.96, | |
| "learning_rate": 6.2975040762573695e-06, | |
| "loss": 1.1211, | |
| "step": 437000 | |
| }, | |
| { | |
| "epoch": 10.97, | |
| "learning_rate": 6.281826163301142e-06, | |
| "loss": 1.1016, | |
| "step": 437500 | |
| }, | |
| { | |
| "epoch": 10.99, | |
| "learning_rate": 6.266148250344915e-06, | |
| "loss": 1.1414, | |
| "step": 438000 | |
| }, | |
| { | |
| "epoch": 11.0, | |
| "learning_rate": 6.250470337388687e-06, | |
| "loss": 1.0748, | |
| "step": 438500 | |
| }, | |
| { | |
| "epoch": 11.0, | |
| "eval_loss": 1.1498711109161377, | |
| "eval_runtime": 127.548, | |
| "eval_samples_per_second": 277.825, | |
| "eval_steps_per_second": 34.732, | |
| "step": 438515 | |
| }, | |
| { | |
| "epoch": 11.01, | |
| "learning_rate": 6.23479242443246e-06, | |
| "loss": 1.083, | |
| "step": 439000 | |
| }, | |
| { | |
| "epoch": 11.02, | |
| "learning_rate": 6.219114511476233e-06, | |
| "loss": 1.0758, | |
| "step": 439500 | |
| }, | |
| { | |
| "epoch": 11.04, | |
| "learning_rate": 6.203436598520005e-06, | |
| "loss": 1.0222, | |
| "step": 440000 | |
| }, | |
| { | |
| "epoch": 11.05, | |
| "learning_rate": 6.187758685563779e-06, | |
| "loss": 1.0369, | |
| "step": 440500 | |
| }, | |
| { | |
| "epoch": 11.06, | |
| "learning_rate": 6.17208077260755e-06, | |
| "loss": 1.0377, | |
| "step": 441000 | |
| }, | |
| { | |
| "epoch": 11.07, | |
| "learning_rate": 6.156402859651324e-06, | |
| "loss": 1.0744, | |
| "step": 441500 | |
| }, | |
| { | |
| "epoch": 11.09, | |
| "learning_rate": 6.140724946695097e-06, | |
| "loss": 1.0682, | |
| "step": 442000 | |
| }, | |
| { | |
| "epoch": 11.1, | |
| "learning_rate": 6.125047033738869e-06, | |
| "loss": 1.0682, | |
| "step": 442500 | |
| }, | |
| { | |
| "epoch": 11.11, | |
| "learning_rate": 6.1093691207826425e-06, | |
| "loss": 1.0586, | |
| "step": 443000 | |
| }, | |
| { | |
| "epoch": 11.13, | |
| "learning_rate": 6.093691207826414e-06, | |
| "loss": 1.1158, | |
| "step": 443500 | |
| }, | |
| { | |
| "epoch": 11.14, | |
| "learning_rate": 6.078013294870188e-06, | |
| "loss": 1.1443, | |
| "step": 444000 | |
| }, | |
| { | |
| "epoch": 11.15, | |
| "learning_rate": 6.0623353819139594e-06, | |
| "loss": 1.0765, | |
| "step": 444500 | |
| }, | |
| { | |
| "epoch": 11.16, | |
| "learning_rate": 6.046657468957733e-06, | |
| "loss": 1.0657, | |
| "step": 445000 | |
| }, | |
| { | |
| "epoch": 11.18, | |
| "learning_rate": 6.030979556001506e-06, | |
| "loss": 1.0981, | |
| "step": 445500 | |
| }, | |
| { | |
| "epoch": 11.19, | |
| "learning_rate": 6.015301643045278e-06, | |
| "loss": 1.0796, | |
| "step": 446000 | |
| }, | |
| { | |
| "epoch": 11.2, | |
| "learning_rate": 5.9996237300890516e-06, | |
| "loss": 1.075, | |
| "step": 446500 | |
| }, | |
| { | |
| "epoch": 11.21, | |
| "learning_rate": 5.983945817132823e-06, | |
| "loss": 1.0764, | |
| "step": 447000 | |
| }, | |
| { | |
| "epoch": 11.23, | |
| "learning_rate": 5.968267904176597e-06, | |
| "loss": 1.0833, | |
| "step": 447500 | |
| }, | |
| { | |
| "epoch": 11.24, | |
| "learning_rate": 5.9525899912203685e-06, | |
| "loss": 1.0742, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 11.25, | |
| "learning_rate": 5.936912078264142e-06, | |
| "loss": 1.0643, | |
| "step": 448500 | |
| }, | |
| { | |
| "epoch": 11.26, | |
| "learning_rate": 5.921234165307915e-06, | |
| "loss": 1.0914, | |
| "step": 449000 | |
| }, | |
| { | |
| "epoch": 11.28, | |
| "learning_rate": 5.905556252351687e-06, | |
| "loss": 1.1228, | |
| "step": 449500 | |
| }, | |
| { | |
| "epoch": 11.29, | |
| "learning_rate": 5.889878339395461e-06, | |
| "loss": 1.0618, | |
| "step": 450000 | |
| }, | |
| { | |
| "epoch": 11.3, | |
| "learning_rate": 5.874200426439232e-06, | |
| "loss": 1.067, | |
| "step": 450500 | |
| }, | |
| { | |
| "epoch": 11.31, | |
| "learning_rate": 5.858522513483006e-06, | |
| "loss": 1.0584, | |
| "step": 451000 | |
| }, | |
| { | |
| "epoch": 11.33, | |
| "learning_rate": 5.8428446005267785e-06, | |
| "loss": 1.0778, | |
| "step": 451500 | |
| }, | |
| { | |
| "epoch": 11.34, | |
| "learning_rate": 5.827166687570551e-06, | |
| "loss": 1.1007, | |
| "step": 452000 | |
| }, | |
| { | |
| "epoch": 11.35, | |
| "learning_rate": 5.811488774614324e-06, | |
| "loss": 1.1034, | |
| "step": 452500 | |
| }, | |
| { | |
| "epoch": 11.36, | |
| "learning_rate": 5.795810861658096e-06, | |
| "loss": 1.098, | |
| "step": 453000 | |
| }, | |
| { | |
| "epoch": 11.38, | |
| "learning_rate": 5.78013294870187e-06, | |
| "loss": 1.0907, | |
| "step": 453500 | |
| }, | |
| { | |
| "epoch": 11.39, | |
| "learning_rate": 5.7644550357456415e-06, | |
| "loss": 1.0783, | |
| "step": 454000 | |
| }, | |
| { | |
| "epoch": 11.4, | |
| "learning_rate": 5.748777122789415e-06, | |
| "loss": 1.0586, | |
| "step": 454500 | |
| }, | |
| { | |
| "epoch": 11.41, | |
| "learning_rate": 5.7330992098331876e-06, | |
| "loss": 1.0824, | |
| "step": 455000 | |
| }, | |
| { | |
| "epoch": 11.43, | |
| "learning_rate": 5.71742129687696e-06, | |
| "loss": 1.0782, | |
| "step": 455500 | |
| }, | |
| { | |
| "epoch": 11.44, | |
| "learning_rate": 5.701743383920733e-06, | |
| "loss": 1.1143, | |
| "step": 456000 | |
| }, | |
| { | |
| "epoch": 11.45, | |
| "learning_rate": 5.686065470964505e-06, | |
| "loss": 1.0858, | |
| "step": 456500 | |
| }, | |
| { | |
| "epoch": 11.46, | |
| "learning_rate": 5.670387558008278e-06, | |
| "loss": 1.0773, | |
| "step": 457000 | |
| }, | |
| { | |
| "epoch": 11.48, | |
| "learning_rate": 5.6547096450520514e-06, | |
| "loss": 1.0885, | |
| "step": 457500 | |
| }, | |
| { | |
| "epoch": 11.49, | |
| "learning_rate": 5.639031732095824e-06, | |
| "loss": 1.0955, | |
| "step": 458000 | |
| }, | |
| { | |
| "epoch": 11.5, | |
| "learning_rate": 5.623353819139597e-06, | |
| "loss": 1.0341, | |
| "step": 458500 | |
| }, | |
| { | |
| "epoch": 11.51, | |
| "learning_rate": 5.607675906183369e-06, | |
| "loss": 1.0371, | |
| "step": 459000 | |
| }, | |
| { | |
| "epoch": 11.53, | |
| "learning_rate": 5.591997993227142e-06, | |
| "loss": 1.0617, | |
| "step": 459500 | |
| }, | |
| { | |
| "epoch": 11.54, | |
| "learning_rate": 5.576320080270915e-06, | |
| "loss": 1.1171, | |
| "step": 460000 | |
| }, | |
| { | |
| "epoch": 11.55, | |
| "learning_rate": 5.560642167314687e-06, | |
| "loss": 1.0636, | |
| "step": 460500 | |
| }, | |
| { | |
| "epoch": 11.56, | |
| "learning_rate": 5.5449642543584605e-06, | |
| "loss": 1.1014, | |
| "step": 461000 | |
| }, | |
| { | |
| "epoch": 11.58, | |
| "learning_rate": 5.529286341402233e-06, | |
| "loss": 1.0767, | |
| "step": 461500 | |
| }, | |
| { | |
| "epoch": 11.59, | |
| "learning_rate": 5.513608428446006e-06, | |
| "loss": 1.0649, | |
| "step": 462000 | |
| }, | |
| { | |
| "epoch": 11.6, | |
| "learning_rate": 5.497930515489779e-06, | |
| "loss": 1.0812, | |
| "step": 462500 | |
| }, | |
| { | |
| "epoch": 11.61, | |
| "learning_rate": 5.482252602533551e-06, | |
| "loss": 1.0922, | |
| "step": 463000 | |
| }, | |
| { | |
| "epoch": 11.63, | |
| "learning_rate": 5.466574689577324e-06, | |
| "loss": 1.1091, | |
| "step": 463500 | |
| }, | |
| { | |
| "epoch": 11.64, | |
| "learning_rate": 5.450896776621096e-06, | |
| "loss": 1.0437, | |
| "step": 464000 | |
| }, | |
| { | |
| "epoch": 11.65, | |
| "learning_rate": 5.43521886366487e-06, | |
| "loss": 1.0618, | |
| "step": 464500 | |
| }, | |
| { | |
| "epoch": 11.66, | |
| "learning_rate": 5.419540950708641e-06, | |
| "loss": 1.1011, | |
| "step": 465000 | |
| }, | |
| { | |
| "epoch": 11.68, | |
| "learning_rate": 5.403863037752415e-06, | |
| "loss": 1.1289, | |
| "step": 465500 | |
| }, | |
| { | |
| "epoch": 11.69, | |
| "learning_rate": 5.388185124796188e-06, | |
| "loss": 1.0708, | |
| "step": 466000 | |
| }, | |
| { | |
| "epoch": 11.7, | |
| "learning_rate": 5.37250721183996e-06, | |
| "loss": 1.1337, | |
| "step": 466500 | |
| }, | |
| { | |
| "epoch": 11.71, | |
| "learning_rate": 5.3568292988837335e-06, | |
| "loss": 1.0979, | |
| "step": 467000 | |
| }, | |
| { | |
| "epoch": 11.73, | |
| "learning_rate": 5.341151385927505e-06, | |
| "loss": 1.0715, | |
| "step": 467500 | |
| }, | |
| { | |
| "epoch": 11.74, | |
| "learning_rate": 5.325473472971279e-06, | |
| "loss": 1.067, | |
| "step": 468000 | |
| }, | |
| { | |
| "epoch": 11.75, | |
| "learning_rate": 5.3097955600150505e-06, | |
| "loss": 1.1121, | |
| "step": 468500 | |
| }, | |
| { | |
| "epoch": 11.76, | |
| "learning_rate": 5.294117647058824e-06, | |
| "loss": 1.101, | |
| "step": 469000 | |
| }, | |
| { | |
| "epoch": 11.78, | |
| "learning_rate": 5.278439734102597e-06, | |
| "loss": 1.0775, | |
| "step": 469500 | |
| }, | |
| { | |
| "epoch": 11.79, | |
| "learning_rate": 5.262761821146369e-06, | |
| "loss": 1.0705, | |
| "step": 470000 | |
| }, | |
| { | |
| "epoch": 11.8, | |
| "learning_rate": 5.247083908190143e-06, | |
| "loss": 1.0401, | |
| "step": 470500 | |
| }, | |
| { | |
| "epoch": 11.81, | |
| "learning_rate": 5.231405995233914e-06, | |
| "loss": 1.0716, | |
| "step": 471000 | |
| }, | |
| { | |
| "epoch": 11.83, | |
| "learning_rate": 5.215728082277688e-06, | |
| "loss": 1.0586, | |
| "step": 471500 | |
| }, | |
| { | |
| "epoch": 11.84, | |
| "learning_rate": 5.20005016932146e-06, | |
| "loss": 1.0127, | |
| "step": 472000 | |
| }, | |
| { | |
| "epoch": 11.85, | |
| "learning_rate": 5.184372256365233e-06, | |
| "loss": 1.0542, | |
| "step": 472500 | |
| }, | |
| { | |
| "epoch": 11.87, | |
| "learning_rate": 5.168694343409006e-06, | |
| "loss": 1.1456, | |
| "step": 473000 | |
| }, | |
| { | |
| "epoch": 11.88, | |
| "learning_rate": 5.153016430452778e-06, | |
| "loss": 1.1196, | |
| "step": 473500 | |
| }, | |
| { | |
| "epoch": 11.89, | |
| "learning_rate": 5.137338517496552e-06, | |
| "loss": 1.0219, | |
| "step": 474000 | |
| }, | |
| { | |
| "epoch": 11.9, | |
| "learning_rate": 5.121660604540324e-06, | |
| "loss": 1.0733, | |
| "step": 474500 | |
| }, | |
| { | |
| "epoch": 11.92, | |
| "learning_rate": 5.105982691584097e-06, | |
| "loss": 1.0688, | |
| "step": 475000 | |
| }, | |
| { | |
| "epoch": 11.93, | |
| "learning_rate": 5.0903047786278695e-06, | |
| "loss": 1.1228, | |
| "step": 475500 | |
| }, | |
| { | |
| "epoch": 11.94, | |
| "learning_rate": 5.074626865671642e-06, | |
| "loss": 1.1206, | |
| "step": 476000 | |
| }, | |
| { | |
| "epoch": 11.95, | |
| "learning_rate": 5.058948952715415e-06, | |
| "loss": 1.0533, | |
| "step": 476500 | |
| }, | |
| { | |
| "epoch": 11.97, | |
| "learning_rate": 5.043271039759188e-06, | |
| "loss": 1.0524, | |
| "step": 477000 | |
| }, | |
| { | |
| "epoch": 11.98, | |
| "learning_rate": 5.02759312680296e-06, | |
| "loss": 1.0691, | |
| "step": 477500 | |
| }, | |
| { | |
| "epoch": 11.99, | |
| "learning_rate": 5.011915213846733e-06, | |
| "loss": 1.0857, | |
| "step": 478000 | |
| }, | |
| { | |
| "epoch": 12.0, | |
| "eval_loss": 1.1345129013061523, | |
| "eval_runtime": 128.1857, | |
| "eval_samples_per_second": 276.443, | |
| "eval_steps_per_second": 34.559, | |
| "step": 478380 | |
| }, | |
| { | |
| "epoch": 12.0, | |
| "learning_rate": 4.996237300890506e-06, | |
| "loss": 1.085, | |
| "step": 478500 | |
| }, | |
| { | |
| "epoch": 12.02, | |
| "learning_rate": 4.9805593879342786e-06, | |
| "loss": 1.0517, | |
| "step": 479000 | |
| }, | |
| { | |
| "epoch": 12.03, | |
| "learning_rate": 4.964881474978051e-06, | |
| "loss": 1.0369, | |
| "step": 479500 | |
| }, | |
| { | |
| "epoch": 12.04, | |
| "learning_rate": 4.949203562021825e-06, | |
| "loss": 1.0236, | |
| "step": 480000 | |
| }, | |
| { | |
| "epoch": 12.05, | |
| "learning_rate": 4.933525649065597e-06, | |
| "loss": 1.1322, | |
| "step": 480500 | |
| }, | |
| { | |
| "epoch": 12.07, | |
| "learning_rate": 4.91784773610937e-06, | |
| "loss": 1.0517, | |
| "step": 481000 | |
| }, | |
| { | |
| "epoch": 12.08, | |
| "learning_rate": 4.9021698231531425e-06, | |
| "loss": 1.0192, | |
| "step": 481500 | |
| }, | |
| { | |
| "epoch": 12.09, | |
| "learning_rate": 4.886491910196915e-06, | |
| "loss": 1.0468, | |
| "step": 482000 | |
| }, | |
| { | |
| "epoch": 12.1, | |
| "learning_rate": 4.870813997240688e-06, | |
| "loss": 1.0142, | |
| "step": 482500 | |
| }, | |
| { | |
| "epoch": 12.12, | |
| "learning_rate": 4.85513608428446e-06, | |
| "loss": 1.0511, | |
| "step": 483000 | |
| }, | |
| { | |
| "epoch": 12.13, | |
| "learning_rate": 4.839458171328233e-06, | |
| "loss": 1.0379, | |
| "step": 483500 | |
| }, | |
| { | |
| "epoch": 12.14, | |
| "learning_rate": 4.823780258372006e-06, | |
| "loss": 1.0645, | |
| "step": 484000 | |
| }, | |
| { | |
| "epoch": 12.15, | |
| "learning_rate": 4.808102345415779e-06, | |
| "loss": 1.0567, | |
| "step": 484500 | |
| }, | |
| { | |
| "epoch": 12.17, | |
| "learning_rate": 4.7924244324595515e-06, | |
| "loss": 1.0486, | |
| "step": 485000 | |
| }, | |
| { | |
| "epoch": 12.18, | |
| "learning_rate": 4.776746519503324e-06, | |
| "loss": 1.0741, | |
| "step": 485500 | |
| }, | |
| { | |
| "epoch": 12.19, | |
| "learning_rate": 4.761068606547097e-06, | |
| "loss": 1.0203, | |
| "step": 486000 | |
| }, | |
| { | |
| "epoch": 12.2, | |
| "learning_rate": 4.745390693590869e-06, | |
| "loss": 1.0649, | |
| "step": 486500 | |
| }, | |
| { | |
| "epoch": 12.22, | |
| "learning_rate": 4.729712780634642e-06, | |
| "loss": 1.0491, | |
| "step": 487000 | |
| }, | |
| { | |
| "epoch": 12.23, | |
| "learning_rate": 4.7140348676784146e-06, | |
| "loss": 1.085, | |
| "step": 487500 | |
| }, | |
| { | |
| "epoch": 12.24, | |
| "learning_rate": 4.698356954722188e-06, | |
| "loss": 1.042, | |
| "step": 488000 | |
| }, | |
| { | |
| "epoch": 12.25, | |
| "learning_rate": 4.682679041765961e-06, | |
| "loss": 1.0425, | |
| "step": 488500 | |
| }, | |
| { | |
| "epoch": 12.27, | |
| "learning_rate": 4.667001128809733e-06, | |
| "loss": 1.0799, | |
| "step": 489000 | |
| }, | |
| { | |
| "epoch": 12.28, | |
| "learning_rate": 4.651323215853506e-06, | |
| "loss": 1.0631, | |
| "step": 489500 | |
| }, | |
| { | |
| "epoch": 12.29, | |
| "learning_rate": 4.6356453028972785e-06, | |
| "loss": 1.102, | |
| "step": 490000 | |
| }, | |
| { | |
| "epoch": 12.3, | |
| "learning_rate": 4.619967389941051e-06, | |
| "loss": 1.0461, | |
| "step": 490500 | |
| }, | |
| { | |
| "epoch": 12.32, | |
| "learning_rate": 4.604289476984824e-06, | |
| "loss": 1.0657, | |
| "step": 491000 | |
| }, | |
| { | |
| "epoch": 12.33, | |
| "learning_rate": 4.588611564028596e-06, | |
| "loss": 1.0682, | |
| "step": 491500 | |
| }, | |
| { | |
| "epoch": 12.34, | |
| "learning_rate": 4.57293365107237e-06, | |
| "loss": 1.0744, | |
| "step": 492000 | |
| }, | |
| { | |
| "epoch": 12.35, | |
| "learning_rate": 4.557255738116142e-06, | |
| "loss": 1.0515, | |
| "step": 492500 | |
| }, | |
| { | |
| "epoch": 12.37, | |
| "learning_rate": 4.541577825159915e-06, | |
| "loss": 1.069, | |
| "step": 493000 | |
| }, | |
| { | |
| "epoch": 12.38, | |
| "learning_rate": 4.5258999122036875e-06, | |
| "loss": 1.0799, | |
| "step": 493500 | |
| }, | |
| { | |
| "epoch": 12.39, | |
| "learning_rate": 4.51022199924746e-06, | |
| "loss": 1.0657, | |
| "step": 494000 | |
| }, | |
| { | |
| "epoch": 12.4, | |
| "learning_rate": 4.494544086291233e-06, | |
| "loss": 1.0746, | |
| "step": 494500 | |
| }, | |
| { | |
| "epoch": 12.42, | |
| "learning_rate": 4.478866173335006e-06, | |
| "loss": 1.0562, | |
| "step": 495000 | |
| }, | |
| { | |
| "epoch": 12.43, | |
| "learning_rate": 4.463188260378779e-06, | |
| "loss": 1.0672, | |
| "step": 495500 | |
| }, | |
| { | |
| "epoch": 12.44, | |
| "learning_rate": 4.447510347422551e-06, | |
| "loss": 1.0975, | |
| "step": 496000 | |
| }, | |
| { | |
| "epoch": 12.45, | |
| "learning_rate": 4.431832434466324e-06, | |
| "loss": 1.0556, | |
| "step": 496500 | |
| }, | |
| { | |
| "epoch": 12.47, | |
| "learning_rate": 4.416154521510097e-06, | |
| "loss": 1.114, | |
| "step": 497000 | |
| }, | |
| { | |
| "epoch": 12.48, | |
| "learning_rate": 4.40047660855387e-06, | |
| "loss": 1.0487, | |
| "step": 497500 | |
| }, | |
| { | |
| "epoch": 12.49, | |
| "learning_rate": 4.384798695597643e-06, | |
| "loss": 1.0882, | |
| "step": 498000 | |
| }, | |
| { | |
| "epoch": 12.5, | |
| "learning_rate": 4.369120782641415e-06, | |
| "loss": 1.0642, | |
| "step": 498500 | |
| }, | |
| { | |
| "epoch": 12.52, | |
| "learning_rate": 4.353442869685188e-06, | |
| "loss": 1.0573, | |
| "step": 499000 | |
| }, | |
| { | |
| "epoch": 12.53, | |
| "learning_rate": 4.3377649567289605e-06, | |
| "loss": 1.0843, | |
| "step": 499500 | |
| }, | |
| { | |
| "epoch": 12.54, | |
| "learning_rate": 4.322087043772734e-06, | |
| "loss": 1.0389, | |
| "step": 500000 | |
| }, | |
| { | |
| "epoch": 12.55, | |
| "learning_rate": 4.3064091308165066e-06, | |
| "loss": 1.0181, | |
| "step": 500500 | |
| }, | |
| { | |
| "epoch": 12.57, | |
| "learning_rate": 4.290731217860279e-06, | |
| "loss": 1.066, | |
| "step": 501000 | |
| }, | |
| { | |
| "epoch": 12.58, | |
| "learning_rate": 4.275053304904052e-06, | |
| "loss": 1.0766, | |
| "step": 501500 | |
| }, | |
| { | |
| "epoch": 12.59, | |
| "learning_rate": 4.259375391947824e-06, | |
| "loss": 1.0449, | |
| "step": 502000 | |
| }, | |
| { | |
| "epoch": 12.61, | |
| "learning_rate": 4.243697478991597e-06, | |
| "loss": 1.0254, | |
| "step": 502500 | |
| }, | |
| { | |
| "epoch": 12.62, | |
| "learning_rate": 4.22801956603537e-06, | |
| "loss": 1.0405, | |
| "step": 503000 | |
| }, | |
| { | |
| "epoch": 12.63, | |
| "learning_rate": 4.212341653079142e-06, | |
| "loss": 1.0537, | |
| "step": 503500 | |
| }, | |
| { | |
| "epoch": 12.64, | |
| "learning_rate": 4.196663740122916e-06, | |
| "loss": 1.0986, | |
| "step": 504000 | |
| }, | |
| { | |
| "epoch": 12.66, | |
| "learning_rate": 4.180985827166688e-06, | |
| "loss": 1.0777, | |
| "step": 504500 | |
| }, | |
| { | |
| "epoch": 12.67, | |
| "learning_rate": 4.165307914210461e-06, | |
| "loss": 1.0084, | |
| "step": 505000 | |
| }, | |
| { | |
| "epoch": 12.68, | |
| "learning_rate": 4.1496300012542335e-06, | |
| "loss": 1.0684, | |
| "step": 505500 | |
| }, | |
| { | |
| "epoch": 12.69, | |
| "learning_rate": 4.133952088298006e-06, | |
| "loss": 1.0387, | |
| "step": 506000 | |
| }, | |
| { | |
| "epoch": 12.71, | |
| "learning_rate": 4.118274175341779e-06, | |
| "loss": 1.0547, | |
| "step": 506500 | |
| }, | |
| { | |
| "epoch": 12.72, | |
| "learning_rate": 4.102596262385551e-06, | |
| "loss": 1.051, | |
| "step": 507000 | |
| }, | |
| { | |
| "epoch": 12.73, | |
| "learning_rate": 4.086918349429324e-06, | |
| "loss": 1.066, | |
| "step": 507500 | |
| }, | |
| { | |
| "epoch": 12.74, | |
| "learning_rate": 4.071240436473097e-06, | |
| "loss": 1.0669, | |
| "step": 508000 | |
| }, | |
| { | |
| "epoch": 12.76, | |
| "learning_rate": 4.05556252351687e-06, | |
| "loss": 1.0481, | |
| "step": 508500 | |
| }, | |
| { | |
| "epoch": 12.77, | |
| "learning_rate": 4.0398846105606426e-06, | |
| "loss": 1.0656, | |
| "step": 509000 | |
| }, | |
| { | |
| "epoch": 12.78, | |
| "learning_rate": 4.024206697604415e-06, | |
| "loss": 1.0604, | |
| "step": 509500 | |
| }, | |
| { | |
| "epoch": 12.79, | |
| "learning_rate": 4.008528784648188e-06, | |
| "loss": 1.067, | |
| "step": 510000 | |
| }, | |
| { | |
| "epoch": 12.81, | |
| "learning_rate": 3.99285087169196e-06, | |
| "loss": 1.0644, | |
| "step": 510500 | |
| }, | |
| { | |
| "epoch": 12.82, | |
| "learning_rate": 3.977172958735733e-06, | |
| "loss": 1.042, | |
| "step": 511000 | |
| }, | |
| { | |
| "epoch": 12.83, | |
| "learning_rate": 3.961495045779506e-06, | |
| "loss": 1.0498, | |
| "step": 511500 | |
| }, | |
| { | |
| "epoch": 12.84, | |
| "learning_rate": 3.945817132823279e-06, | |
| "loss": 1.0805, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 12.86, | |
| "learning_rate": 3.930139219867052e-06, | |
| "loss": 1.0669, | |
| "step": 512500 | |
| }, | |
| { | |
| "epoch": 12.87, | |
| "learning_rate": 3.914461306910824e-06, | |
| "loss": 1.0549, | |
| "step": 513000 | |
| }, | |
| { | |
| "epoch": 12.88, | |
| "learning_rate": 3.898783393954597e-06, | |
| "loss": 1.0469, | |
| "step": 513500 | |
| }, | |
| { | |
| "epoch": 12.89, | |
| "learning_rate": 3.8831054809983695e-06, | |
| "loss": 1.0471, | |
| "step": 514000 | |
| }, | |
| { | |
| "epoch": 12.91, | |
| "learning_rate": 3.867427568042142e-06, | |
| "loss": 1.0423, | |
| "step": 514500 | |
| }, | |
| { | |
| "epoch": 12.92, | |
| "learning_rate": 3.8517496550859155e-06, | |
| "loss": 1.0984, | |
| "step": 515000 | |
| }, | |
| { | |
| "epoch": 12.93, | |
| "learning_rate": 3.836071742129688e-06, | |
| "loss": 1.0397, | |
| "step": 515500 | |
| }, | |
| { | |
| "epoch": 12.94, | |
| "learning_rate": 3.820393829173461e-06, | |
| "loss": 1.0844, | |
| "step": 516000 | |
| }, | |
| { | |
| "epoch": 12.96, | |
| "learning_rate": 3.8047159162172338e-06, | |
| "loss": 1.0314, | |
| "step": 516500 | |
| }, | |
| { | |
| "epoch": 12.97, | |
| "learning_rate": 3.7890380032610064e-06, | |
| "loss": 1.078, | |
| "step": 517000 | |
| }, | |
| { | |
| "epoch": 12.98, | |
| "learning_rate": 3.773360090304779e-06, | |
| "loss": 1.0066, | |
| "step": 517500 | |
| }, | |
| { | |
| "epoch": 12.99, | |
| "learning_rate": 3.7576821773485516e-06, | |
| "loss": 1.0561, | |
| "step": 518000 | |
| }, | |
| { | |
| "epoch": 13.0, | |
| "eval_loss": 1.1331528425216675, | |
| "eval_runtime": 128.581, | |
| "eval_samples_per_second": 275.593, | |
| "eval_steps_per_second": 34.453, | |
| "step": 518245 | |
| }, | |
| { | |
| "epoch": 13.01, | |
| "learning_rate": 3.742004264392324e-06, | |
| "loss": 1.0781, | |
| "step": 518500 | |
| }, | |
| { | |
| "epoch": 13.02, | |
| "learning_rate": 3.726326351436097e-06, | |
| "loss": 1.0371, | |
| "step": 519000 | |
| }, | |
| { | |
| "epoch": 13.03, | |
| "learning_rate": 3.71064843847987e-06, | |
| "loss": 1.0217, | |
| "step": 519500 | |
| }, | |
| { | |
| "epoch": 13.04, | |
| "learning_rate": 3.6949705255236424e-06, | |
| "loss": 1.0137, | |
| "step": 520000 | |
| }, | |
| { | |
| "epoch": 13.06, | |
| "learning_rate": 3.6792926125674155e-06, | |
| "loss": 1.0569, | |
| "step": 520500 | |
| }, | |
| { | |
| "epoch": 13.07, | |
| "learning_rate": 3.663614699611188e-06, | |
| "loss": 1.0482, | |
| "step": 521000 | |
| }, | |
| { | |
| "epoch": 13.08, | |
| "learning_rate": 3.6479367866549607e-06, | |
| "loss": 1.0459, | |
| "step": 521500 | |
| }, | |
| { | |
| "epoch": 13.09, | |
| "learning_rate": 3.6322588736987337e-06, | |
| "loss": 0.9999, | |
| "step": 522000 | |
| }, | |
| { | |
| "epoch": 13.11, | |
| "learning_rate": 3.6165809607425063e-06, | |
| "loss": 1.0628, | |
| "step": 522500 | |
| }, | |
| { | |
| "epoch": 13.12, | |
| "learning_rate": 3.600903047786279e-06, | |
| "loss": 1.0165, | |
| "step": 523000 | |
| }, | |
| { | |
| "epoch": 13.13, | |
| "learning_rate": 3.5852251348300515e-06, | |
| "loss": 1.0406, | |
| "step": 523500 | |
| }, | |
| { | |
| "epoch": 13.14, | |
| "learning_rate": 3.569547221873824e-06, | |
| "loss": 1.0782, | |
| "step": 524000 | |
| }, | |
| { | |
| "epoch": 13.16, | |
| "learning_rate": 3.5538693089175976e-06, | |
| "loss": 1.0609, | |
| "step": 524500 | |
| }, | |
| { | |
| "epoch": 13.17, | |
| "learning_rate": 3.53819139596137e-06, | |
| "loss": 1.0281, | |
| "step": 525000 | |
| }, | |
| { | |
| "epoch": 13.18, | |
| "learning_rate": 3.522513483005143e-06, | |
| "loss": 1.0175, | |
| "step": 525500 | |
| }, | |
| { | |
| "epoch": 13.19, | |
| "learning_rate": 3.5068355700489154e-06, | |
| "loss": 1.0347, | |
| "step": 526000 | |
| }, | |
| { | |
| "epoch": 13.21, | |
| "learning_rate": 3.491157657092688e-06, | |
| "loss": 1.0351, | |
| "step": 526500 | |
| }, | |
| { | |
| "epoch": 13.22, | |
| "learning_rate": 3.4754797441364606e-06, | |
| "loss": 1.0858, | |
| "step": 527000 | |
| }, | |
| { | |
| "epoch": 13.23, | |
| "learning_rate": 3.4598018311802332e-06, | |
| "loss": 1.0591, | |
| "step": 527500 | |
| }, | |
| { | |
| "epoch": 13.24, | |
| "learning_rate": 3.4441239182240063e-06, | |
| "loss": 1.0785, | |
| "step": 528000 | |
| }, | |
| { | |
| "epoch": 13.26, | |
| "learning_rate": 3.4284460052677793e-06, | |
| "loss": 1.065, | |
| "step": 528500 | |
| }, | |
| { | |
| "epoch": 13.27, | |
| "learning_rate": 3.412768092311552e-06, | |
| "loss": 1.0888, | |
| "step": 529000 | |
| }, | |
| { | |
| "epoch": 13.28, | |
| "learning_rate": 3.3970901793553245e-06, | |
| "loss": 1.029, | |
| "step": 529500 | |
| }, | |
| { | |
| "epoch": 13.29, | |
| "learning_rate": 3.381412266399097e-06, | |
| "loss": 1.0326, | |
| "step": 530000 | |
| }, | |
| { | |
| "epoch": 13.31, | |
| "learning_rate": 3.3657343534428697e-06, | |
| "loss": 1.0316, | |
| "step": 530500 | |
| }, | |
| { | |
| "epoch": 13.32, | |
| "learning_rate": 3.3500564404866427e-06, | |
| "loss": 1.0677, | |
| "step": 531000 | |
| }, | |
| { | |
| "epoch": 13.33, | |
| "learning_rate": 3.3343785275304153e-06, | |
| "loss": 1.007, | |
| "step": 531500 | |
| }, | |
| { | |
| "epoch": 13.35, | |
| "learning_rate": 3.318700614574188e-06, | |
| "loss": 1.0707, | |
| "step": 532000 | |
| }, | |
| { | |
| "epoch": 13.36, | |
| "learning_rate": 3.303022701617961e-06, | |
| "loss": 1.047, | |
| "step": 532500 | |
| }, | |
| { | |
| "epoch": 13.37, | |
| "learning_rate": 3.2873447886617336e-06, | |
| "loss": 1.0405, | |
| "step": 533000 | |
| }, | |
| { | |
| "epoch": 13.38, | |
| "learning_rate": 3.2716668757055066e-06, | |
| "loss": 1.0474, | |
| "step": 533500 | |
| }, | |
| { | |
| "epoch": 13.4, | |
| "learning_rate": 3.2559889627492792e-06, | |
| "loss": 1.056, | |
| "step": 534000 | |
| }, | |
| { | |
| "epoch": 13.41, | |
| "learning_rate": 3.240311049793052e-06, | |
| "loss": 1.062, | |
| "step": 534500 | |
| }, | |
| { | |
| "epoch": 13.42, | |
| "learning_rate": 3.2246331368368244e-06, | |
| "loss": 1.03, | |
| "step": 535000 | |
| }, | |
| { | |
| "epoch": 13.43, | |
| "learning_rate": 3.208955223880597e-06, | |
| "loss": 1.0735, | |
| "step": 535500 | |
| }, | |
| { | |
| "epoch": 13.45, | |
| "learning_rate": 3.1932773109243696e-06, | |
| "loss": 1.0494, | |
| "step": 536000 | |
| }, | |
| { | |
| "epoch": 13.46, | |
| "learning_rate": 3.177599397968143e-06, | |
| "loss": 1.0271, | |
| "step": 536500 | |
| }, | |
| { | |
| "epoch": 13.47, | |
| "learning_rate": 3.1619214850119157e-06, | |
| "loss": 1.0283, | |
| "step": 537000 | |
| }, | |
| { | |
| "epoch": 13.48, | |
| "learning_rate": 3.1462435720556883e-06, | |
| "loss": 1.0483, | |
| "step": 537500 | |
| }, | |
| { | |
| "epoch": 13.5, | |
| "learning_rate": 3.130565659099461e-06, | |
| "loss": 1.0449, | |
| "step": 538000 | |
| }, | |
| { | |
| "epoch": 13.51, | |
| "learning_rate": 3.1148877461432335e-06, | |
| "loss": 1.0583, | |
| "step": 538500 | |
| }, | |
| { | |
| "epoch": 13.52, | |
| "learning_rate": 3.099209833187006e-06, | |
| "loss": 1.035, | |
| "step": 539000 | |
| }, | |
| { | |
| "epoch": 13.53, | |
| "learning_rate": 3.083531920230779e-06, | |
| "loss": 1.0174, | |
| "step": 539500 | |
| }, | |
| { | |
| "epoch": 13.55, | |
| "learning_rate": 3.0678540072745518e-06, | |
| "loss": 1.0895, | |
| "step": 540000 | |
| }, | |
| { | |
| "epoch": 13.56, | |
| "learning_rate": 3.052176094318325e-06, | |
| "loss": 1.1153, | |
| "step": 540500 | |
| }, | |
| { | |
| "epoch": 13.57, | |
| "learning_rate": 3.0364981813620974e-06, | |
| "loss": 1.0644, | |
| "step": 541000 | |
| }, | |
| { | |
| "epoch": 13.58, | |
| "learning_rate": 3.02082026840587e-06, | |
| "loss": 0.9903, | |
| "step": 541500 | |
| }, | |
| { | |
| "epoch": 13.6, | |
| "learning_rate": 3.005142355449643e-06, | |
| "loss": 1.0423, | |
| "step": 542000 | |
| }, | |
| { | |
| "epoch": 13.61, | |
| "learning_rate": 2.9894644424934156e-06, | |
| "loss": 1.068, | |
| "step": 542500 | |
| }, | |
| { | |
| "epoch": 13.62, | |
| "learning_rate": 2.9737865295371883e-06, | |
| "loss": 1.062, | |
| "step": 543000 | |
| }, | |
| { | |
| "epoch": 13.63, | |
| "learning_rate": 2.958108616580961e-06, | |
| "loss": 1.0714, | |
| "step": 543500 | |
| }, | |
| { | |
| "epoch": 13.65, | |
| "learning_rate": 2.9424307036247335e-06, | |
| "loss": 1.0567, | |
| "step": 544000 | |
| }, | |
| { | |
| "epoch": 13.66, | |
| "learning_rate": 2.9267527906685065e-06, | |
| "loss": 1.0153, | |
| "step": 544500 | |
| }, | |
| { | |
| "epoch": 13.67, | |
| "learning_rate": 2.9110748777122795e-06, | |
| "loss": 0.9978, | |
| "step": 545000 | |
| }, | |
| { | |
| "epoch": 13.68, | |
| "learning_rate": 2.895396964756052e-06, | |
| "loss": 1.0683, | |
| "step": 545500 | |
| }, | |
| { | |
| "epoch": 13.7, | |
| "learning_rate": 2.8797190517998247e-06, | |
| "loss": 1.0301, | |
| "step": 546000 | |
| }, | |
| { | |
| "epoch": 13.71, | |
| "learning_rate": 2.8640411388435973e-06, | |
| "loss": 1.0082, | |
| "step": 546500 | |
| }, | |
| { | |
| "epoch": 13.72, | |
| "learning_rate": 2.84836322588737e-06, | |
| "loss": 1.0835, | |
| "step": 547000 | |
| }, | |
| { | |
| "epoch": 13.73, | |
| "learning_rate": 2.8326853129311426e-06, | |
| "loss": 1.0267, | |
| "step": 547500 | |
| }, | |
| { | |
| "epoch": 13.75, | |
| "learning_rate": 2.817007399974915e-06, | |
| "loss": 0.9884, | |
| "step": 548000 | |
| }, | |
| { | |
| "epoch": 13.76, | |
| "learning_rate": 2.8013294870186886e-06, | |
| "loss": 1.0167, | |
| "step": 548500 | |
| }, | |
| { | |
| "epoch": 13.77, | |
| "learning_rate": 2.7856515740624612e-06, | |
| "loss": 0.9803, | |
| "step": 549000 | |
| }, | |
| { | |
| "epoch": 13.78, | |
| "learning_rate": 2.769973661106234e-06, | |
| "loss": 1.0467, | |
| "step": 549500 | |
| }, | |
| { | |
| "epoch": 13.8, | |
| "learning_rate": 2.7542957481500064e-06, | |
| "loss": 1.0797, | |
| "step": 550000 | |
| }, | |
| { | |
| "epoch": 13.81, | |
| "learning_rate": 2.738617835193779e-06, | |
| "loss": 1.0044, | |
| "step": 550500 | |
| }, | |
| { | |
| "epoch": 13.82, | |
| "learning_rate": 2.722939922237552e-06, | |
| "loss": 1.0395, | |
| "step": 551000 | |
| }, | |
| { | |
| "epoch": 13.83, | |
| "learning_rate": 2.7072620092813247e-06, | |
| "loss": 1.0939, | |
| "step": 551500 | |
| }, | |
| { | |
| "epoch": 13.85, | |
| "learning_rate": 2.6915840963250973e-06, | |
| "loss": 1.0077, | |
| "step": 552000 | |
| }, | |
| { | |
| "epoch": 13.86, | |
| "learning_rate": 2.6759061833688703e-06, | |
| "loss": 1.0878, | |
| "step": 552500 | |
| }, | |
| { | |
| "epoch": 13.87, | |
| "learning_rate": 2.660228270412643e-06, | |
| "loss": 1.0205, | |
| "step": 553000 | |
| }, | |
| { | |
| "epoch": 13.88, | |
| "learning_rate": 2.644550357456416e-06, | |
| "loss": 0.993, | |
| "step": 553500 | |
| }, | |
| { | |
| "epoch": 13.9, | |
| "learning_rate": 2.6288724445001886e-06, | |
| "loss": 1.0401, | |
| "step": 554000 | |
| }, | |
| { | |
| "epoch": 13.91, | |
| "learning_rate": 2.613194531543961e-06, | |
| "loss": 1.0286, | |
| "step": 554500 | |
| }, | |
| { | |
| "epoch": 13.92, | |
| "learning_rate": 2.5975166185877338e-06, | |
| "loss": 1.0799, | |
| "step": 555000 | |
| }, | |
| { | |
| "epoch": 13.93, | |
| "learning_rate": 2.5818387056315064e-06, | |
| "loss": 1.002, | |
| "step": 555500 | |
| }, | |
| { | |
| "epoch": 13.95, | |
| "learning_rate": 2.566160792675279e-06, | |
| "loss": 1.009, | |
| "step": 556000 | |
| }, | |
| { | |
| "epoch": 13.96, | |
| "learning_rate": 2.5504828797190524e-06, | |
| "loss": 1.029, | |
| "step": 556500 | |
| }, | |
| { | |
| "epoch": 13.97, | |
| "learning_rate": 2.534804966762825e-06, | |
| "loss": 1.0592, | |
| "step": 557000 | |
| }, | |
| { | |
| "epoch": 13.98, | |
| "learning_rate": 2.5191270538065976e-06, | |
| "loss": 1.0336, | |
| "step": 557500 | |
| }, | |
| { | |
| "epoch": 14.0, | |
| "learning_rate": 2.5034491408503702e-06, | |
| "loss": 1.0431, | |
| "step": 558000 | |
| }, | |
| { | |
| "epoch": 14.0, | |
| "eval_loss": 1.1127265691757202, | |
| "eval_runtime": 127.2849, | |
| "eval_samples_per_second": 278.399, | |
| "eval_steps_per_second": 34.804, | |
| "step": 558110 | |
| }, | |
| { | |
| "epoch": 14.01, | |
| "learning_rate": 2.487771227894143e-06, | |
| "loss": 1.0293, | |
| "step": 558500 | |
| }, | |
| { | |
| "epoch": 14.02, | |
| "learning_rate": 2.4720933149379155e-06, | |
| "loss": 1.0318, | |
| "step": 559000 | |
| }, | |
| { | |
| "epoch": 14.03, | |
| "learning_rate": 2.456415401981688e-06, | |
| "loss": 1.0244, | |
| "step": 559500 | |
| }, | |
| { | |
| "epoch": 14.05, | |
| "learning_rate": 2.440737489025461e-06, | |
| "loss": 1.0643, | |
| "step": 560000 | |
| }, | |
| { | |
| "epoch": 14.06, | |
| "learning_rate": 2.4250595760692337e-06, | |
| "loss": 1.0464, | |
| "step": 560500 | |
| }, | |
| { | |
| "epoch": 14.07, | |
| "learning_rate": 2.4093816631130067e-06, | |
| "loss": 1.0078, | |
| "step": 561000 | |
| }, | |
| { | |
| "epoch": 14.09, | |
| "learning_rate": 2.3937037501567793e-06, | |
| "loss": 1.0639, | |
| "step": 561500 | |
| }, | |
| { | |
| "epoch": 14.1, | |
| "learning_rate": 2.378025837200552e-06, | |
| "loss": 1.0297, | |
| "step": 562000 | |
| }, | |
| { | |
| "epoch": 14.11, | |
| "learning_rate": 2.362347924244325e-06, | |
| "loss": 1.0035, | |
| "step": 562500 | |
| }, | |
| { | |
| "epoch": 14.12, | |
| "learning_rate": 2.3466700112880976e-06, | |
| "loss": 1.0322, | |
| "step": 563000 | |
| }, | |
| { | |
| "epoch": 14.14, | |
| "learning_rate": 2.33099209833187e-06, | |
| "loss": 1.038, | |
| "step": 563500 | |
| }, | |
| { | |
| "epoch": 14.15, | |
| "learning_rate": 2.3153141853756432e-06, | |
| "loss": 0.9761, | |
| "step": 564000 | |
| }, | |
| { | |
| "epoch": 14.16, | |
| "learning_rate": 2.299636272419416e-06, | |
| "loss": 1.0188, | |
| "step": 564500 | |
| }, | |
| { | |
| "epoch": 14.17, | |
| "learning_rate": 2.2839583594631884e-06, | |
| "loss": 1.0077, | |
| "step": 565000 | |
| }, | |
| { | |
| "epoch": 14.19, | |
| "learning_rate": 2.268280446506961e-06, | |
| "loss": 1.0246, | |
| "step": 565500 | |
| }, | |
| { | |
| "epoch": 14.2, | |
| "learning_rate": 2.252602533550734e-06, | |
| "loss": 1.0695, | |
| "step": 566000 | |
| }, | |
| { | |
| "epoch": 14.21, | |
| "learning_rate": 2.2369246205945067e-06, | |
| "loss": 1.0196, | |
| "step": 566500 | |
| }, | |
| { | |
| "epoch": 14.22, | |
| "learning_rate": 2.2212467076382793e-06, | |
| "loss": 1.0, | |
| "step": 567000 | |
| }, | |
| { | |
| "epoch": 14.24, | |
| "learning_rate": 2.205568794682052e-06, | |
| "loss": 1.0327, | |
| "step": 567500 | |
| }, | |
| { | |
| "epoch": 14.25, | |
| "learning_rate": 2.189890881725825e-06, | |
| "loss": 0.9988, | |
| "step": 568000 | |
| }, | |
| { | |
| "epoch": 14.26, | |
| "learning_rate": 2.1742129687695975e-06, | |
| "loss": 1.0017, | |
| "step": 568500 | |
| }, | |
| { | |
| "epoch": 14.27, | |
| "learning_rate": 2.15853505581337e-06, | |
| "loss": 1.0132, | |
| "step": 569000 | |
| }, | |
| { | |
| "epoch": 14.29, | |
| "learning_rate": 2.1428571428571427e-06, | |
| "loss": 1.0325, | |
| "step": 569500 | |
| }, | |
| { | |
| "epoch": 14.3, | |
| "learning_rate": 2.1271792299009158e-06, | |
| "loss": 1.0412, | |
| "step": 570000 | |
| }, | |
| { | |
| "epoch": 14.31, | |
| "learning_rate": 2.1115013169446884e-06, | |
| "loss": 0.9906, | |
| "step": 570500 | |
| }, | |
| { | |
| "epoch": 14.32, | |
| "learning_rate": 2.095823403988461e-06, | |
| "loss": 0.9947, | |
| "step": 571000 | |
| }, | |
| { | |
| "epoch": 14.34, | |
| "learning_rate": 2.080145491032234e-06, | |
| "loss": 1.0673, | |
| "step": 571500 | |
| }, | |
| { | |
| "epoch": 14.35, | |
| "learning_rate": 2.0644675780760066e-06, | |
| "loss": 1.0175, | |
| "step": 572000 | |
| }, | |
| { | |
| "epoch": 14.36, | |
| "learning_rate": 2.0487896651197796e-06, | |
| "loss": 1.0722, | |
| "step": 572500 | |
| }, | |
| { | |
| "epoch": 14.37, | |
| "learning_rate": 2.0331117521635522e-06, | |
| "loss": 1.0239, | |
| "step": 573000 | |
| }, | |
| { | |
| "epoch": 14.39, | |
| "learning_rate": 2.017433839207325e-06, | |
| "loss": 1.0321, | |
| "step": 573500 | |
| }, | |
| { | |
| "epoch": 14.4, | |
| "learning_rate": 2.001755926251098e-06, | |
| "loss": 1.0703, | |
| "step": 574000 | |
| }, | |
| { | |
| "epoch": 14.41, | |
| "learning_rate": 1.9860780132948705e-06, | |
| "loss": 1.0156, | |
| "step": 574500 | |
| }, | |
| { | |
| "epoch": 14.42, | |
| "learning_rate": 1.970400100338643e-06, | |
| "loss": 1.0085, | |
| "step": 575000 | |
| }, | |
| { | |
| "epoch": 14.44, | |
| "learning_rate": 1.9547221873824157e-06, | |
| "loss": 1.0225, | |
| "step": 575500 | |
| }, | |
| { | |
| "epoch": 14.45, | |
| "learning_rate": 1.9390442744261887e-06, | |
| "loss": 1.0439, | |
| "step": 576000 | |
| }, | |
| { | |
| "epoch": 14.46, | |
| "learning_rate": 1.9233663614699613e-06, | |
| "loss": 1.032, | |
| "step": 576500 | |
| }, | |
| { | |
| "epoch": 14.47, | |
| "learning_rate": 1.907688448513734e-06, | |
| "loss": 1.0493, | |
| "step": 577000 | |
| }, | |
| { | |
| "epoch": 14.49, | |
| "learning_rate": 1.8920105355575065e-06, | |
| "loss": 1.0107, | |
| "step": 577500 | |
| }, | |
| { | |
| "epoch": 14.5, | |
| "learning_rate": 1.8763326226012796e-06, | |
| "loss": 1.0462, | |
| "step": 578000 | |
| }, | |
| { | |
| "epoch": 14.51, | |
| "learning_rate": 1.8606547096450522e-06, | |
| "loss": 1.0023, | |
| "step": 578500 | |
| }, | |
| { | |
| "epoch": 14.52, | |
| "learning_rate": 1.844976796688825e-06, | |
| "loss": 1.0987, | |
| "step": 579000 | |
| }, | |
| { | |
| "epoch": 14.54, | |
| "learning_rate": 1.8292988837325976e-06, | |
| "loss": 1.0454, | |
| "step": 579500 | |
| }, | |
| { | |
| "epoch": 14.55, | |
| "learning_rate": 1.8136209707763704e-06, | |
| "loss": 1.0101, | |
| "step": 580000 | |
| }, | |
| { | |
| "epoch": 14.56, | |
| "learning_rate": 1.7979430578201432e-06, | |
| "loss": 1.0031, | |
| "step": 580500 | |
| }, | |
| { | |
| "epoch": 14.57, | |
| "learning_rate": 1.7822651448639158e-06, | |
| "loss": 1.0342, | |
| "step": 581000 | |
| }, | |
| { | |
| "epoch": 14.59, | |
| "learning_rate": 1.7665872319076885e-06, | |
| "loss": 1.0407, | |
| "step": 581500 | |
| }, | |
| { | |
| "epoch": 14.6, | |
| "learning_rate": 1.7509093189514615e-06, | |
| "loss": 1.005, | |
| "step": 582000 | |
| }, | |
| { | |
| "epoch": 14.61, | |
| "learning_rate": 1.735231405995234e-06, | |
| "loss": 1.0042, | |
| "step": 582500 | |
| }, | |
| { | |
| "epoch": 14.62, | |
| "learning_rate": 1.7195534930390067e-06, | |
| "loss": 1.0799, | |
| "step": 583000 | |
| }, | |
| { | |
| "epoch": 14.64, | |
| "learning_rate": 1.7038755800827795e-06, | |
| "loss": 1.018, | |
| "step": 583500 | |
| }, | |
| { | |
| "epoch": 14.65, | |
| "learning_rate": 1.6881976671265523e-06, | |
| "loss": 0.9983, | |
| "step": 584000 | |
| }, | |
| { | |
| "epoch": 14.66, | |
| "learning_rate": 1.672519754170325e-06, | |
| "loss": 1.0174, | |
| "step": 584500 | |
| }, | |
| { | |
| "epoch": 14.67, | |
| "learning_rate": 1.6568418412140978e-06, | |
| "loss": 1.0593, | |
| "step": 585000 | |
| }, | |
| { | |
| "epoch": 14.69, | |
| "learning_rate": 1.6411639282578704e-06, | |
| "loss": 1.0357, | |
| "step": 585500 | |
| }, | |
| { | |
| "epoch": 14.7, | |
| "learning_rate": 1.6254860153016434e-06, | |
| "loss": 1.0329, | |
| "step": 586000 | |
| }, | |
| { | |
| "epoch": 14.71, | |
| "learning_rate": 1.609808102345416e-06, | |
| "loss": 0.9979, | |
| "step": 586500 | |
| }, | |
| { | |
| "epoch": 14.72, | |
| "learning_rate": 1.5941301893891886e-06, | |
| "loss": 1.0639, | |
| "step": 587000 | |
| }, | |
| { | |
| "epoch": 14.74, | |
| "learning_rate": 1.5784522764329612e-06, | |
| "loss": 1.0678, | |
| "step": 587500 | |
| }, | |
| { | |
| "epoch": 14.75, | |
| "learning_rate": 1.5627743634767342e-06, | |
| "loss": 0.9992, | |
| "step": 588000 | |
| }, | |
| { | |
| "epoch": 14.76, | |
| "learning_rate": 1.5470964505205068e-06, | |
| "loss": 1.0042, | |
| "step": 588500 | |
| }, | |
| { | |
| "epoch": 14.77, | |
| "learning_rate": 1.5314185375642795e-06, | |
| "loss": 1.002, | |
| "step": 589000 | |
| }, | |
| { | |
| "epoch": 14.79, | |
| "learning_rate": 1.5157406246080523e-06, | |
| "loss": 1.0495, | |
| "step": 589500 | |
| }, | |
| { | |
| "epoch": 14.8, | |
| "learning_rate": 1.500062711651825e-06, | |
| "loss": 1.0445, | |
| "step": 590000 | |
| }, | |
| { | |
| "epoch": 14.81, | |
| "learning_rate": 1.484384798695598e-06, | |
| "loss": 1.0455, | |
| "step": 590500 | |
| }, | |
| { | |
| "epoch": 14.83, | |
| "learning_rate": 1.4687068857393705e-06, | |
| "loss": 1.0491, | |
| "step": 591000 | |
| }, | |
| { | |
| "epoch": 14.84, | |
| "learning_rate": 1.4530289727831431e-06, | |
| "loss": 1.0236, | |
| "step": 591500 | |
| }, | |
| { | |
| "epoch": 14.85, | |
| "learning_rate": 1.4373510598269161e-06, | |
| "loss": 1.0846, | |
| "step": 592000 | |
| }, | |
| { | |
| "epoch": 14.86, | |
| "learning_rate": 1.4216731468706888e-06, | |
| "loss": 1.0061, | |
| "step": 592500 | |
| }, | |
| { | |
| "epoch": 14.88, | |
| "learning_rate": 1.4059952339144614e-06, | |
| "loss": 0.9879, | |
| "step": 593000 | |
| }, | |
| { | |
| "epoch": 14.89, | |
| "learning_rate": 1.3903173209582342e-06, | |
| "loss": 1.1006, | |
| "step": 593500 | |
| }, | |
| { | |
| "epoch": 14.9, | |
| "learning_rate": 1.374639408002007e-06, | |
| "loss": 1.0208, | |
| "step": 594000 | |
| }, | |
| { | |
| "epoch": 14.91, | |
| "learning_rate": 1.3589614950457796e-06, | |
| "loss": 0.9906, | |
| "step": 594500 | |
| }, | |
| { | |
| "epoch": 14.93, | |
| "learning_rate": 1.3432835820895524e-06, | |
| "loss": 1.0184, | |
| "step": 595000 | |
| }, | |
| { | |
| "epoch": 14.94, | |
| "learning_rate": 1.327605669133325e-06, | |
| "loss": 1.0255, | |
| "step": 595500 | |
| }, | |
| { | |
| "epoch": 14.95, | |
| "learning_rate": 1.3119277561770978e-06, | |
| "loss": 0.9888, | |
| "step": 596000 | |
| }, | |
| { | |
| "epoch": 14.96, | |
| "learning_rate": 1.2962498432208707e-06, | |
| "loss": 0.9886, | |
| "step": 596500 | |
| }, | |
| { | |
| "epoch": 14.98, | |
| "learning_rate": 1.2805719302646433e-06, | |
| "loss": 1.0307, | |
| "step": 597000 | |
| }, | |
| { | |
| "epoch": 14.99, | |
| "learning_rate": 1.2648940173084159e-06, | |
| "loss": 1.0273, | |
| "step": 597500 | |
| }, | |
| { | |
| "epoch": 15.0, | |
| "eval_loss": 1.109073281288147, | |
| "eval_runtime": 127.2061, | |
| "eval_samples_per_second": 278.572, | |
| "eval_steps_per_second": 34.825, | |
| "step": 597975 | |
| }, | |
| { | |
| "epoch": 15.0, | |
| "learning_rate": 1.2492161043521887e-06, | |
| "loss": 1.0451, | |
| "step": 598000 | |
| }, | |
| { | |
| "epoch": 15.01, | |
| "learning_rate": 1.2335381913959615e-06, | |
| "loss": 1.029, | |
| "step": 598500 | |
| }, | |
| { | |
| "epoch": 15.03, | |
| "learning_rate": 1.2178602784397341e-06, | |
| "loss": 0.9851, | |
| "step": 599000 | |
| }, | |
| { | |
| "epoch": 15.04, | |
| "learning_rate": 1.202182365483507e-06, | |
| "loss": 1.041, | |
| "step": 599500 | |
| }, | |
| { | |
| "epoch": 15.05, | |
| "learning_rate": 1.1865044525272797e-06, | |
| "loss": 1.0834, | |
| "step": 600000 | |
| }, | |
| { | |
| "epoch": 15.06, | |
| "learning_rate": 1.1708265395710524e-06, | |
| "loss": 1.0252, | |
| "step": 600500 | |
| }, | |
| { | |
| "epoch": 15.08, | |
| "learning_rate": 1.1551486266148252e-06, | |
| "loss": 0.9628, | |
| "step": 601000 | |
| }, | |
| { | |
| "epoch": 15.09, | |
| "learning_rate": 1.1394707136585978e-06, | |
| "loss": 0.9933, | |
| "step": 601500 | |
| }, | |
| { | |
| "epoch": 15.1, | |
| "learning_rate": 1.1237928007023706e-06, | |
| "loss": 0.9848, | |
| "step": 602000 | |
| }, | |
| { | |
| "epoch": 15.11, | |
| "learning_rate": 1.1081148877461432e-06, | |
| "loss": 1.027, | |
| "step": 602500 | |
| }, | |
| { | |
| "epoch": 15.13, | |
| "learning_rate": 1.092436974789916e-06, | |
| "loss": 0.9849, | |
| "step": 603000 | |
| }, | |
| { | |
| "epoch": 15.14, | |
| "learning_rate": 1.0767590618336886e-06, | |
| "loss": 0.9837, | |
| "step": 603500 | |
| }, | |
| { | |
| "epoch": 15.15, | |
| "learning_rate": 1.0610811488774614e-06, | |
| "loss": 1.0149, | |
| "step": 604000 | |
| }, | |
| { | |
| "epoch": 15.16, | |
| "learning_rate": 1.0454032359212343e-06, | |
| "loss": 1.0314, | |
| "step": 604500 | |
| }, | |
| { | |
| "epoch": 15.18, | |
| "learning_rate": 1.029725322965007e-06, | |
| "loss": 1.0298, | |
| "step": 605000 | |
| }, | |
| { | |
| "epoch": 15.19, | |
| "learning_rate": 1.0140474100087797e-06, | |
| "loss": 1.0303, | |
| "step": 605500 | |
| }, | |
| { | |
| "epoch": 15.2, | |
| "learning_rate": 9.983694970525525e-07, | |
| "loss": 0.989, | |
| "step": 606000 | |
| }, | |
| { | |
| "epoch": 15.21, | |
| "learning_rate": 9.826915840963251e-07, | |
| "loss": 1.0632, | |
| "step": 606500 | |
| }, | |
| { | |
| "epoch": 15.23, | |
| "learning_rate": 9.67013671140098e-07, | |
| "loss": 0.9958, | |
| "step": 607000 | |
| }, | |
| { | |
| "epoch": 15.24, | |
| "learning_rate": 9.513357581838705e-07, | |
| "loss": 0.9956, | |
| "step": 607500 | |
| }, | |
| { | |
| "epoch": 15.25, | |
| "learning_rate": 9.356578452276434e-07, | |
| "loss": 1.017, | |
| "step": 608000 | |
| }, | |
| { | |
| "epoch": 15.26, | |
| "learning_rate": 9.199799322714161e-07, | |
| "loss": 1.0128, | |
| "step": 608500 | |
| }, | |
| { | |
| "epoch": 15.28, | |
| "learning_rate": 9.043020193151889e-07, | |
| "loss": 0.9813, | |
| "step": 609000 | |
| }, | |
| { | |
| "epoch": 15.29, | |
| "learning_rate": 8.886241063589615e-07, | |
| "loss": 0.998, | |
| "step": 609500 | |
| }, | |
| { | |
| "epoch": 15.3, | |
| "learning_rate": 8.729461934027343e-07, | |
| "loss": 0.9707, | |
| "step": 610000 | |
| }, | |
| { | |
| "epoch": 15.31, | |
| "learning_rate": 8.57268280446507e-07, | |
| "loss": 1.0107, | |
| "step": 610500 | |
| }, | |
| { | |
| "epoch": 15.33, | |
| "learning_rate": 8.415903674902797e-07, | |
| "loss": 1.0159, | |
| "step": 611000 | |
| }, | |
| { | |
| "epoch": 15.34, | |
| "learning_rate": 8.259124545340524e-07, | |
| "loss": 1.0025, | |
| "step": 611500 | |
| }, | |
| { | |
| "epoch": 15.35, | |
| "learning_rate": 8.102345415778253e-07, | |
| "loss": 0.9583, | |
| "step": 612000 | |
| }, | |
| { | |
| "epoch": 15.36, | |
| "learning_rate": 7.945566286215979e-07, | |
| "loss": 1.0236, | |
| "step": 612500 | |
| }, | |
| { | |
| "epoch": 15.38, | |
| "learning_rate": 7.788787156653707e-07, | |
| "loss": 1.0134, | |
| "step": 613000 | |
| }, | |
| { | |
| "epoch": 15.39, | |
| "learning_rate": 7.632008027091434e-07, | |
| "loss": 0.9746, | |
| "step": 613500 | |
| }, | |
| { | |
| "epoch": 15.4, | |
| "learning_rate": 7.475228897529162e-07, | |
| "loss": 0.9889, | |
| "step": 614000 | |
| }, | |
| { | |
| "epoch": 15.41, | |
| "learning_rate": 7.318449767966888e-07, | |
| "loss": 1.0125, | |
| "step": 614500 | |
| }, | |
| { | |
| "epoch": 15.43, | |
| "learning_rate": 7.161670638404616e-07, | |
| "loss": 1.0084, | |
| "step": 615000 | |
| }, | |
| { | |
| "epoch": 15.44, | |
| "learning_rate": 7.004891508842344e-07, | |
| "loss": 1.043, | |
| "step": 615500 | |
| }, | |
| { | |
| "epoch": 15.45, | |
| "learning_rate": 6.848112379280071e-07, | |
| "loss": 0.9813, | |
| "step": 616000 | |
| }, | |
| { | |
| "epoch": 15.46, | |
| "learning_rate": 6.691333249717798e-07, | |
| "loss": 1.0617, | |
| "step": 616500 | |
| }, | |
| { | |
| "epoch": 15.48, | |
| "learning_rate": 6.534554120155526e-07, | |
| "loss": 1.0439, | |
| "step": 617000 | |
| }, | |
| { | |
| "epoch": 15.49, | |
| "learning_rate": 6.377774990593252e-07, | |
| "loss": 0.9883, | |
| "step": 617500 | |
| }, | |
| { | |
| "epoch": 15.5, | |
| "learning_rate": 6.22099586103098e-07, | |
| "loss": 1.0232, | |
| "step": 618000 | |
| }, | |
| { | |
| "epoch": 15.51, | |
| "learning_rate": 6.064216731468707e-07, | |
| "loss": 1.086, | |
| "step": 618500 | |
| }, | |
| { | |
| "epoch": 15.53, | |
| "learning_rate": 5.907437601906435e-07, | |
| "loss": 1.0029, | |
| "step": 619000 | |
| }, | |
| { | |
| "epoch": 15.54, | |
| "learning_rate": 5.750658472344163e-07, | |
| "loss": 0.9831, | |
| "step": 619500 | |
| }, | |
| { | |
| "epoch": 15.55, | |
| "learning_rate": 5.59387934278189e-07, | |
| "loss": 1.0251, | |
| "step": 620000 | |
| }, | |
| { | |
| "epoch": 15.57, | |
| "learning_rate": 5.437100213219617e-07, | |
| "loss": 0.9849, | |
| "step": 620500 | |
| }, | |
| { | |
| "epoch": 15.58, | |
| "learning_rate": 5.280321083657344e-07, | |
| "loss": 0.9952, | |
| "step": 621000 | |
| }, | |
| { | |
| "epoch": 15.59, | |
| "learning_rate": 5.123541954095072e-07, | |
| "loss": 1.0433, | |
| "step": 621500 | |
| }, | |
| { | |
| "epoch": 15.6, | |
| "learning_rate": 4.966762824532798e-07, | |
| "loss": 1.0284, | |
| "step": 622000 | |
| }, | |
| { | |
| "epoch": 15.62, | |
| "learning_rate": 4.809983694970525e-07, | |
| "loss": 1.0445, | |
| "step": 622500 | |
| }, | |
| { | |
| "epoch": 15.63, | |
| "learning_rate": 4.653204565408253e-07, | |
| "loss": 0.9913, | |
| "step": 623000 | |
| }, | |
| { | |
| "epoch": 15.64, | |
| "learning_rate": 4.49642543584598e-07, | |
| "loss": 1.0422, | |
| "step": 623500 | |
| }, | |
| { | |
| "epoch": 15.65, | |
| "learning_rate": 4.3396463062837077e-07, | |
| "loss": 0.9829, | |
| "step": 624000 | |
| }, | |
| { | |
| "epoch": 15.67, | |
| "learning_rate": 4.182867176721435e-07, | |
| "loss": 1.0181, | |
| "step": 624500 | |
| }, | |
| { | |
| "epoch": 15.68, | |
| "learning_rate": 4.026088047159162e-07, | |
| "loss": 0.985, | |
| "step": 625000 | |
| }, | |
| { | |
| "epoch": 15.69, | |
| "learning_rate": 3.8693089175968896e-07, | |
| "loss": 0.9835, | |
| "step": 625500 | |
| }, | |
| { | |
| "epoch": 15.7, | |
| "learning_rate": 3.712529788034617e-07, | |
| "loss": 1.0481, | |
| "step": 626000 | |
| }, | |
| { | |
| "epoch": 15.72, | |
| "learning_rate": 3.5557506584723444e-07, | |
| "loss": 1.002, | |
| "step": 626500 | |
| }, | |
| { | |
| "epoch": 15.73, | |
| "learning_rate": 3.3989715289100715e-07, | |
| "loss": 1.0452, | |
| "step": 627000 | |
| }, | |
| { | |
| "epoch": 15.74, | |
| "learning_rate": 3.2421923993477986e-07, | |
| "loss": 1.0042, | |
| "step": 627500 | |
| }, | |
| { | |
| "epoch": 15.75, | |
| "learning_rate": 3.0854132697855263e-07, | |
| "loss": 0.9587, | |
| "step": 628000 | |
| }, | |
| { | |
| "epoch": 15.77, | |
| "learning_rate": 2.928634140223254e-07, | |
| "loss": 1.0267, | |
| "step": 628500 | |
| }, | |
| { | |
| "epoch": 15.78, | |
| "learning_rate": 2.771855010660981e-07, | |
| "loss": 0.9982, | |
| "step": 629000 | |
| }, | |
| { | |
| "epoch": 15.79, | |
| "learning_rate": 2.6150758810987087e-07, | |
| "loss": 1.0544, | |
| "step": 629500 | |
| }, | |
| { | |
| "epoch": 15.8, | |
| "learning_rate": 2.458296751536436e-07, | |
| "loss": 1.0435, | |
| "step": 630000 | |
| }, | |
| { | |
| "epoch": 15.82, | |
| "learning_rate": 2.3015176219741632e-07, | |
| "loss": 1.0338, | |
| "step": 630500 | |
| }, | |
| { | |
| "epoch": 15.83, | |
| "learning_rate": 2.1447384924118906e-07, | |
| "loss": 1.0075, | |
| "step": 631000 | |
| }, | |
| { | |
| "epoch": 15.84, | |
| "learning_rate": 1.9879593628496177e-07, | |
| "loss": 1.0073, | |
| "step": 631500 | |
| }, | |
| { | |
| "epoch": 15.85, | |
| "learning_rate": 1.831180233287345e-07, | |
| "loss": 0.9963, | |
| "step": 632000 | |
| }, | |
| { | |
| "epoch": 15.87, | |
| "learning_rate": 1.6744011037250722e-07, | |
| "loss": 1.0126, | |
| "step": 632500 | |
| }, | |
| { | |
| "epoch": 15.88, | |
| "learning_rate": 1.5176219741627996e-07, | |
| "loss": 1.034, | |
| "step": 633000 | |
| }, | |
| { | |
| "epoch": 15.89, | |
| "learning_rate": 1.360842844600527e-07, | |
| "loss": 1.0527, | |
| "step": 633500 | |
| }, | |
| { | |
| "epoch": 15.9, | |
| "learning_rate": 1.2040637150382544e-07, | |
| "loss": 1.0447, | |
| "step": 634000 | |
| }, | |
| { | |
| "epoch": 15.92, | |
| "learning_rate": 1.0472845854759816e-07, | |
| "loss": 0.9632, | |
| "step": 634500 | |
| }, | |
| { | |
| "epoch": 15.93, | |
| "learning_rate": 8.905054559137089e-08, | |
| "loss": 1.0167, | |
| "step": 635000 | |
| }, | |
| { | |
| "epoch": 15.94, | |
| "learning_rate": 7.337263263514361e-08, | |
| "loss": 1.0249, | |
| "step": 635500 | |
| }, | |
| { | |
| "epoch": 15.95, | |
| "learning_rate": 5.7694719678916344e-08, | |
| "loss": 0.9865, | |
| "step": 636000 | |
| }, | |
| { | |
| "epoch": 15.97, | |
| "learning_rate": 4.2016806722689076e-08, | |
| "loss": 0.982, | |
| "step": 636500 | |
| }, | |
| { | |
| "epoch": 15.98, | |
| "learning_rate": 2.633889376646181e-08, | |
| "loss": 1.0187, | |
| "step": 637000 | |
| }, | |
| { | |
| "epoch": 15.99, | |
| "learning_rate": 1.0660980810234543e-08, | |
| "loss": 1.0275, | |
| "step": 637500 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "eval_loss": 1.1107752323150635, | |
| "eval_runtime": 128.1911, | |
| "eval_samples_per_second": 276.431, | |
| "eval_steps_per_second": 34.558, | |
| "step": 637840 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "step": 637840, | |
| "total_flos": 1.0362059176376341e+17, | |
| "train_loss": 1.2091509097166406, | |
| "train_runtime": 77632.0272, | |
| "train_samples_per_second": 65.729, | |
| "train_steps_per_second": 8.216 | |
| } | |
| ], | |
| "max_steps": 637840, | |
| "num_train_epochs": 16, | |
| "total_flos": 1.0362059176376341e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
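
The structure above appears to be a Hugging Face `Trainer` state file (`trainer_state.json`): `log_history` holds one entry per logging step with `loss` and `learning_rate`, one entry per epoch with `eval_loss` and eval throughput, and a final summary entry with aggregate stats. As a minimal sketch for working with such a file (the file path and the matplotlib plotting choices are assumptions, not part of the log), the snippet below loads `log_history` and plots training loss against the per-epoch eval loss:

```python
import json

import matplotlib.pyplot as plt

# Assumed path: Trainer writes trainer_state.json into its output_dir.
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# Training entries carry "loss" (here, every 500 steps); the once-per-epoch
# evaluation entries carry "eval_loss" instead. The final summary entry has
# neither key, so both filters skip it.
train = [(e["step"], e["loss"]) for e in history if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in history if "eval_loss" in e]

plt.plot(*zip(*train), label="train loss")
plt.plot(*zip(*evals), marker="o", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curve.png")
```

The summary entry is internally consistent: `train_steps_per_second` (8.216) matches `global_step / train_runtime` (637840 / 77632.0272 ≈ 8.216), and the ratio of samples to steps per second (65.729 / 8.216 ≈ 8.0) is consistent with an effective batch size of 8.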