{
  "best_metric": 0.0237874872982502,
  "best_model_checkpoint": "./outputs/checkpoint-1405",
  "epoch": 5.0,
  "global_step": 1405,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 1.9857651245551603e-05,
      "loss": 0.6071,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9715302491103204e-05,
      "loss": 0.5349,
      "step": 20
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9572953736654805e-05,
      "loss": 0.3979,
      "step": 30
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9430604982206406e-05,
      "loss": 0.338,
      "step": 40
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9288256227758007e-05,
      "loss": 0.314,
      "step": 50
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9145907473309612e-05,
      "loss": 0.2133,
      "step": 60
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9003558718861213e-05,
      "loss": 0.197,
      "step": 70
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.886120996441281e-05,
      "loss": 0.1823,
      "step": 80
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8718861209964415e-05,
      "loss": 0.159,
      "step": 90
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8576512455516017e-05,
      "loss": 0.162,
      "step": 100
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8434163701067618e-05,
      "loss": 0.1466,
      "step": 110
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.829181494661922e-05,
      "loss": 0.1919,
      "step": 120
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.814946619217082e-05,
      "loss": 0.0872,
      "step": 130
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.800711743772242e-05,
      "loss": 0.1353,
      "step": 140
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7864768683274022e-05,
      "loss": 0.1294,
      "step": 150
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7722419928825624e-05,
      "loss": 0.0948,
      "step": 160
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7580071174377225e-05,
      "loss": 0.1228,
      "step": 170
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7437722419928826e-05,
      "loss": 0.1502,
      "step": 180
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7295373665480427e-05,
      "loss": 0.1073,
      "step": 190
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.715302491103203e-05,
      "loss": 0.0488,
      "step": 200
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7010676156583633e-05,
      "loss": 0.18,
      "step": 210
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.686832740213523e-05,
      "loss": 0.1082,
      "step": 220
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6725978647686835e-05,
      "loss": 0.0977,
      "step": 230
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6583629893238436e-05,
      "loss": 0.1078,
      "step": 240
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6441281138790037e-05,
      "loss": 0.0355,
      "step": 250
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.629893238434164e-05,
      "loss": 0.1555,
      "step": 260
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.615658362989324e-05,
      "loss": 0.0614,
      "step": 270
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.601423487544484e-05,
      "loss": 0.0548,
      "step": 280
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9798488664987406,
      "eval_loss": 0.06160992756485939,
      "eval_runtime": 74.4135,
      "eval_samples_per_second": 5.335,
      "eval_steps_per_second": 0.672,
      "step": 281
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5871886120996442e-05,
      "loss": 0.1327,
      "step": 290
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5729537366548043e-05,
      "loss": 0.0859,
      "step": 300
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.5587188612099648e-05,
      "loss": 0.0389,
      "step": 310
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.5444839857651245e-05,
      "loss": 0.1412,
      "step": 320
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.5302491103202847e-05,
      "loss": 0.0414,
      "step": 330
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.516014234875445e-05,
      "loss": 0.0825,
      "step": 340
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.5017793594306052e-05,
      "loss": 0.1417,
      "step": 350
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.4875444839857654e-05,
      "loss": 0.0896,
      "step": 360
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.4733096085409253e-05,
      "loss": 0.1049,
      "step": 370
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.4590747330960854e-05,
      "loss": 0.1624,
      "step": 380
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.4448398576512457e-05,
      "loss": 0.0422,
      "step": 390
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.4306049822064058e-05,
      "loss": 0.0757,
      "step": 400
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.4163701067615661e-05,
      "loss": 0.0572,
      "step": 410
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.402135231316726e-05,
      "loss": 0.0887,
      "step": 420
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.3879003558718862e-05,
      "loss": 0.0711,
      "step": 430
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.3736654804270464e-05,
      "loss": 0.0399,
      "step": 440
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.3594306049822066e-05,
      "loss": 0.1151,
      "step": 450
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.3451957295373668e-05,
      "loss": 0.0224,
      "step": 460
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.3309608540925268e-05,
      "loss": 0.0326,
      "step": 470
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.3167259786476869e-05,
      "loss": 0.0245,
      "step": 480
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.302491103202847e-05,
      "loss": 0.0582,
      "step": 490
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.2882562277580073e-05,
      "loss": 0.0277,
      "step": 500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.2740213523131673e-05,
      "loss": 0.029,
      "step": 510
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.2597864768683274e-05,
      "loss": 0.1294,
      "step": 520
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.2455516014234877e-05,
      "loss": 0.0685,
      "step": 530
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.2313167259786478e-05,
      "loss": 0.0502,
      "step": 540
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.217081850533808e-05,
      "loss": 0.0737,
      "step": 550
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.202846975088968e-05,
      "loss": 0.1366,
      "step": 560
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9899244332493703,
      "eval_loss": 0.033983081579208374,
      "eval_runtime": 68.9693,
      "eval_samples_per_second": 5.756,
      "eval_steps_per_second": 0.725,
      "step": 562
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.1886120996441281e-05,
      "loss": 0.0415,
      "step": 570
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.1743772241992884e-05,
      "loss": 0.0728,
      "step": 580
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.1601423487544485e-05,
      "loss": 0.0908,
      "step": 590
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.1459074733096086e-05,
      "loss": 0.0763,
      "step": 600
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.1316725978647688e-05,
      "loss": 0.0673,
      "step": 610
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.1174377224199289e-05,
      "loss": 0.0342,
      "step": 620
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.103202846975089e-05,
      "loss": 0.1702,
      "step": 630
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.0889679715302493e-05,
      "loss": 0.1066,
      "step": 640
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.0747330960854094e-05,
      "loss": 0.0286,
      "step": 650
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0604982206405693e-05,
      "loss": 0.0598,
      "step": 660
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0462633451957296e-05,
      "loss": 0.0357,
      "step": 670
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.0320284697508897e-05,
      "loss": 0.0357,
      "step": 680
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.01779359430605e-05,
      "loss": 0.0513,
      "step": 690
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.0035587188612101e-05,
      "loss": 0.102,
      "step": 700
    },
    {
      "epoch": 2.53,
      "learning_rate": 9.893238434163703e-06,
      "loss": 0.0238,
      "step": 710
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.750889679715304e-06,
      "loss": 0.125,
      "step": 720
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.608540925266905e-06,
      "loss": 0.0175,
      "step": 730
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.466192170818506e-06,
      "loss": 0.08,
      "step": 740
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.323843416370107e-06,
      "loss": 0.0481,
      "step": 750
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.18149466192171e-06,
      "loss": 0.0472,
      "step": 760
    },
    {
      "epoch": 2.74,
      "learning_rate": 9.03914590747331e-06,
      "loss": 0.104,
      "step": 770
    },
    {
      "epoch": 2.78,
      "learning_rate": 8.896797153024912e-06,
      "loss": 0.0658,
      "step": 780
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.754448398576513e-06,
      "loss": 0.0218,
      "step": 790
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.612099644128115e-06,
      "loss": 0.0675,
      "step": 800
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.469750889679716e-06,
      "loss": 0.0443,
      "step": 810
    },
    {
      "epoch": 2.92,
      "learning_rate": 8.327402135231317e-06,
      "loss": 0.0131,
      "step": 820
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.18505338078292e-06,
      "loss": 0.0516,
      "step": 830
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.04270462633452e-06,
      "loss": 0.0218,
      "step": 840
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9874055415617129,
      "eval_loss": 0.043008677661418915,
      "eval_runtime": 71.679,
      "eval_samples_per_second": 5.539,
      "eval_steps_per_second": 0.698,
      "step": 843
    },
    {
      "epoch": 3.02,
      "learning_rate": 7.900355871886122e-06,
      "loss": 0.0286,
      "step": 850
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.758007117437723e-06,
      "loss": 0.0847,
      "step": 860
    },
    {
      "epoch": 3.1,
      "learning_rate": 7.6156583629893245e-06,
      "loss": 0.0136,
      "step": 870
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.4733096085409265e-06,
      "loss": 0.0341,
      "step": 880
    },
    {
      "epoch": 3.17,
      "learning_rate": 7.330960854092527e-06,
      "loss": 0.008,
      "step": 890
    },
    {
      "epoch": 3.2,
      "learning_rate": 7.188612099644129e-06,
      "loss": 0.0331,
      "step": 900
    },
    {
      "epoch": 3.24,
      "learning_rate": 7.04626334519573e-06,
      "loss": 0.0187,
      "step": 910
    },
    {
      "epoch": 3.27,
      "learning_rate": 6.903914590747331e-06,
      "loss": 0.0155,
      "step": 920
    },
    {
      "epoch": 3.31,
      "learning_rate": 6.761565836298933e-06,
      "loss": 0.0344,
      "step": 930
    },
    {
      "epoch": 3.35,
      "learning_rate": 6.619217081850534e-06,
      "loss": 0.0745,
      "step": 940
    },
    {
      "epoch": 3.38,
      "learning_rate": 6.476868327402136e-06,
      "loss": 0.024,
      "step": 950
    },
    {
      "epoch": 3.42,
      "learning_rate": 6.334519572953737e-06,
      "loss": 0.0764,
      "step": 960
    },
    {
      "epoch": 3.45,
      "learning_rate": 6.192170818505339e-06,
      "loss": 0.0163,
      "step": 970
    },
    {
      "epoch": 3.49,
      "learning_rate": 6.049822064056941e-06,
      "loss": 0.0163,
      "step": 980
    },
    {
      "epoch": 3.52,
      "learning_rate": 5.907473309608541e-06,
      "loss": 0.0265,
      "step": 990
    },
    {
      "epoch": 3.56,
      "learning_rate": 5.765124555160143e-06,
      "loss": 0.0268,
      "step": 1000
    },
    {
      "epoch": 3.59,
      "learning_rate": 5.622775800711744e-06,
      "loss": 0.0242,
      "step": 1010
    },
    {
      "epoch": 3.63,
      "learning_rate": 5.480427046263346e-06,
      "loss": 0.0103,
      "step": 1020
    },
    {
      "epoch": 3.67,
      "learning_rate": 5.338078291814946e-06,
      "loss": 0.111,
      "step": 1030
    },
    {
      "epoch": 3.7,
      "learning_rate": 5.195729537366548e-06,
      "loss": 0.0368,
      "step": 1040
    },
    {
      "epoch": 3.74,
      "learning_rate": 5.05338078291815e-06,
      "loss": 0.0599,
      "step": 1050
    },
    {
      "epoch": 3.77,
      "learning_rate": 4.911032028469751e-06,
      "loss": 0.0894,
      "step": 1060
    },
    {
      "epoch": 3.81,
      "learning_rate": 4.768683274021353e-06,
      "loss": 0.0144,
      "step": 1070
    },
    {
      "epoch": 3.84,
      "learning_rate": 4.626334519572954e-06,
      "loss": 0.0337,
      "step": 1080
    },
    {
      "epoch": 3.88,
      "learning_rate": 4.483985765124556e-06,
      "loss": 0.0204,
      "step": 1090
    },
    {
      "epoch": 3.91,
      "learning_rate": 4.341637010676157e-06,
      "loss": 0.0226,
      "step": 1100
    },
    {
      "epoch": 3.95,
      "learning_rate": 4.199288256227758e-06,
      "loss": 0.0826,
      "step": 1110
    },
    {
      "epoch": 3.99,
      "learning_rate": 4.05693950177936e-06,
      "loss": 0.0403,
      "step": 1120
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9874055415617129,
      "eval_loss": 0.04059485346078873,
      "eval_runtime": 69.4464,
      "eval_samples_per_second": 5.717,
      "eval_steps_per_second": 0.72,
      "step": 1124
    },
    {
      "epoch": 4.02,
      "learning_rate": 3.914590747330961e-06,
      "loss": 0.0569,
      "step": 1130
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.7722419928825625e-06,
      "loss": 0.0302,
      "step": 1140
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.629893238434164e-06,
      "loss": 0.0795,
      "step": 1150
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.4875444839857653e-06,
      "loss": 0.0749,
      "step": 1160
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.3451957295373664e-06,
      "loss": 0.0384,
      "step": 1170
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.2028469750889684e-06,
      "loss": 0.043,
      "step": 1180
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.0604982206405696e-06,
      "loss": 0.0306,
      "step": 1190
    },
    {
      "epoch": 4.27,
      "learning_rate": 2.918149466192171e-06,
      "loss": 0.0181,
      "step": 1200
    },
    {
      "epoch": 4.31,
      "learning_rate": 2.7758007117437723e-06,
      "loss": 0.0289,
      "step": 1210
    },
    {
      "epoch": 4.34,
      "learning_rate": 2.633451957295374e-06,
      "loss": 0.0631,
      "step": 1220
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.4911032028469755e-06,
      "loss": 0.0454,
      "step": 1230
    },
    {
      "epoch": 4.41,
      "learning_rate": 2.3487544483985766e-06,
      "loss": 0.0461,
      "step": 1240
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.2064056939501782e-06,
      "loss": 0.0233,
      "step": 1250
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.0640569395017794e-06,
      "loss": 0.0123,
      "step": 1260
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.921708185053381e-06,
      "loss": 0.075,
      "step": 1270
    },
    {
      "epoch": 4.56,
      "learning_rate": 1.7793594306049826e-06,
      "loss": 0.0131,
      "step": 1280
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.6370106761565837e-06,
      "loss": 0.0068,
      "step": 1290
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.494661921708185e-06,
      "loss": 0.0212,
      "step": 1300
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.3523131672597867e-06,
      "loss": 0.0062,
      "step": 1310
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.209964412811388e-06,
      "loss": 0.075,
      "step": 1320
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.0676156583629894e-06,
      "loss": 0.0776,
      "step": 1330
    },
    {
      "epoch": 4.77,
      "learning_rate": 9.252669039145908e-07,
      "loss": 0.0238,
      "step": 1340
    },
    {
      "epoch": 4.8,
      "learning_rate": 7.829181494661923e-07,
      "loss": 0.0174,
      "step": 1350
    },
    {
      "epoch": 4.84,
      "learning_rate": 6.405693950177936e-07,
      "loss": 0.1009,
      "step": 1360
    },
    {
      "epoch": 4.88,
      "learning_rate": 4.982206405693951e-07,
      "loss": 0.0823,
      "step": 1370
    },
    {
      "epoch": 4.91,
      "learning_rate": 3.558718861209965e-07,
      "loss": 0.091,
      "step": 1380
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.135231316725979e-07,
      "loss": 0.0182,
      "step": 1390
    },
    {
      "epoch": 4.98,
      "learning_rate": 7.117437722419929e-08,
      "loss": 0.0184,
      "step": 1400
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9949622166246851,
      "eval_loss": 0.0237874872982502,
      "eval_runtime": 70.1815,
      "eval_samples_per_second": 5.657,
      "eval_steps_per_second": 0.712,
      "step": 1405
    },
    {
      "epoch": 5.0,
      "step": 1405,
      "total_flos": 8.694601234757222e+17,
      "train_loss": 0.08033438997336553,
      "train_runtime": 6338.3889,
      "train_samples_per_second": 1.77,
      "train_steps_per_second": 0.222
    }
  ],
  "max_steps": 1405,
  "num_train_epochs": 5,
  "total_flos": 8.694601234757222e+17,
  "trial_name": null,
  "trial_params": null
}