{
  "best_metric": 0.3041017949581146,
  "best_model_checkpoint": "test_training/checkpoint-77364",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 116046,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.978456818847699e-05,
      "loss": 0.5783,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9569136376953966e-05,
      "loss": 0.5134,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9353704565430954e-05,
      "loss": 0.4833,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9138272753907936e-05,
      "loss": 0.4697,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.892284094238492e-05,
      "loss": 0.462,
      "step": 2500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.87074091308619e-05,
      "loss": 0.4516,
      "step": 3000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.849197731933888e-05,
      "loss": 0.4486,
      "step": 3500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.827654550781587e-05,
      "loss": 0.4452,
      "step": 4000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.806111369629285e-05,
      "loss": 0.4285,
      "step": 4500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7845681884769833e-05,
      "loss": 0.4319,
      "step": 5000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7630250073246815e-05,
      "loss": 0.4347,
      "step": 5500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7414818261723804e-05,
      "loss": 0.439,
      "step": 6000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.7199386450200785e-05,
      "loss": 0.4191,
      "step": 6500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.698395463867777e-05,
      "loss": 0.4159,
      "step": 7000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6768522827154756e-05,
      "loss": 0.4179,
      "step": 7500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.655309101563173e-05,
      "loss": 0.4152,
      "step": 8000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.633765920410872e-05,
      "loss": 0.4118,
      "step": 8500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.61222273925857e-05,
      "loss": 0.3976,
      "step": 9000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.590679558106268e-05,
      "loss": 0.4025,
      "step": 9500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.569136376953967e-05,
      "loss": 0.4028,
      "step": 10000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5475931958016646e-05,
      "loss": 0.418,
      "step": 10500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5260500146493635e-05,
      "loss": 0.4044,
      "step": 11000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.5045068334970617e-05,
      "loss": 0.4069,
      "step": 11500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.48296365234476e-05,
      "loss": 0.3949,
      "step": 12000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.461420471192459e-05,
      "loss": 0.3833,
      "step": 12500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.439877290040157e-05,
      "loss": 0.3836,
      "step": 13000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.418334108887855e-05,
      "loss": 0.3923,
      "step": 13500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.396790927735553e-05,
      "loss": 0.3928,
      "step": 14000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3752477465832514e-05,
      "loss": 0.3947,
      "step": 14500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.35370456543095e-05,
      "loss": 0.3778,
      "step": 15000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3321613842786484e-05,
      "loss": 0.3938,
      "step": 15500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3106182031263466e-05,
      "loss": 0.3875,
      "step": 16000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.289075021974045e-05,
      "loss": 0.3753,
      "step": 16500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2675318408217436e-05,
      "loss": 0.3741,
      "step": 17000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.245988659669442e-05,
      "loss": 0.3771,
      "step": 17500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.22444547851714e-05,
      "loss": 0.3834,
      "step": 18000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.202902297364839e-05,
      "loss": 0.3813,
      "step": 18500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.181359116212536e-05,
      "loss": 0.3785,
      "step": 19000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.159815935060235e-05,
      "loss": 0.3707,
      "step": 19500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.138272753907933e-05,
      "loss": 0.376,
      "step": 20000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1167295727556315e-05,
      "loss": 0.3594,
      "step": 20500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0951863916033304e-05,
      "loss": 0.3738,
      "step": 21000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.073643210451028e-05,
      "loss": 0.3702,
      "step": 21500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.052100029298727e-05,
      "loss": 0.3536,
      "step": 22000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.030556848146425e-05,
      "loss": 0.367,
      "step": 22500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.009013666994123e-05,
      "loss": 0.3467,
      "step": 23000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.987470485841822e-05,
      "loss": 0.3616,
      "step": 23500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.96592730468952e-05,
      "loss": 0.3462,
      "step": 24000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.944384123537218e-05,
      "loss": 0.3712,
      "step": 24500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.9228409423849164e-05,
      "loss": 0.359,
      "step": 25000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.9012977612326146e-05,
      "loss": 0.3658,
      "step": 25500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.879754580080313e-05,
      "loss": 0.3608,
      "step": 26000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8582113989280116e-05,
      "loss": 0.3664,
      "step": 26500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.83666821777571e-05,
      "loss": 0.3562,
      "step": 27000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.815125036623408e-05,
      "loss": 0.353,
      "step": 27500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.793581855471107e-05,
      "loss": 0.3545,
      "step": 28000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7720386743188043e-05,
      "loss": 0.3489,
      "step": 28500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.750495493166503e-05,
      "loss": 0.3515,
      "step": 29000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7289523120142014e-05,
      "loss": 0.3565,
      "step": 29500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.7074091308618995e-05,
      "loss": 0.3546,
      "step": 30000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6858659497095984e-05,
      "loss": 0.3423,
      "step": 30500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.664322768557296e-05,
      "loss": 0.3411,
      "step": 31000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.642779587404995e-05,
      "loss": 0.3496,
      "step": 31500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.621236406252693e-05,
      "loss": 0.3595,
      "step": 32000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.599693225100391e-05,
      "loss": 0.3336,
      "step": 32500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.57815004394809e-05,
      "loss": 0.3424,
      "step": 33000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.556606862795788e-05,
      "loss": 0.341,
      "step": 33500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.535063681643486e-05,
      "loss": 0.3433,
      "step": 34000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5135205004911845e-05,
      "loss": 0.342,
      "step": 34500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.491977319338883e-05,
      "loss": 0.3512,
      "step": 35000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4704341381865815e-05,
      "loss": 0.3456,
      "step": 35500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.44889095703428e-05,
      "loss": 0.3339,
      "step": 36000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.427347775881978e-05,
      "loss": 0.3307,
      "step": 36500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.405804594729676e-05,
      "loss": 0.3423,
      "step": 37000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.384261413577375e-05,
      "loss": 0.3292,
      "step": 37500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.362718232425073e-05,
      "loss": 0.3359,
      "step": 38000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.341175051272771e-05,
      "loss": 0.3392,
      "step": 38500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8719837907533616,
      "eval_loss": 0.30726560950279236,
      "eval_runtime": 71.3673,
      "eval_samples_per_second": 456.428,
      "eval_steps_per_second": 28.528,
      "step": 38682
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.31963187012047e-05,
      "loss": 0.2838,
      "step": 39000
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.2980886889681676e-05,
      "loss": 0.2554,
      "step": 39500
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2765455078158664e-05,
      "loss": 0.2521,
      "step": 40000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2550023266635646e-05,
      "loss": 0.2396,
      "step": 40500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.233459145511263e-05,
      "loss": 0.2519,
      "step": 41000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.2119159643589616e-05,
      "loss": 0.2513,
      "step": 41500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.190372783206659e-05,
      "loss": 0.2516,
      "step": 42000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.168829602054358e-05,
      "loss": 0.2565,
      "step": 42500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.147286420902056e-05,
      "loss": 0.2564,
      "step": 43000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.125743239749754e-05,
      "loss": 0.2565,
      "step": 43500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.104200058597453e-05,
      "loss": 0.2638,
      "step": 44000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0826568774451514e-05,
      "loss": 0.2542,
      "step": 44500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0611136962928495e-05,
      "loss": 0.2595,
      "step": 45000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.039570515140548e-05,
      "loss": 0.2608,
      "step": 45500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0180273339882466e-05,
      "loss": 0.2533,
      "step": 46000
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9964841528359444e-05,
      "loss": 0.2616,
      "step": 46500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.974940971683643e-05,
      "loss": 0.2593,
      "step": 47000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9533977905313408e-05,
      "loss": 0.2521,
      "step": 47500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9318546093790396e-05,
      "loss": 0.2483,
      "step": 48000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.910311428226738e-05,
      "loss": 0.2602,
      "step": 48500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.888768247074436e-05,
      "loss": 0.272,
      "step": 49000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8672250659221345e-05,
      "loss": 0.2522,
      "step": 49500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.845681884769833e-05,
      "loss": 0.2565,
      "step": 50000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8241387036175308e-05,
      "loss": 0.2454,
      "step": 50500
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8025955224652293e-05,
      "loss": 0.2563,
      "step": 51000
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7810523413129282e-05,
      "loss": 0.2707,
      "step": 51500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.759509160160626e-05,
      "loss": 0.2515,
      "step": 52000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7379659790083245e-05,
      "loss": 0.2672,
      "step": 52500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7164227978560224e-05,
      "loss": 0.2566,
      "step": 53000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.694879616703721e-05,
      "loss": 0.2509,
      "step": 53500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6733364355514194e-05,
      "loss": 0.252,
      "step": 54000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6517932543991176e-05,
      "loss": 0.2612,
      "step": 54500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.630250073246816e-05,
      "loss": 0.2505,
      "step": 55000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6087068920945146e-05,
      "loss": 0.253,
      "step": 55500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5871637109422124e-05,
      "loss": 0.252,
      "step": 56000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.565620529789911e-05,
      "loss": 0.2473,
      "step": 56500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5440773486376095e-05,
      "loss": 0.2518,
      "step": 57000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5225341674853076e-05,
      "loss": 0.2457,
      "step": 57500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.500990986333006e-05,
      "loss": 0.2636,
      "step": 58000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4794478051807043e-05,
      "loss": 0.2457,
      "step": 58500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4579046240284025e-05,
      "loss": 0.2541,
      "step": 59000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.436361442876101e-05,
      "loss": 0.2528,
      "step": 59500
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4148182617237995e-05,
      "loss": 0.2452,
      "step": 60000
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.3932750805714977e-05,
      "loss": 0.2473,
      "step": 60500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.371731899419196e-05,
      "loss": 0.2401,
      "step": 61000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3501887182668944e-05,
      "loss": 0.2404,
      "step": 61500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3286455371145926e-05,
      "loss": 0.245,
      "step": 62000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3071023559622907e-05,
      "loss": 0.2536,
      "step": 62500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2855591748099893e-05,
      "loss": 0.2495,
      "step": 63000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2640159936576878e-05,
      "loss": 0.2603,
      "step": 63500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.242472812505386e-05,
      "loss": 0.2481,
      "step": 64000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.220929631353084e-05,
      "loss": 0.2504,
      "step": 64500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1993864502007823e-05,
      "loss": 0.2395,
      "step": 65000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1778432690484808e-05,
      "loss": 0.2468,
      "step": 65500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1563000878961793e-05,
      "loss": 0.242,
      "step": 66000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1347569067438775e-05,
      "loss": 0.2369,
      "step": 66500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.113213725591576e-05,
      "loss": 0.2438,
      "step": 67000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0916705444392742e-05,
      "loss": 0.2486,
      "step": 67500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0701273632869724e-05,
      "loss": 0.2562,
      "step": 68000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.048584182134671e-05,
      "loss": 0.2478,
      "step": 68500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0270410009823694e-05,
      "loss": 0.2526,
      "step": 69000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.0054978198300676e-05,
      "loss": 0.2419,
      "step": 69500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9839546386777657e-05,
      "loss": 0.2447,
      "step": 70000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.962411457525464e-05,
      "loss": 0.234,
      "step": 70500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9408682763731624e-05,
      "loss": 0.248,
      "step": 71000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.919325095220861e-05,
      "loss": 0.2475,
      "step": 71500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.897781914068559e-05,
      "loss": 0.2499,
      "step": 72000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8762387329162576e-05,
      "loss": 0.2364,
      "step": 72500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8546955517639558e-05,
      "loss": 0.2354,
      "step": 73000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.833152370611654e-05,
      "loss": 0.2375,
      "step": 73500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.811609189459352e-05,
      "loss": 0.2351,
      "step": 74000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.790066008307051e-05,
      "loss": 0.2351,
      "step": 74500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7685228271547492e-05,
      "loss": 0.2393,
      "step": 75000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7469796460024474e-05,
      "loss": 0.2326,
      "step": 75500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7254364648501455e-05,
      "loss": 0.2335,
      "step": 76000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.703893283697844e-05,
      "loss": 0.2387,
      "step": 76500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6823501025455422e-05,
      "loss": 0.2334,
      "step": 77000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8915392644440351,
      "eval_loss": 0.3041017949581146,
      "eval_runtime": 71.8582,
      "eval_samples_per_second": 453.31,
      "eval_steps_per_second": 28.334,
      "step": 77364
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6608069213932407e-05,
      "loss": 0.2249,
      "step": 77500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6392637402409393e-05,
      "loss": 0.1615,
      "step": 78000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6177205590886374e-05,
      "loss": 0.1502,
      "step": 78500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5961773779363356e-05,
      "loss": 0.1549,
      "step": 79000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5746341967840338e-05,
      "loss": 0.153,
      "step": 79500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5530910156317323e-05,
      "loss": 0.139,
      "step": 80000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5315478344794308e-05,
      "loss": 0.1497,
      "step": 80500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.510004653327129e-05,
      "loss": 0.1654,
      "step": 81000
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4884614721748272e-05,
      "loss": 0.1495,
      "step": 81500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4669182910225257e-05,
      "loss": 0.1622,
      "step": 82000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.445375109870224e-05,
      "loss": 0.1653,
      "step": 82500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4238319287179222e-05,
      "loss": 0.153,
      "step": 83000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4022887475656207e-05,
      "loss": 0.162,
      "step": 83500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.380745566413319e-05,
      "loss": 0.1642,
      "step": 84000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3592023852610172e-05,
      "loss": 0.161,
      "step": 84500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3376592041087154e-05,
      "loss": 0.157,
      "step": 85000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.316116022956414e-05,
      "loss": 0.1679,
      "step": 85500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2945728418041123e-05,
      "loss": 0.156,
      "step": 86000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2730296606518104e-05,
      "loss": 0.1549,
      "step": 86500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2514864794995088e-05,
      "loss": 0.1665,
      "step": 87000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2299432983472071e-05,
      "loss": 0.1591,
      "step": 87500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2084001171949055e-05,
      "loss": 0.169,
      "step": 88000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1868569360426038e-05,
      "loss": 0.1564,
      "step": 88500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1653137548903022e-05,
      "loss": 0.1682,
      "step": 89000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1437705737380005e-05,
      "loss": 0.1571,
      "step": 89500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1222273925856988e-05,
      "loss": 0.1506,
      "step": 90000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1006842114333972e-05,
      "loss": 0.1638,
      "step": 90500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0791410302810955e-05,
      "loss": 0.1526,
      "step": 91000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0575978491287939e-05,
      "loss": 0.1586,
      "step": 91500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.036054667976492e-05,
      "loss": 0.1603,
      "step": 92000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0145114868241904e-05,
      "loss": 0.1607,
      "step": 92500
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.929683056718887e-06,
      "loss": 0.1467,
      "step": 93000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.71425124519587e-06,
      "loss": 0.1663,
      "step": 93500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.498819433672854e-06,
      "loss": 0.1551,
      "step": 94000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.283387622149838e-06,
      "loss": 0.1411,
      "step": 94500
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.067955810626821e-06,
      "loss": 0.1585,
      "step": 95000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.852523999103805e-06,
      "loss": 0.1658,
      "step": 95500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.637092187580788e-06,
      "loss": 0.1471,
      "step": 96000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.42166037605777e-06,
      "loss": 0.1494,
      "step": 96500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.206228564534755e-06,
      "loss": 0.1568,
      "step": 97000
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.990796753011737e-06,
      "loss": 0.1564,
      "step": 97500
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.77536494148872e-06,
      "loss": 0.1596,
      "step": 98000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.559933129965703e-06,
      "loss": 0.1503,
      "step": 98500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.344501318442687e-06,
      "loss": 0.1612,
      "step": 99000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.12906950691967e-06,
      "loss": 0.1474,
      "step": 99500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.913637695396653e-06,
      "loss": 0.1443,
      "step": 100000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.698205883873637e-06,
      "loss": 0.1498,
      "step": 100500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.48277407235062e-06,
      "loss": 0.1489,
      "step": 101000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.267342260827603e-06,
      "loss": 0.156,
      "step": 101500
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.051910449304586e-06,
      "loss": 0.1463,
      "step": 102000
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.83647863778157e-06,
      "loss": 0.1473,
      "step": 102500
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.621046826258553e-06,
      "loss": 0.1594,
      "step": 103000
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.405615014735536e-06,
      "loss": 0.1495,
      "step": 103500
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.19018320321252e-06,
      "loss": 0.1592,
      "step": 104000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.974751391689502e-06,
      "loss": 0.1606,
      "step": 104500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.759319580166486e-06,
      "loss": 0.1444,
      "step": 105000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.543887768643469e-06,
      "loss": 0.1493,
      "step": 105500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.328455957120453e-06,
      "loss": 0.1582,
      "step": 106000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.113024145597435e-06,
      "loss": 0.162,
      "step": 106500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.897592334074419e-06,
      "loss": 0.1459,
      "step": 107000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6821605225514017e-06,
      "loss": 0.1518,
      "step": 107500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.466728711028385e-06,
      "loss": 0.1448,
      "step": 108000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.251296899505369e-06,
      "loss": 0.1496,
      "step": 108500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0358650879823517e-06,
      "loss": 0.1554,
      "step": 109000
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.8204332764593355e-06,
      "loss": 0.1462,
      "step": 109500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.6050014649363185e-06,
      "loss": 0.1474,
      "step": 110000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.3895696534133016e-06,
      "loss": 0.1563,
      "step": 110500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.174137841890285e-06,
      "loss": 0.1501,
      "step": 111000
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.958706030367268e-06,
      "loss": 0.1435,
      "step": 111500
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.7432742188442513e-06,
      "loss": 0.1442,
      "step": 112000
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.5278424073212347e-06,
      "loss": 0.1433,
      "step": 112500
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3124105957982181e-06,
      "loss": 0.1476,
      "step": 113000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.0969787842752014e-06,
      "loss": 0.1454,
      "step": 113500
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.815469727521845e-07,
      "loss": 0.1552,
      "step": 114000
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.661151612291677e-07,
      "loss": 0.1582,
      "step": 114500
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.50683349706151e-07,
      "loss": 0.1521,
      "step": 115000
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.352515381831343e-07,
      "loss": 0.1465,
      "step": 115500
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.9819726660117542e-08,
      "loss": 0.1458,
      "step": 116000
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8964204580340148,
      "eval_loss": 0.4302307367324829,
      "eval_runtime": 73.0156,
      "eval_samples_per_second": 446.124,
      "eval_steps_per_second": 27.884,
      "step": 116046
    }
  ],
  "logging_steps": 500,
  "max_steps": 116046,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.5266879033109472e+17,
  "trial_name": null,
  "trial_params": null
}