{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 122720,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 1.991900260756193e-05,
      "loss": 0.8698,
      "step": 500
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.983751629726206e-05,
      "loss": 0.7184,
      "step": 1000
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9756029986962192e-05,
      "loss": 0.6715,
      "step": 1500
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9674543676662323e-05,
      "loss": 0.6418,
      "step": 2000
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9593057366362454e-05,
      "loss": 0.6175,
      "step": 2500
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9511734028683183e-05,
      "loss": 0.6239,
      "step": 3000
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9430247718383314e-05,
      "loss": 0.5822,
      "step": 3500
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9348761408083442e-05,
      "loss": 0.5878,
      "step": 4000
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9267275097783573e-05,
      "loss": 0.5581,
      "step": 4500
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9185788787483704e-05,
      "loss": 0.5579,
      "step": 5000
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9104302477183832e-05,
      "loss": 0.5577,
      "step": 5500
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9022816166883967e-05,
      "loss": 0.5564,
      "step": 6000
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8941329856584098e-05,
      "loss": 0.5554,
      "step": 6500
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8860006518904827e-05,
      "loss": 0.5376,
      "step": 7000
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8778520208604955e-05,
      "loss": 0.5277,
      "step": 7500
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8697033898305086e-05,
      "loss": 0.5205,
      "step": 8000
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8615547588005217e-05,
      "loss": 0.5252,
      "step": 8500
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8534061277705345e-05,
      "loss": 0.5378,
      "step": 9000
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8452737940026077e-05,
      "loss": 0.5264,
      "step": 9500
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.837125162972621e-05,
      "loss": 0.524,
      "step": 10000
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.828976531942634e-05,
      "loss": 0.5101,
      "step": 10500
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8208279009126467e-05,
      "loss": 0.5028,
      "step": 11000
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.81267926988266e-05,
      "loss": 0.5145,
      "step": 11500
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.804546936114733e-05,
      "loss": 0.5175,
      "step": 12000
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.796398305084746e-05,
      "loss": 0.5091,
      "step": 12500
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.788249674054759e-05,
      "loss": 0.502,
      "step": 13000
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.780101043024772e-05,
      "loss": 0.5066,
      "step": 13500
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7719524119947852e-05,
      "loss": 0.5093,
      "step": 14000
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.763803780964798e-05,
      "loss": 0.5021,
      "step": 14500
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.755655149934811e-05,
      "loss": 0.5129,
      "step": 15000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.747522816166884e-05,
      "loss": 0.4993,
      "step": 15500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.739374185136897e-05,
      "loss": 0.4913,
      "step": 16000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7312255541069102e-05,
      "loss": 0.4961,
      "step": 16500
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7230769230769234e-05,
      "loss": 0.4865,
      "step": 17000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7149282920469365e-05,
      "loss": 0.5059,
      "step": 17500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7067796610169493e-05,
      "loss": 0.5019,
      "step": 18000
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6986310299869624e-05,
      "loss": 0.4809,
      "step": 18500
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6904823989569755e-05,
      "loss": 0.484,
      "step": 19000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6823500651890484e-05,
      "loss": 0.4843,
      "step": 19500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6742014341590612e-05,
      "loss": 0.4647,
      "step": 20000
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6660528031290743e-05,
      "loss": 0.4703,
      "step": 20500
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6579204693611475e-05,
      "loss": 0.4732,
      "step": 21000
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6497718383311606e-05,
      "loss": 0.4914,
      "step": 21500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6416232073011734e-05,
      "loss": 0.4647,
      "step": 22000
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6334745762711865e-05,
      "loss": 0.4749,
      "step": 22500
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6253259452411997e-05,
      "loss": 0.4835,
      "step": 23000
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6171936114732725e-05,
      "loss": 0.4741,
      "step": 23500
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.6090449804432857e-05,
      "loss": 0.4649,
      "step": 24000
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.6008963494132988e-05,
      "loss": 0.4664,
      "step": 24500
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.592747718383312e-05,
      "loss": 0.3638,
      "step": 25000
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5845990873533247e-05,
      "loss": 0.3646,
      "step": 25500
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5764504563233378e-05,
      "loss": 0.3599,
      "step": 26000
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.568301825293351e-05,
      "loss": 0.3495,
      "step": 26500
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.5601694915254238e-05,
      "loss": 0.3495,
      "step": 27000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.552020860495437e-05,
      "loss": 0.3526,
      "step": 27500
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.54387222946545e-05,
      "loss": 0.3608,
      "step": 28000
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.5357235984354632e-05,
      "loss": 0.349,
      "step": 28500
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.527591264667536e-05,
      "loss": 0.3538,
      "step": 29000
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.519442633637549e-05,
      "loss": 0.3482,
      "step": 29500
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.511294002607562e-05,
      "loss": 0.3652,
      "step": 30000
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.503145371577575e-05,
      "loss": 0.3665,
      "step": 30500
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.494996740547588e-05,
      "loss": 0.3628,
      "step": 31000
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.4868644067796611e-05,
      "loss": 0.356,
      "step": 31500
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.4787157757496742e-05,
      "loss": 0.3613,
      "step": 32000
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.4705834419817473e-05,
      "loss": 0.3662,
      "step": 32500
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.4624348109517604e-05,
      "loss": 0.3461,
      "step": 33000
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.4542861799217733e-05,
      "loss": 0.3552,
      "step": 33500
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.4461375488917863e-05,
      "loss": 0.3655,
      "step": 34000
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.4379889178617994e-05,
      "loss": 0.3433,
      "step": 34500
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.4298402868318124e-05,
      "loss": 0.3578,
      "step": 35000
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.4216916558018255e-05,
      "loss": 0.3497,
      "step": 35500
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.4135430247718384e-05,
      "loss": 0.3614,
      "step": 36000
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.4053943937418515e-05,
      "loss": 0.3488,
      "step": 36500
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.3972457627118645e-05,
      "loss": 0.3531,
      "step": 37000
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.3890971316818776e-05,
      "loss": 0.3595,
      "step": 37500
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.3809647979139507e-05,
      "loss": 0.353,
      "step": 38000
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.3728161668839636e-05,
      "loss": 0.3523,
      "step": 38500
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.3646675358539767e-05,
      "loss": 0.3652,
      "step": 39000
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.3565189048239897e-05,
      "loss": 0.3504,
      "step": 39500
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.3483865710560626e-05,
      "loss": 0.355,
      "step": 40000
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.3402379400260757e-05,
      "loss": 0.3391,
      "step": 40500
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.3320893089960886e-05,
      "loss": 0.3525,
      "step": 41000
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.3239569752281619e-05,
      "loss": 0.345,
      "step": 41500
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.3158083441981748e-05,
      "loss": 0.3582,
      "step": 42000
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.3076597131681878e-05,
      "loss": 0.3578,
      "step": 42500
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.2995110821382009e-05,
      "loss": 0.3525,
      "step": 43000
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.2913624511082138e-05,
      "loss": 0.3504,
      "step": 43500
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.283213820078227e-05,
      "loss": 0.3531,
      "step": 44000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.27506518904824e-05,
      "loss": 0.3661,
      "step": 44500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.266916558018253e-05,
      "loss": 0.3519,
      "step": 45000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.258767926988266e-05,
      "loss": 0.3459,
      "step": 45500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.250635593220339e-05,
      "loss": 0.3635,
      "step": 46000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.242503259452412e-05,
      "loss": 0.3617,
      "step": 46500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.234354628422425e-05,
      "loss": 0.3668,
      "step": 47000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.226205997392438e-05,
      "loss": 0.3416,
      "step": 47500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.2180573663624513e-05,
      "loss": 0.3568,
      "step": 48000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.2099087353324644e-05,
      "loss": 0.3688,
      "step": 48500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.2017601043024774e-05,
      "loss": 0.3383,
      "step": 49000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.1936114732724903e-05,
      "loss": 0.2534,
      "step": 49500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.1854628422425034e-05,
      "loss": 0.242,
      "step": 50000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.1773305084745763e-05,
      "loss": 0.2292,
      "step": 50500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.1691981747066494e-05,
      "loss": 0.2383,
      "step": 51000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.1610495436766625e-05,
      "loss": 0.2377,
      "step": 51500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.1529009126466754e-05,
      "loss": 0.2391,
      "step": 52000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.1447685788787483e-05,
      "loss": 0.235,
      "step": 52500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.1366199478487615e-05,
      "loss": 0.2349,
      "step": 53000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.1284713168187746e-05,
      "loss": 0.2543,
      "step": 53500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.1203226857887877e-05,
      "loss": 0.24,
      "step": 54000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.1121740547588006e-05,
      "loss": 0.2421,
      "step": 54500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.1040254237288138e-05,
      "loss": 0.2528,
      "step": 55000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.0958767926988267e-05,
      "loss": 0.2447,
      "step": 55500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.0877281616688398e-05,
      "loss": 0.2375,
      "step": 56000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.0795795306388528e-05,
      "loss": 0.2414,
      "step": 56500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.0714308996088657e-05,
      "loss": 0.2362,
      "step": 57000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0632822685788789e-05,
      "loss": 0.2319,
      "step": 57500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0551499348109517e-05,
      "loss": 0.2485,
      "step": 58000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.047001303780965e-05,
      "loss": 0.238,
      "step": 58500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.038852672750978e-05,
      "loss": 0.2423,
      "step": 59000
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.0307040417209911e-05,
      "loss": 0.2451,
      "step": 59500
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.022555410691004e-05,
      "loss": 0.2468,
      "step": 60000
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.014423076923077e-05,
      "loss": 0.238,
      "step": 60500
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.00627444589309e-05,
      "loss": 0.2414,
      "step": 61000
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.981258148631032e-06,
      "loss": 0.2559,
      "step": 61500
    },
    {
      "epoch": 2.53,
      "learning_rate": 9.899771838331161e-06,
      "loss": 0.2489,
      "step": 62000
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.818285528031292e-06,
      "loss": 0.2485,
      "step": 62500
    },
    {
      "epoch": 2.57,
      "learning_rate": 9.736799217731422e-06,
      "loss": 0.2477,
      "step": 63000
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.655312907431551e-06,
      "loss": 0.2515,
      "step": 63500
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.573826597131683e-06,
      "loss": 0.2442,
      "step": 64000
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.492503259452413e-06,
      "loss": 0.234,
      "step": 64500
    },
    {
      "epoch": 2.65,
      "learning_rate": 9.411179921773142e-06,
      "loss": 0.2615,
      "step": 65000
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.329693611473273e-06,
      "loss": 0.2369,
      "step": 65500
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.248207301173405e-06,
      "loss": 0.247,
      "step": 66000
    },
    {
      "epoch": 2.71,
      "learning_rate": 9.166720990873534e-06,
      "loss": 0.2511,
      "step": 66500
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.085397653194265e-06,
      "loss": 0.2351,
      "step": 67000
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.003911342894394e-06,
      "loss": 0.2503,
      "step": 67500
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.922425032594525e-06,
      "loss": 0.2464,
      "step": 68000
    },
    {
      "epoch": 2.79,
      "learning_rate": 8.840938722294655e-06,
      "loss": 0.2415,
      "step": 68500
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.759452411994786e-06,
      "loss": 0.2564,
      "step": 69000
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.678129074315515e-06,
      "loss": 0.2434,
      "step": 69500
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.596642764015646e-06,
      "loss": 0.2448,
      "step": 70000
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.515156453715776e-06,
      "loss": 0.2488,
      "step": 70500
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.433670143415907e-06,
      "loss": 0.2349,
      "step": 71000
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.352183833116038e-06,
      "loss": 0.2553,
      "step": 71500
    },
    {
      "epoch": 2.93,
      "learning_rate": 8.270697522816167e-06,
      "loss": 0.2416,
      "step": 72000
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.189211212516299e-06,
      "loss": 0.2438,
      "step": 72500
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.107724902216428e-06,
      "loss": 0.2478,
      "step": 73000
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.026401564537159e-06,
      "loss": 0.2412,
      "step": 73500
    },
    {
      "epoch": 3.01,
      "learning_rate": 7.944915254237288e-06,
      "loss": 0.1833,
      "step": 74000
    },
    {
      "epoch": 3.04,
      "learning_rate": 7.86342894393742e-06,
      "loss": 0.1688,
      "step": 74500
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.781942633637549e-06,
      "loss": 0.1683,
      "step": 75000
    },
    {
      "epoch": 3.08,
      "learning_rate": 7.70045632333768e-06,
      "loss": 0.1695,
      "step": 75500
    },
    {
      "epoch": 3.1,
      "learning_rate": 7.61913298565841e-06,
      "loss": 0.1667,
      "step": 76000
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.53780964797914e-06,
      "loss": 0.1758,
      "step": 76500
    },
    {
      "epoch": 3.14,
      "learning_rate": 7.456323337679271e-06,
      "loss": 0.1734,
      "step": 77000
    },
    {
      "epoch": 3.16,
      "learning_rate": 7.374837027379401e-06,
      "loss": 0.174,
      "step": 77500
    },
    {
      "epoch": 3.18,
      "learning_rate": 7.2933507170795315e-06,
      "loss": 0.1824,
      "step": 78000
    },
    {
      "epoch": 3.2,
      "learning_rate": 7.212027379400261e-06,
      "loss": 0.1622,
      "step": 78500
    },
    {
      "epoch": 3.22,
      "learning_rate": 7.130541069100392e-06,
      "loss": 0.1887,
      "step": 79000
    },
    {
      "epoch": 3.24,
      "learning_rate": 7.049054758800522e-06,
      "loss": 0.1878,
      "step": 79500
    },
    {
      "epoch": 3.26,
      "learning_rate": 6.967568448500652e-06,
      "loss": 0.1722,
      "step": 80000
    },
    {
      "epoch": 3.28,
      "learning_rate": 6.886082138200783e-06,
      "loss": 0.1736,
      "step": 80500
    },
    {
      "epoch": 3.3,
      "learning_rate": 6.804595827900913e-06,
      "loss": 0.1866,
      "step": 81000
    },
    {
      "epoch": 3.32,
      "learning_rate": 6.723109517601043e-06,
      "loss": 0.1774,
      "step": 81500
    },
    {
      "epoch": 3.34,
      "learning_rate": 6.6416232073011745e-06,
      "loss": 0.1675,
      "step": 82000
    },
    {
      "epoch": 3.36,
      "learning_rate": 6.560136897001305e-06,
      "loss": 0.1875,
      "step": 82500
    },
    {
      "epoch": 3.38,
      "learning_rate": 6.478650586701435e-06,
      "loss": 0.1884,
      "step": 83000
    },
    {
      "epoch": 3.4,
      "learning_rate": 6.397164276401565e-06,
      "loss": 0.1846,
      "step": 83500
    },
    {
      "epoch": 3.42,
      "learning_rate": 6.315677966101695e-06,
      "loss": 0.1778,
      "step": 84000
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.234354628422426e-06,
      "loss": 0.1692,
      "step": 84500
    },
    {
      "epoch": 3.46,
      "learning_rate": 6.152868318122556e-06,
      "loss": 0.1748,
      "step": 85000
    },
    {
      "epoch": 3.48,
      "learning_rate": 6.071382007822686e-06,
      "loss": 0.172,
      "step": 85500
    },
    {
      "epoch": 3.5,
      "learning_rate": 5.989895697522816e-06,
      "loss": 0.1856,
      "step": 86000
    },
    {
      "epoch": 3.52,
      "learning_rate": 5.908572359843546e-06,
      "loss": 0.1588,
      "step": 86500
    },
    {
      "epoch": 3.54,
      "learning_rate": 5.827086049543678e-06,
      "loss": 0.1923,
      "step": 87000
    },
    {
      "epoch": 3.57,
      "learning_rate": 5.745599739243808e-06,
      "loss": 0.1648,
      "step": 87500
    },
    {
      "epoch": 3.59,
      "learning_rate": 5.664113428943938e-06,
      "loss": 0.162,
      "step": 88000
    },
    {
      "epoch": 3.61,
      "learning_rate": 5.582790091264668e-06,
      "loss": 0.1809,
      "step": 88500
    },
    {
      "epoch": 3.63,
      "learning_rate": 5.501466753585399e-06,
      "loss": 0.1782,
      "step": 89000
    },
    {
      "epoch": 3.65,
      "learning_rate": 5.419980443285528e-06,
      "loss": 0.176,
      "step": 89500
    },
    {
      "epoch": 3.67,
      "learning_rate": 5.3384941329856585e-06,
      "loss": 0.1841,
      "step": 90000
    },
    {
      "epoch": 3.69,
      "learning_rate": 5.257007822685789e-06,
      "loss": 0.1849,
      "step": 90500
    },
    {
      "epoch": 3.71,
      "learning_rate": 5.175521512385919e-06,
      "loss": 0.1708,
      "step": 91000
    },
    {
      "epoch": 3.73,
      "learning_rate": 5.0940352020860495e-06,
      "loss": 0.1832,
      "step": 91500
    },
    {
      "epoch": 3.75,
      "learning_rate": 5.012548891786181e-06,
      "loss": 0.1765,
      "step": 92000
    },
    {
      "epoch": 3.77,
      "learning_rate": 4.93106258148631e-06,
      "loss": 0.1833,
      "step": 92500
    },
    {
      "epoch": 3.79,
      "learning_rate": 4.8495762711864405e-06,
      "loss": 0.1775,
      "step": 93000
    },
    {
      "epoch": 3.81,
      "learning_rate": 4.768252933507171e-06,
      "loss": 0.1919,
      "step": 93500
    },
    {
      "epoch": 3.83,
      "learning_rate": 4.6867666232073015e-06,
      "loss": 0.1739,
      "step": 94000
    },
    {
      "epoch": 3.85,
      "learning_rate": 4.605280312907432e-06,
      "loss": 0.172,
      "step": 94500
    },
    {
      "epoch": 3.87,
      "learning_rate": 4.523794002607562e-06,
      "loss": 0.181,
      "step": 95000
    },
    {
      "epoch": 3.89,
      "learning_rate": 4.442470664928292e-06,
      "loss": 0.1637,
      "step": 95500
    },
    {
      "epoch": 3.91,
      "learning_rate": 4.360984354628422e-06,
      "loss": 0.1885,
      "step": 96000
    },
    {
      "epoch": 3.93,
      "learning_rate": 4.2794980443285535e-06,
      "loss": 0.1902,
      "step": 96500
    },
    {
      "epoch": 3.95,
      "learning_rate": 4.198011734028684e-06,
      "loss": 0.1764,
      "step": 97000
    },
    {
      "epoch": 3.97,
      "learning_rate": 4.1166883963494135e-06,
      "loss": 0.1653,
      "step": 97500
    },
    {
      "epoch": 3.99,
      "learning_rate": 4.035202086049544e-06,
      "loss": 0.1733,
      "step": 98000
    },
    {
      "epoch": 4.01,
      "learning_rate": 3.953715775749674e-06,
      "loss": 0.135,
      "step": 98500
    },
    {
      "epoch": 4.03,
      "learning_rate": 3.872392438070404e-06,
      "loss": 0.1212,
      "step": 99000
    },
    {
      "epoch": 4.05,
      "learning_rate": 3.790906127770535e-06,
      "loss": 0.1349,
      "step": 99500
    },
    {
      "epoch": 4.07,
      "learning_rate": 3.709419817470665e-06,
      "loss": 0.1152,
      "step": 100000
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.6279335071707954e-06,
      "loss": 0.1307,
      "step": 100500
    },
    {
      "epoch": 4.12,
      "learning_rate": 3.5464471968709258e-06,
      "loss": 0.1244,
      "step": 101000
    },
    {
      "epoch": 4.14,
      "learning_rate": 3.4649608865710565e-06,
      "loss": 0.1292,
      "step": 101500
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.3836375488917867e-06,
      "loss": 0.1246,
      "step": 102000
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.302151238591917e-06,
      "loss": 0.1318,
      "step": 102500
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.220664928292047e-06,
      "loss": 0.1125,
      "step": 103000
    },
    {
      "epoch": 4.22,
      "learning_rate": 3.1391786179921773e-06,
      "loss": 0.1328,
      "step": 103500
    },
    {
      "epoch": 4.24,
      "learning_rate": 3.057692307692308e-06,
      "loss": 0.1391,
      "step": 104000
    },
    {
      "epoch": 4.26,
      "learning_rate": 2.9763689700130383e-06,
      "loss": 0.129,
      "step": 104500
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.8948826597131686e-06,
      "loss": 0.1314,
      "step": 105000
    },
    {
      "epoch": 4.3,
      "learning_rate": 2.813396349413299e-06,
      "loss": 0.1398,
      "step": 105500
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.731910039113429e-06,
      "loss": 0.1177,
      "step": 106000
    },
    {
      "epoch": 4.34,
      "learning_rate": 2.650586701434159e-06,
      "loss": 0.1321,
      "step": 106500
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.56910039113429e-06,
      "loss": 0.1126,
      "step": 107000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.48761408083442e-06,
      "loss": 0.1313,
      "step": 107500
    },
    {
      "epoch": 4.4,
      "learning_rate": 2.4061277705345505e-06,
      "loss": 0.1346,
      "step": 108000
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.324641460234681e-06,
      "loss": 0.107,
      "step": 108500
    },
    {
      "epoch": 4.44,
      "learning_rate": 2.243155149934811e-06,
      "loss": 0.1301,
      "step": 109000
    },
    {
      "epoch": 4.46,
      "learning_rate": 2.1616688396349415e-06,
      "loss": 0.1159,
      "step": 109500
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.080182529335072e-06,
      "loss": 0.1224,
      "step": 110000
    },
    {
      "epoch": 4.5,
      "learning_rate": 1.998859191655802e-06,
      "loss": 0.1253,
      "step": 110500
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.917535853976532e-06,
      "loss": 0.1163,
      "step": 111000
    },
    {
      "epoch": 4.54,
      "learning_rate": 1.8360495436766626e-06,
      "loss": 0.1129,
      "step": 111500
    },
    {
      "epoch": 4.56,
      "learning_rate": 1.754563233376793e-06,
      "loss": 0.1409,
      "step": 112000
    },
    {
      "epoch": 4.58,
      "learning_rate": 1.673076923076923e-06,
      "loss": 0.1208,
      "step": 112500
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.5917535853976534e-06,
      "loss": 0.1276,
      "step": 113000
    },
    {
      "epoch": 4.62,
      "learning_rate": 1.5102672750977838e-06,
      "loss": 0.1182,
      "step": 113500
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.4287809647979139e-06,
      "loss": 0.1236,
      "step": 114000
    },
    {
      "epoch": 4.67,
      "learning_rate": 1.3472946544980445e-06,
      "loss": 0.1309,
      "step": 114500
    },
    {
      "epoch": 4.69,
      "learning_rate": 1.2658083441981748e-06,
      "loss": 0.1219,
      "step": 115000
    },
    {
      "epoch": 4.71,
      "learning_rate": 1.1843220338983051e-06,
      "loss": 0.1292,
      "step": 115500
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.1028357235984355e-06,
      "loss": 0.1262,
      "step": 116000
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.0213494132985658e-06,
      "loss": 0.1148,
      "step": 116500
    },
    {
      "epoch": 4.77,
      "learning_rate": 9.400260756192961e-07,
      "loss": 0.117,
      "step": 117000
    },
    {
      "epoch": 4.79,
      "learning_rate": 8.587027379400262e-07,
      "loss": 0.128,
      "step": 117500
    },
    {
      "epoch": 4.81,
      "learning_rate": 7.772164276401566e-07,
      "loss": 0.128,
      "step": 118000
    },
    {
      "epoch": 4.83,
      "learning_rate": 6.95730117340287e-07,
      "loss": 0.1416,
      "step": 118500
    },
    {
      "epoch": 4.85,
      "learning_rate": 6.142438070404173e-07,
      "loss": 0.1209,
      "step": 119000
    },
    {
      "epoch": 4.87,
      "learning_rate": 5.327574967405477e-07,
      "loss": 0.134,
      "step": 119500
    },
    {
      "epoch": 4.89,
      "learning_rate": 4.5127118644067805e-07,
      "loss": 0.1199,
      "step": 120000
    },
    {
      "epoch": 4.91,
      "learning_rate": 3.6978487614080835e-07,
      "loss": 0.1169,
      "step": 120500
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.8829856584093875e-07,
      "loss": 0.1204,
      "step": 121000
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.0697522816166886e-07,
      "loss": 0.1298,
      "step": 121500
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.2548891786179923e-07,
      "loss": 0.1317,
      "step": 122000
    },
    {
      "epoch": 4.99,
      "learning_rate": 4.416558018252933e-08,
      "loss": 0.1255,
      "step": 122500
    },
    {
      "epoch": 5.0,
      "step": 122720,
      "total_flos": 1.2915645671077632e+17,
      "train_loss": 0.2863727584966936,
      "train_runtime": 10219.2193,
      "train_samples_per_second": 192.139,
      "train_steps_per_second": 12.009
    }
  ],
  "max_steps": 122720,
  "num_train_epochs": 5,
  "total_flos": 1.2915645671077632e+17,
  "trial_name": null,
  "trial_params": null
}