{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 56765,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.9562230247511674e-05,
      "loss": 2.0275,
      "step": 500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.912269884611997e-05,
      "loss": 2.0037,
      "step": 1000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.868228662027658e-05,
      "loss": 1.9314,
      "step": 1500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.824187439443319e-05,
      "loss": 1.9222,
      "step": 2000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.78014621685898e-05,
      "loss": 1.9162,
      "step": 2500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.736104994274641e-05,
      "loss": 1.77,
      "step": 3000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.692151854135471e-05,
      "loss": 1.7844,
      "step": 3500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.648110631551132e-05,
      "loss": 1.8885,
      "step": 4000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.604069408966793e-05,
      "loss": 1.7094,
      "step": 4500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.560028186382454e-05,
      "loss": 1.8179,
      "step": 5000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.515986963798115e-05,
      "loss": 1.8352,
      "step": 5500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.471945741213776e-05,
      "loss": 1.7946,
      "step": 6000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.427904518629437e-05,
      "loss": 1.7258,
      "step": 6500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.383863296045098e-05,
      "loss": 1.671,
      "step": 7000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.339910155905928e-05,
      "loss": 1.8649,
      "step": 7500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.295868933321589e-05,
      "loss": 1.9734,
      "step": 8000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.25182771073725e-05,
      "loss": 1.8717,
      "step": 8500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.207786488152911e-05,
      "loss": 1.7005,
      "step": 9000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.163745265568572e-05,
      "loss": 1.6136,
      "step": 9500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.1197040429842334e-05,
      "loss": 1.7965,
      "step": 10000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.075662820399894e-05,
      "loss": 1.6852,
      "step": 10500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.031709680260724e-05,
      "loss": 1.5454,
      "step": 11000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.987668457676385e-05,
      "loss": 1.6019,
      "step": 11500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.943627235092046e-05,
      "loss": 1.7469,
      "step": 12000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.8995860125077074e-05,
      "loss": 1.8031,
      "step": 12500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.855544789923368e-05,
      "loss": 1.6901,
      "step": 13000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.811503567339029e-05,
      "loss": 1.6653,
      "step": 13500
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.767462344754691e-05,
      "loss": 1.6627,
      "step": 14000
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.723421122170352e-05,
      "loss": 1.6196,
      "step": 14500
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.679379899586013e-05,
      "loss": 1.7333,
      "step": 15000
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.635426759446842e-05,
      "loss": 1.6364,
      "step": 15500
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.591385536862503e-05,
      "loss": 1.6258,
      "step": 16000
    },
    {
      "epoch": 1.45,
      "learning_rate": 3.547344314278164e-05,
      "loss": 1.7038,
      "step": 16500
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.5033030916938255e-05,
      "loss": 1.6104,
      "step": 17000
    },
    {
      "epoch": 1.54,
      "learning_rate": 3.459438033999824e-05,
      "loss": 1.707,
      "step": 17500
    },
    {
      "epoch": 1.59,
      "learning_rate": 3.415396811415485e-05,
      "loss": 1.6641,
      "step": 18000
    },
    {
      "epoch": 1.63,
      "learning_rate": 3.371355588831146e-05,
      "loss": 1.7144,
      "step": 18500
    },
    {
      "epoch": 1.67,
      "learning_rate": 3.327314366246807e-05,
      "loss": 1.7934,
      "step": 19000
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.283273143662468e-05,
      "loss": 1.6137,
      "step": 19500
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.239231921078129e-05,
      "loss": 1.6195,
      "step": 20000
    },
    {
      "epoch": 1.81,
      "learning_rate": 3.1951906984937905e-05,
      "loss": 1.6228,
      "step": 20500
    },
    {
      "epoch": 1.85,
      "learning_rate": 3.151149475909451e-05,
      "loss": 1.5511,
      "step": 21000
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.107196335770281e-05,
      "loss": 1.5842,
      "step": 21500
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.063155113185942e-05,
      "loss": 1.6081,
      "step": 22000
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.0191138906016033e-05,
      "loss": 1.6629,
      "step": 22500
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.975072668017264e-05,
      "loss": 1.6513,
      "step": 23000
    },
    {
      "epoch": 2.07,
      "learning_rate": 2.9310314454329253e-05,
      "loss": 1.5899,
      "step": 23500
    },
    {
      "epoch": 2.11,
      "learning_rate": 2.8869902228485862e-05,
      "loss": 1.6249,
      "step": 24000
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.8429490002642474e-05,
      "loss": 1.6052,
      "step": 24500
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.7989077776799083e-05,
      "loss": 1.5361,
      "step": 25000
    },
    {
      "epoch": 2.25,
      "learning_rate": 2.7548665550955695e-05,
      "loss": 1.6174,
      "step": 25500
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.7108253325112303e-05,
      "loss": 1.5097,
      "step": 26000
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.66687219237206e-05,
      "loss": 1.4344,
      "step": 26500
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.6228309697877214e-05,
      "loss": 1.6261,
      "step": 27000
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.5787897472033822e-05,
      "loss": 1.5999,
      "step": 27500
    },
    {
      "epoch": 2.47,
      "learning_rate": 2.5347485246190434e-05,
      "loss": 1.5727,
      "step": 28000
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.4907953844798733e-05,
      "loss": 1.5557,
      "step": 28500
    },
    {
      "epoch": 2.55,
      "learning_rate": 2.4467541618955345e-05,
      "loss": 1.7235,
      "step": 29000
    },
    {
      "epoch": 2.6,
      "learning_rate": 2.4027129393111957e-05,
      "loss": 1.5407,
      "step": 29500
    },
    {
      "epoch": 2.64,
      "learning_rate": 2.3586717167268565e-05,
      "loss": 1.5626,
      "step": 30000
    },
    {
      "epoch": 2.69,
      "learning_rate": 2.3146304941425177e-05,
      "loss": 1.6137,
      "step": 30500
    },
    {
      "epoch": 2.73,
      "learning_rate": 2.2706773540033472e-05,
      "loss": 1.6471,
      "step": 31000
    },
    {
      "epoch": 2.77,
      "learning_rate": 2.226636131419008e-05,
      "loss": 1.685,
      "step": 31500
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.1825949088346693e-05,
      "loss": 1.5579,
      "step": 32000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.1385536862503305e-05,
      "loss": 1.5701,
      "step": 32500
    },
    {
      "epoch": 2.91,
      "learning_rate": 2.0945124636659917e-05,
      "loss": 1.6108,
      "step": 33000
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.0505593235268212e-05,
      "loss": 1.6338,
      "step": 33500
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.006518100942482e-05,
      "loss": 1.623,
      "step": 34000
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.9624768783581433e-05,
      "loss": 1.5039,
      "step": 34500
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.918435655773804e-05,
      "loss": 1.6072,
      "step": 35000
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.8743944331894657e-05,
      "loss": 1.5008,
      "step": 35500
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.8303532106051265e-05,
      "loss": 1.5741,
      "step": 36000
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.7863119880207877e-05,
      "loss": 1.4937,
      "step": 36500
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.7423588478816172e-05,
      "loss": 1.5352,
      "step": 37000
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.698317625297278e-05,
      "loss": 1.6432,
      "step": 37500
    },
    {
      "epoch": 3.35,
      "learning_rate": 1.6542764027129393e-05,
      "loss": 1.5811,
      "step": 38000
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.6102351801286005e-05,
      "loss": 1.5633,
      "step": 38500
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.5661939575442617e-05,
      "loss": 1.5949,
      "step": 39000
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.5222408174050912e-05,
      "loss": 1.5309,
      "step": 39500
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.4781995948207522e-05,
      "loss": 1.4779,
      "step": 40000
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.4341583722364133e-05,
      "loss": 1.5713,
      "step": 40500
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.3901171496520743e-05,
      "loss": 1.521,
      "step": 41000
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.3461640095129042e-05,
      "loss": 1.558,
      "step": 41500
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.3021227869285652e-05,
      "loss": 1.5095,
      "step": 42000
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.2580815643442262e-05,
      "loss": 1.4322,
      "step": 42500
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.2140403417598874e-05,
      "loss": 1.558,
      "step": 43000
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.1700872016207171e-05,
      "loss": 1.5619,
      "step": 43500
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.1260459790363781e-05,
      "loss": 1.4401,
      "step": 44000
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.0820047564520392e-05,
      "loss": 1.5182,
      "step": 44500
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.0379635338677002e-05,
      "loss": 1.5103,
      "step": 45000
    },
    {
      "epoch": 4.01,
      "learning_rate": 9.939223112833612e-06,
      "loss": 1.5342,
      "step": 45500
    },
    {
      "epoch": 4.05,
      "learning_rate": 9.498810886990224e-06,
      "loss": 1.4571,
      "step": 46000
    },
    {
      "epoch": 4.1,
      "learning_rate": 9.059279485598521e-06,
      "loss": 1.4884,
      "step": 46500
    },
    {
      "epoch": 4.14,
      "learning_rate": 8.618867259755131e-06,
      "loss": 1.5288,
      "step": 47000
    },
    {
      "epoch": 4.18,
      "learning_rate": 8.178455033911742e-06,
      "loss": 1.6157,
      "step": 47500
    },
    {
      "epoch": 4.23,
      "learning_rate": 7.738042808068352e-06,
      "loss": 1.5245,
      "step": 48000
    },
    {
      "epoch": 4.27,
      "learning_rate": 7.29851140667665e-06,
      "loss": 1.4058,
      "step": 48500
    },
    {
      "epoch": 4.32,
      "learning_rate": 6.858099180833261e-06,
      "loss": 1.617,
      "step": 49000
    },
    {
      "epoch": 4.36,
      "learning_rate": 6.417686954989871e-06,
      "loss": 1.4707,
      "step": 49500
    },
    {
      "epoch": 4.4,
      "learning_rate": 5.977274729146481e-06,
      "loss": 1.4786,
      "step": 50000
    },
    {
      "epoch": 4.45,
      "learning_rate": 5.536862503303092e-06,
      "loss": 1.4711,
      "step": 50500
    },
    {
      "epoch": 4.49,
      "learning_rate": 5.097331101911389e-06,
      "loss": 1.4751,
      "step": 51000
    },
    {
      "epoch": 4.54,
      "learning_rate": 4.6569188760679995e-06,
      "loss": 1.5495,
      "step": 51500
    },
    {
      "epoch": 4.58,
      "learning_rate": 4.21650665022461e-06,
      "loss": 1.4748,
      "step": 52000
    },
    {
      "epoch": 4.62,
      "learning_rate": 3.7760944243812214e-06,
      "loss": 1.4816,
      "step": 52500
    },
    {
      "epoch": 4.67,
      "learning_rate": 3.3356821985378317e-06,
      "loss": 1.5052,
      "step": 53000
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.896150797146129e-06,
      "loss": 1.4555,
      "step": 53500
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.4557385713027396e-06,
      "loss": 1.4899,
      "step": 54000
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.01532634545935e-06,
      "loss": 1.4849,
      "step": 54500
    },
    {
      "epoch": 4.84,
      "learning_rate": 1.5749141196159605e-06,
      "loss": 1.5955,
      "step": 55000
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.1345018937725712e-06,
      "loss": 1.4426,
      "step": 55500
    },
    {
      "epoch": 4.93,
      "learning_rate": 6.940896679291817e-07,
      "loss": 1.4316,
      "step": 56000
    },
    {
      "epoch": 4.98,
      "learning_rate": 2.5367744208579233e-07,
      "loss": 1.5447,
      "step": 56500
    },
    {
      "epoch": 5.0,
      "step": 56765,
      "total_flos": 4.366478177206272e+16,
      "train_runtime": 14442.4553,
      "train_samples_per_second": 3.93,
      "train_steps_per_second": 3.93
    }
  ],
  "max_steps": 56765,
  "num_train_epochs": 5,
  "total_flos": 4.366478177206272e+16,
  "trial_name": null,
  "trial_params": null
}