{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 28.0,
  "global_step": 96768,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-09,
      "loss": 10.5075,
      "step": 1
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.5e-06,
      "loss": 9.5012,
      "step": 500
    },
    {
      "epoch": 0.29,
      "learning_rate": 5e-06,
      "loss": 7.9626,
      "step": 1000
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.5e-06,
      "loss": 6.9841,
      "step": 1500
    },
    {
      "epoch": 0.58,
      "learning_rate": 1e-05,
      "loss": 6.7031,
      "step": 2000
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.25e-05,
      "loss": 6.5413,
      "step": 2500
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.5e-05,
      "loss": 6.4291,
      "step": 3000
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.75e-05,
      "loss": 6.334,
      "step": 3500
    },
    {
      "epoch": 1.16,
      "learning_rate": 2e-05,
      "loss": 6.2554,
      "step": 4000
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.25e-05,
      "loss": 6.1905,
      "step": 4500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5e-05,
      "loss": 6.1341,
      "step": 5000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 6.0828,
      "step": 5500
    },
    {
      "epoch": 1.74,
      "learning_rate": 3e-05,
      "loss": 6.0371,
      "step": 6000
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 6.0035,
      "step": 6500
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.5e-05,
      "loss": 5.9683,
      "step": 7000
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 5.9306,
      "step": 7500
    },
    {
      "epoch": 2.31,
      "learning_rate": 4e-05,
      "loss": 5.9022,
      "step": 8000
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.2495e-05,
      "loss": 5.8759,
      "step": 8500
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.4995000000000005e-05,
      "loss": 5.8494,
      "step": 9000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.7495e-05,
      "loss": 5.8293,
      "step": 9500
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.9995000000000005e-05,
      "loss": 5.8098,
      "step": 10000
    },
    {
      "epoch": 3.04,
      "learning_rate": 4.998185660157389e-05,
      "loss": 5.7938,
      "step": 10500
    },
    {
      "epoch": 3.18,
      "learning_rate": 4.996364033809385e-05,
      "loss": 5.7751,
      "step": 11000
    },
    {
      "epoch": 3.33,
      "learning_rate": 4.9945424074613815e-05,
      "loss": 5.7603,
      "step": 11500
    },
    {
      "epoch": 3.47,
      "learning_rate": 4.992720781113378e-05,
      "loss": 5.7461,
      "step": 12000
    },
    {
      "epoch": 3.62,
      "learning_rate": 4.9909027980180706e-05,
      "loss": 5.7323,
      "step": 12500
    },
    {
      "epoch": 3.76,
      "learning_rate": 4.9890811716700676e-05,
      "loss": 5.7245,
      "step": 13000
    },
    {
      "epoch": 3.91,
      "learning_rate": 4.987259545322064e-05,
      "loss": 5.7123,
      "step": 13500
    },
    {
      "epoch": 4.05,
      "learning_rate": 4.98543791897406e-05,
      "loss": 5.7033,
      "step": 14000
    },
    {
      "epoch": 4.2,
      "learning_rate": 4.983619935878753e-05,
      "loss": 5.6966,
      "step": 14500
    },
    {
      "epoch": 4.34,
      "learning_rate": 4.981798309530749e-05,
      "loss": 5.6868,
      "step": 15000
    },
    {
      "epoch": 4.48,
      "learning_rate": 4.979976683182746e-05,
      "loss": 5.6794,
      "step": 15500
    },
    {
      "epoch": 4.63,
      "learning_rate": 4.9781550568347426e-05,
      "loss": 5.6726,
      "step": 16000
    },
    {
      "epoch": 4.77,
      "learning_rate": 4.976337073739435e-05,
      "loss": 5.6679,
      "step": 16500
    },
    {
      "epoch": 4.92,
      "learning_rate": 4.974515447391431e-05,
      "loss": 5.6571,
      "step": 17000
    },
    {
      "epoch": 5.06,
      "learning_rate": 4.972697464296124e-05,
      "loss": 5.6521,
      "step": 17500
    },
    {
      "epoch": 5.21,
      "learning_rate": 4.97087583794812e-05,
      "loss": 5.6461,
      "step": 18000
    },
    {
      "epoch": 5.35,
      "learning_rate": 4.969054211600117e-05,
      "loss": 5.6394,
      "step": 18500
    },
    {
      "epoch": 5.5,
      "learning_rate": 4.9672325852521135e-05,
      "loss": 5.638,
      "step": 19000
    },
    {
      "epoch": 5.64,
      "learning_rate": 4.96541095890411e-05,
      "loss": 5.6325,
      "step": 19500
    },
    {
      "epoch": 5.79,
      "learning_rate": 4.9635929758088026e-05,
      "loss": 5.6291,
      "step": 20000
    },
    {
      "epoch": 5.93,
      "learning_rate": 4.961771349460799e-05,
      "loss": 5.6244,
      "step": 20500
    },
    {
      "epoch": 6.08,
      "learning_rate": 4.959949723112795e-05,
      "loss": 5.62,
      "step": 21000
    },
    {
      "epoch": 6.22,
      "learning_rate": 4.958128096764792e-05,
      "loss": 5.6133,
      "step": 21500
    },
    {
      "epoch": 6.37,
      "learning_rate": 4.9563064704167886e-05,
      "loss": 5.6107,
      "step": 22000
    },
    {
      "epoch": 6.51,
      "learning_rate": 4.954484844068785e-05,
      "loss": 5.6072,
      "step": 22500
    },
    {
      "epoch": 6.66,
      "learning_rate": 4.952666860973477e-05,
      "loss": 5.5987,
      "step": 23000
    },
    {
      "epoch": 6.8,
      "learning_rate": 4.950845234625474e-05,
      "loss": 5.598,
      "step": 23500
    },
    {
      "epoch": 6.94,
      "learning_rate": 4.9490236082774703e-05,
      "loss": 5.593,
      "step": 24000
    },
    {
      "epoch": 7.09,
      "learning_rate": 4.947201981929467e-05,
      "loss": 5.5922,
      "step": 24500
    },
    {
      "epoch": 7.23,
      "learning_rate": 4.9453839988341595e-05,
      "loss": 5.5881,
      "step": 25000
    },
    {
      "epoch": 7.38,
      "learning_rate": 4.943562372486156e-05,
      "loss": 5.5834,
      "step": 25500
    },
    {
      "epoch": 7.52,
      "learning_rate": 4.941740746138153e-05,
      "loss": 5.5838,
      "step": 26000
    },
    {
      "epoch": 7.67,
      "learning_rate": 4.939919119790149e-05,
      "loss": 5.5782,
      "step": 26500
    },
    {
      "epoch": 7.81,
      "learning_rate": 4.9380974934421454e-05,
      "loss": 5.58,
      "step": 27000
    },
    {
      "epoch": 7.96,
      "learning_rate": 4.936279510346838e-05,
      "loss": 5.5751,
      "step": 27500
    },
    {
      "epoch": 8.1,
      "learning_rate": 4.9344578839988345e-05,
      "loss": 5.5704,
      "step": 28000
    },
    {
      "epoch": 8.25,
      "learning_rate": 4.932636257650831e-05,
      "loss": 5.5679,
      "step": 28500
    },
    {
      "epoch": 8.39,
      "learning_rate": 4.930814631302827e-05,
      "loss": 5.5643,
      "step": 29000
    },
    {
      "epoch": 8.54,
      "learning_rate": 4.928993004954824e-05,
      "loss": 5.5602,
      "step": 29500
    },
    {
      "epoch": 8.68,
      "learning_rate": 4.9271713786068205e-05,
      "loss": 5.5599,
      "step": 30000
    },
    {
      "epoch": 8.83,
      "learning_rate": 4.9253533955115126e-05,
      "loss": 5.5597,
      "step": 30500
    },
    {
      "epoch": 8.97,
      "learning_rate": 4.9235317691635096e-05,
      "loss": 5.5553,
      "step": 31000
    },
    {
      "epoch": 9.11,
      "learning_rate": 4.921710142815506e-05,
      "loss": 5.554,
      "step": 31500
    },
    {
      "epoch": 9.26,
      "learning_rate": 4.919888516467502e-05,
      "loss": 5.5466,
      "step": 32000
    },
    {
      "epoch": 9.4,
      "learning_rate": 4.918070533372195e-05,
      "loss": 5.5485,
      "step": 32500
    },
    {
      "epoch": 9.55,
      "learning_rate": 4.9162489070241914e-05,
      "loss": 5.5428,
      "step": 33000
    },
    {
      "epoch": 9.69,
      "learning_rate": 4.9144272806761884e-05,
      "loss": 5.5481,
      "step": 33500
    },
    {
      "epoch": 9.84,
      "learning_rate": 4.912605654328185e-05,
      "loss": 5.5396,
      "step": 34000
    },
    {
      "epoch": 9.98,
      "learning_rate": 4.910784027980181e-05,
      "loss": 5.5398,
      "step": 34500
    },
    {
      "epoch": 10.13,
      "learning_rate": 4.908966044884873e-05,
      "loss": 5.5342,
      "step": 35000
    },
    {
      "epoch": 10.27,
      "learning_rate": 4.90714441853687e-05,
      "loss": 5.5344,
      "step": 35500
    },
    {
      "epoch": 10.42,
      "learning_rate": 4.9053227921888664e-05,
      "loss": 5.5328,
      "step": 36000
    },
    {
      "epoch": 10.56,
      "learning_rate": 4.903501165840863e-05,
      "loss": 5.5305,
      "step": 36500
    },
    {
      "epoch": 10.71,
      "learning_rate": 4.9016831827455556e-05,
      "loss": 5.5289,
      "step": 37000
    },
    {
      "epoch": 10.85,
      "learning_rate": 4.899861556397552e-05,
      "loss": 5.5273,
      "step": 37500
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.898039930049548e-05,
      "loss": 5.5278,
      "step": 38000
    },
    {
      "epoch": 11.14,
      "learning_rate": 4.896218303701545e-05,
      "loss": 5.5238,
      "step": 38500
    },
    {
      "epoch": 11.28,
      "learning_rate": 4.894400320606237e-05,
      "loss": 5.5243,
      "step": 39000
    },
    {
      "epoch": 11.43,
      "learning_rate": 4.892578694258234e-05,
      "loss": 5.5204,
      "step": 39500
    },
    {
      "epoch": 11.57,
      "learning_rate": 4.8907570679102306e-05,
      "loss": 5.4293,
      "step": 40000
    },
    {
      "epoch": 11.72,
      "learning_rate": 4.888935441562227e-05,
      "loss": 5.2003,
      "step": 40500
    },
    {
      "epoch": 11.86,
      "learning_rate": 4.887113815214224e-05,
      "loss": 5.0337,
      "step": 41000
    },
    {
      "epoch": 12.01,
      "learning_rate": 4.88529218886622e-05,
      "loss": 4.8789,
      "step": 41500
    },
    {
      "epoch": 12.15,
      "learning_rate": 4.8834742057709124e-05,
      "loss": 4.7277,
      "step": 42000
    },
    {
      "epoch": 12.3,
      "learning_rate": 4.881652579422909e-05,
      "loss": 4.5847,
      "step": 42500
    },
    {
      "epoch": 12.44,
      "learning_rate": 4.879830953074905e-05,
      "loss": 4.4549,
      "step": 43000
    },
    {
      "epoch": 12.59,
      "learning_rate": 4.878009326726902e-05,
      "loss": 4.3158,
      "step": 43500
    },
    {
      "epoch": 12.73,
      "learning_rate": 4.8761877003788984e-05,
      "loss": 4.1826,
      "step": 44000
    },
    {
      "epoch": 12.88,
      "learning_rate": 4.874366074030895e-05,
      "loss": 4.0502,
      "step": 44500
    },
    {
      "epoch": 13.02,
      "learning_rate": 4.872544447682891e-05,
      "loss": 3.8966,
      "step": 45000
    },
    {
      "epoch": 13.17,
      "learning_rate": 4.870722821334888e-05,
      "loss": 3.645,
      "step": 45500
    },
    {
      "epoch": 13.31,
      "learning_rate": 4.868904838239581e-05,
      "loss": 3.1092,
      "step": 46000
    },
    {
      "epoch": 13.45,
      "learning_rate": 4.8670868551442736e-05,
      "loss": 2.6286,
      "step": 46500
    },
    {
      "epoch": 13.6,
      "learning_rate": 4.86526522879627e-05,
      "loss": 2.419,
      "step": 47000
    },
    {
      "epoch": 13.74,
      "learning_rate": 4.863443602448266e-05,
      "loss": 2.2904,
      "step": 47500
    },
    {
      "epoch": 13.89,
      "learning_rate": 4.8616219761002625e-05,
      "loss": 2.1957,
      "step": 48000
    },
    {
      "epoch": 14.03,
      "learning_rate": 4.8598003497522595e-05,
      "loss": 2.1272,
      "step": 48500
    },
    {
      "epoch": 14.18,
      "learning_rate": 4.8579823666569517e-05,
      "loss": 2.0666,
      "step": 49000
    },
    {
      "epoch": 14.32,
      "learning_rate": 4.856160740308948e-05,
      "loss": 2.0147,
      "step": 49500
    },
    {
      "epoch": 14.47,
      "learning_rate": 4.854339113960944e-05,
      "loss": 1.9653,
      "step": 50000
    },
    {
      "epoch": 14.61,
      "learning_rate": 4.8525174876129406e-05,
      "loss": 1.9253,
      "step": 50500
    },
    {
      "epoch": 14.76,
      "learning_rate": 4.8506958612649376e-05,
      "loss": 1.8919,
      "step": 51000
    },
    {
      "epoch": 14.9,
      "learning_rate": 4.8488778781696304e-05,
      "loss": 1.8565,
      "step": 51500
    },
    {
      "epoch": 15.05,
      "learning_rate": 4.847056251821627e-05,
      "loss": 1.8254,
      "step": 52000
    },
    {
      "epoch": 15.19,
      "learning_rate": 4.845234625473623e-05,
      "loss": 1.7994,
      "step": 52500
    },
    {
      "epoch": 15.34,
      "learning_rate": 4.843416642378316e-05,
      "loss": 1.7745,
      "step": 53000
    },
    {
      "epoch": 15.48,
      "learning_rate": 4.841595016030312e-05,
      "loss": 1.7497,
      "step": 53500
    },
    {
      "epoch": 15.62,
      "learning_rate": 4.839773389682309e-05,
      "loss": 1.7284,
      "step": 54000
    },
    {
      "epoch": 15.77,
      "learning_rate": 4.8379517633343055e-05,
      "loss": 1.7083,
      "step": 54500
    },
    {
      "epoch": 15.91,
      "learning_rate": 4.836130136986302e-05,
      "loss": 1.6876,
      "step": 55000
    },
    {
      "epoch": 16.06,
      "learning_rate": 4.834308510638298e-05,
      "loss": 1.6699,
      "step": 55500
    },
    {
      "epoch": 16.2,
      "learning_rate": 4.832486884290295e-05,
      "loss": 1.6482,
      "step": 56000
    },
    {
      "epoch": 16.35,
      "learning_rate": 4.8306652579422915e-05,
      "loss": 1.6352,
      "step": 56500
    },
    {
      "epoch": 16.49,
      "learning_rate": 4.828843631594288e-05,
      "loss": 1.6164,
      "step": 57000
    },
    {
      "epoch": 16.64,
      "learning_rate": 4.827022005246284e-05,
      "loss": 1.601,
      "step": 57500
    },
    {
      "epoch": 16.78,
      "learning_rate": 4.825204022150976e-05,
      "loss": 1.5858,
      "step": 58000
    },
    {
      "epoch": 16.93,
      "learning_rate": 4.823382395802973e-05,
      "loss": 1.5718,
      "step": 58500
    },
    {
      "epoch": 17.07,
      "learning_rate": 4.8215607694549695e-05,
      "loss": 1.5598,
      "step": 59000
    },
    {
      "epoch": 17.22,
      "learning_rate": 4.819739143106966e-05,
      "loss": 1.5424,
      "step": 59500
    },
    {
      "epoch": 17.36,
      "learning_rate": 4.817917516758962e-05,
      "loss": 1.5323,
      "step": 60000
    },
    {
      "epoch": 17.51,
      "learning_rate": 4.816099533663655e-05,
      "loss": 1.5216,
      "step": 60500
    },
    {
      "epoch": 17.65,
      "learning_rate": 4.814277907315652e-05,
      "loss": 1.511,
      "step": 61000
    },
    {
      "epoch": 17.8,
      "learning_rate": 4.812456280967648e-05,
      "loss": 1.5005,
      "step": 61500
    },
    {
      "epoch": 17.94,
      "learning_rate": 4.8106346546196446e-05,
      "loss": 1.488,
      "step": 62000
    },
    {
      "epoch": 18.08,
      "learning_rate": 4.8088166715243374e-05,
      "loss": 1.4788,
      "step": 62500
    },
    {
      "epoch": 18.23,
      "learning_rate": 4.806995045176334e-05,
      "loss": 1.4691,
      "step": 63000
    },
    {
      "epoch": 18.37,
      "learning_rate": 4.80517341882833e-05,
      "loss": 1.4588,
      "step": 63500
    },
    {
      "epoch": 18.52,
      "learning_rate": 4.803355435733023e-05,
      "loss": 1.4515,
      "step": 64000
    },
    {
      "epoch": 18.66,
      "learning_rate": 4.801533809385019e-05,
      "loss": 1.4426,
      "step": 64500
    },
    {
      "epoch": 18.81,
      "learning_rate": 4.7997121830370155e-05,
      "loss": 1.4344,
      "step": 65000
    },
    {
      "epoch": 18.95,
      "learning_rate": 4.797890556689012e-05,
      "loss": 1.4257,
      "step": 65500
    },
    {
      "epoch": 19.1,
      "learning_rate": 4.796068930341009e-05,
      "loss": 1.4187,
      "step": 66000
    },
    {
      "epoch": 19.24,
      "learning_rate": 4.794247303993005e-05,
      "loss": 1.4092,
      "step": 66500
    },
    {
      "epoch": 19.39,
      "learning_rate": 4.7924256776450015e-05,
      "loss": 1.4034,
      "step": 67000
    },
    {
      "epoch": 19.53,
      "learning_rate": 4.790604051296998e-05,
      "loss": 1.3929,
      "step": 67500
    },
    {
      "epoch": 19.68,
      "learning_rate": 4.7887860682016906e-05,
      "loss": 1.3877,
      "step": 68000
    },
    {
      "epoch": 19.82,
      "learning_rate": 4.7869680851063833e-05,
      "loss": 1.3808,
      "step": 68500
    },
    {
      "epoch": 19.97,
      "learning_rate": 4.78514645875838e-05,
      "loss": 1.375,
      "step": 69000
    },
    {
      "epoch": 20.11,
      "learning_rate": 4.783324832410376e-05,
      "loss": 1.3667,
      "step": 69500
    },
    {
      "epoch": 20.25,
      "learning_rate": 4.781503206062373e-05,
      "loss": 1.3623,
      "step": 70000
    },
    {
      "epoch": 20.4,
      "learning_rate": 4.779681579714369e-05,
      "loss": 1.3533,
      "step": 70500
    },
    {
      "epoch": 20.54,
      "learning_rate": 4.7778599533663656e-05,
      "loss": 1.3478,
      "step": 71000
    },
    {
      "epoch": 20.69,
      "learning_rate": 4.7760419702710584e-05,
      "loss": 1.3446,
      "step": 71500
    },
    {
      "epoch": 20.83,
      "learning_rate": 4.774220343923055e-05,
      "loss": 1.338,
      "step": 72000
    },
    {
      "epoch": 20.98,
      "learning_rate": 4.772398717575051e-05,
      "loss": 1.333,
      "step": 72500
    },
    {
      "epoch": 21.12,
      "learning_rate": 4.7705770912270474e-05,
      "loss": 1.3274,
      "step": 73000
    },
    {
      "epoch": 21.27,
      "learning_rate": 4.7687554648790444e-05,
      "loss": 1.3204,
      "step": 73500
    },
    {
      "epoch": 21.41,
      "learning_rate": 4.766933838531041e-05,
      "loss": 1.3178,
      "step": 74000
    },
    {
      "epoch": 21.56,
      "learning_rate": 4.765112212183037e-05,
      "loss": 1.3109,
      "step": 74500
    },
    {
      "epoch": 21.7,
      "learning_rate": 4.76329422908773e-05,
      "loss": 1.3055,
      "step": 75000
    },
    {
      "epoch": 21.85,
      "learning_rate": 4.761472602739726e-05,
      "loss": 1.2995,
      "step": 75500
    },
    {
      "epoch": 21.99,
      "learning_rate": 4.759654619644419e-05,
      "loss": 1.2955,
      "step": 76000
    },
    {
      "epoch": 22.14,
      "learning_rate": 4.757832993296415e-05,
      "loss": 1.2903,
      "step": 76500
    },
    {
      "epoch": 22.28,
      "learning_rate": 4.7560113669484116e-05,
      "loss": 1.2848,
      "step": 77000
    },
    {
      "epoch": 22.42,
      "learning_rate": 4.754189740600408e-05,
      "loss": 1.2824,
      "step": 77500
    },
    {
      "epoch": 22.57,
      "learning_rate": 4.752368114252405e-05,
      "loss": 1.2784,
      "step": 78000
    },
    {
      "epoch": 22.71,
      "learning_rate": 4.750546487904401e-05,
      "loss": 1.2747,
      "step": 78500
    },
    {
      "epoch": 22.86,
      "learning_rate": 4.748728504809094e-05,
      "loss": 1.2678,
      "step": 79000
    },
    {
      "epoch": 23.0,
      "learning_rate": 4.74690687846109e-05,
      "loss": 1.2636,
      "step": 79500
    },
    {
      "epoch": 23.15,
      "learning_rate": 4.7450852521130867e-05,
      "loss": 1.2593,
      "step": 80000
    },
    {
      "epoch": 23.29,
      "learning_rate": 4.743263625765083e-05,
      "loss": 1.2539,
      "step": 80500
    },
    {
      "epoch": 23.44,
      "learning_rate": 4.74144199941708e-05,
      "loss": 1.2525,
      "step": 81000
    },
    {
      "epoch": 23.58,
      "learning_rate": 4.739620373069076e-05,
      "loss": 1.2484,
      "step": 81500
    },
    {
      "epoch": 23.73,
      "learning_rate": 4.7377987467210726e-05,
      "loss": 1.2431,
      "step": 82000
    },
    {
      "epoch": 23.87,
      "learning_rate": 4.735977120373069e-05,
      "loss": 1.2402,
      "step": 82500
    },
    {
      "epoch": 24.02,
      "learning_rate": 4.734159137277762e-05,
      "loss": 1.2358,
      "step": 83000
    },
    {
      "epoch": 24.16,
      "learning_rate": 4.732337510929759e-05,
      "loss": 1.2315,
      "step": 83500
    },
    {
      "epoch": 24.31,
      "learning_rate": 4.730515884581755e-05,
      "loss": 1.2275,
      "step": 84000
    },
    {
      "epoch": 24.45,
      "learning_rate": 4.7286942582337514e-05,
      "loss": 1.2223,
      "step": 84500
    },
    {
      "epoch": 24.59,
      "learning_rate": 4.726872631885748e-05,
      "loss": 1.2205,
      "step": 85000
    },
    {
      "epoch": 24.74,
      "learning_rate": 4.725051005537745e-05,
      "loss": 1.2182,
      "step": 85500
    },
    {
      "epoch": 24.88,
      "learning_rate": 4.723233022442437e-05,
      "loss": 1.216,
      "step": 86000
    },
    {
      "epoch": 25.03,
      "learning_rate": 4.721411396094433e-05,
      "loss": 1.2103,
      "step": 86500
    },
    {
      "epoch": 25.17,
      "learning_rate": 4.7195897697464295e-05,
      "loss": 1.206,
      "step": 87000
    },
    {
      "epoch": 25.32,
      "learning_rate": 4.717768143398426e-05,
      "loss": 1.206,
      "step": 87500
    },
    {
      "epoch": 25.46,
      "learning_rate": 4.715946517050423e-05,
      "loss": 1.2014,
      "step": 88000
    },
    {
      "epoch": 25.61,
      "learning_rate": 4.7141285339551156e-05,
      "loss": 1.199,
      "step": 88500
    },
    {
      "epoch": 25.75,
      "learning_rate": 4.712306907607112e-05,
      "loss": 1.195,
      "step": 89000
    },
    {
      "epoch": 25.9,
      "learning_rate": 4.710485281259108e-05,
      "loss": 1.1918,
      "step": 89500
    },
    {
      "epoch": 26.04,
      "learning_rate": 4.7086636549111045e-05,
      "loss": 1.1878,
      "step": 90000
    },
    {
      "epoch": 26.19,
      "learning_rate": 4.7068420285631015e-05,
      "loss": 1.1847,
      "step": 90500
    },
    {
      "epoch": 26.33,
      "learning_rate": 4.705020402215098e-05,
      "loss": 1.1833,
      "step": 91000
    },
    {
      "epoch": 26.48,
      "learning_rate": 4.703198775867094e-05,
      "loss": 1.1797,
      "step": 91500
    },
    {
      "epoch": 26.62,
      "learning_rate": 4.701377149519091e-05,
      "loss": 1.178,
      "step": 92000
    },
    {
      "epoch": 26.77,
      "learning_rate": 4.6995555231710875e-05,
      "loss": 1.1754,
      "step": 92500
    },
    {
      "epoch": 26.91,
      "learning_rate": 4.6977411833284754e-05,
      "loss": 1.1705,
      "step": 93000
    },
    {
      "epoch": 27.05,
      "learning_rate": 4.6959195569804724e-05,
      "loss": 1.168,
      "step": 93500
    },
    {
      "epoch": 27.2,
      "learning_rate": 4.694101573885165e-05,
      "loss": 1.1667,
      "step": 94000
    },
    {
      "epoch": 27.34,
      "learning_rate": 4.6922799475371615e-05,
      "loss": 1.1635,
      "step": 94500
    },
    {
      "epoch": 27.49,
      "learning_rate": 4.690458321189158e-05,
      "loss": 1.1615,
      "step": 95000
    },
    {
      "epoch": 27.63,
      "learning_rate": 4.688636694841154e-05,
      "loss": 1.1577,
      "step": 95500
    },
    {
      "epoch": 27.78,
      "learning_rate": 4.686815068493151e-05,
      "loss": 1.1564,
      "step": 96000
    },
    {
      "epoch": 27.92,
      "learning_rate": 4.6849934421451475e-05,
      "loss": 1.1531,
      "step": 96500
    }
  ],
  "max_steps": 1382400,
  "num_train_epochs": 400,
  "total_flos": 2.607846263447198e+19,
  "trial_name": null,
  "trial_params": null
}