{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.3943855786431816,
  "eval_steps": 500,
  "global_step": 87000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.977065272235219e-05,
      "loss": 3.2765,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.954130544470437e-05,
      "loss": 2.8884,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.931195816705656e-05,
      "loss": 2.7582,
      "step": 1500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.908261088940875e-05,
      "loss": 2.6689,
      "step": 2000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.885326361176093e-05,
      "loss": 2.6055,
      "step": 2500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.862391633411312e-05,
      "loss": 2.5549,
      "step": 3000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.83945690564653e-05,
      "loss": 2.5199,
      "step": 3500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.816522177881749e-05,
      "loss": 2.4834,
      "step": 4000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7935874501169674e-05,
      "loss": 2.4538,
      "step": 4500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.770652722352186e-05,
      "loss": 2.4273,
      "step": 5000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7477179945874044e-05,
      "loss": 2.4007,
      "step": 5500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.7247832668226236e-05,
      "loss": 2.3784,
      "step": 6000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.7018485390578414e-05,
      "loss": 2.3597,
      "step": 6500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.67891381129306e-05,
      "loss": 2.3433,
      "step": 7000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6559790835282785e-05,
      "loss": 2.3291,
      "step": 7500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.633044355763497e-05,
      "loss": 2.3127,
      "step": 8000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.610109627998716e-05,
      "loss": 2.3013,
      "step": 8500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.587174900233934e-05,
      "loss": 2.2863,
      "step": 9000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.564240172469153e-05,
      "loss": 2.2772,
      "step": 9500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5413054447043716e-05,
      "loss": 2.2645,
      "step": 10000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.51837071693959e-05,
      "loss": 2.2523,
      "step": 10500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4954359891748087e-05,
      "loss": 2.2424,
      "step": 11000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.472501261410027e-05,
      "loss": 2.2344,
      "step": 11500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4495665336452457e-05,
      "loss": 2.2204,
      "step": 12000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.426631805880465e-05,
      "loss": 2.2142,
      "step": 12500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4036970781156827e-05,
      "loss": 2.2074,
      "step": 13000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.380762350350902e-05,
      "loss": 2.1972,
      "step": 13500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3578276225861203e-05,
      "loss": 2.1857,
      "step": 14000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.334892894821339e-05,
      "loss": 2.1819,
      "step": 14500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3119581670565573e-05,
      "loss": 2.1736,
      "step": 15000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.289023439291776e-05,
      "loss": 2.165,
      "step": 15500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2660887115269944e-05,
      "loss": 2.1599,
      "step": 16000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.243153983762213e-05,
      "loss": 2.1566,
      "step": 16500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2202192559974314e-05,
      "loss": 2.1496,
      "step": 17000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.19728452823265e-05,
      "loss": 2.1421,
      "step": 17500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.174349800467869e-05,
      "loss": 2.138,
      "step": 18000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.151415072703087e-05,
      "loss": 2.1319,
      "step": 18500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.128480344938306e-05,
      "loss": 2.1262,
      "step": 19000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.1055456171735245e-05,
      "loss": 2.1194,
      "step": 19500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.082610889408743e-05,
      "loss": 2.1162,
      "step": 20000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0596761616439616e-05,
      "loss": 2.1084,
      "step": 20500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.03674143387918e-05,
      "loss": 2.1045,
      "step": 21000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0138067061143986e-05,
      "loss": 2.0962,
      "step": 21500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.990871978349617e-05,
      "loss": 2.0967,
      "step": 22000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9679372505848356e-05,
      "loss": 2.0875,
      "step": 22500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.945002522820054e-05,
      "loss": 2.0823,
      "step": 23000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.922067795055273e-05,
      "loss": 2.0831,
      "step": 23500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.899133067290491e-05,
      "loss": 2.0766,
      "step": 24000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.87619833952571e-05,
      "loss": 2.0725,
      "step": 24500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.853263611760928e-05,
      "loss": 2.0637,
      "step": 25000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.830328883996147e-05,
      "loss": 2.064,
      "step": 25500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.807394156231366e-05,
      "loss": 2.0582,
      "step": 26000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.784459428466584e-05,
      "loss": 2.0575,
      "step": 26500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.761524700701803e-05,
      "loss": 2.0531,
      "step": 27000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.738589972937022e-05,
      "loss": 2.0473,
      "step": 27500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.71565524517224e-05,
      "loss": 2.0474,
      "step": 28000
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.692720517407459e-05,
      "loss": 2.0426,
      "step": 28500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6697857896426775e-05,
      "loss": 2.0421,
      "step": 29000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.646851061877895e-05,
      "loss": 2.0366,
      "step": 29500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.6239163341131145e-05,
      "loss": 2.0312,
      "step": 30000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.600981606348332e-05,
      "loss": 2.0291,
      "step": 30500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5780468785835515e-05,
      "loss": 2.0279,
      "step": 31000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.55511215081877e-05,
      "loss": 2.028,
      "step": 31500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5321774230539885e-05,
      "loss": 2.023,
      "step": 32000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.509242695289207e-05,
      "loss": 2.0178,
      "step": 32500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.486307967524426e-05,
      "loss": 2.015,
      "step": 33000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.463373239759644e-05,
      "loss": 2.0145,
      "step": 33500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.440438511994863e-05,
      "loss": 2.0159,
      "step": 34000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.417503784230081e-05,
      "loss": 2.0089,
      "step": 34500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3945690564653e-05,
      "loss": 2.004,
      "step": 35000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.371634328700519e-05,
      "loss": 2.0045,
      "step": 35500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.348699600935737e-05,
      "loss": 2.0026,
      "step": 36000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.325764873170956e-05,
      "loss": 1.9939,
      "step": 36500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.302830145406174e-05,
      "loss": 1.9928,
      "step": 37000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.279895417641393e-05,
      "loss": 1.9924,
      "step": 37500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.256960689876611e-05,
      "loss": 1.9886,
      "step": 38000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.23402596211183e-05,
      "loss": 1.9853,
      "step": 38500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.211091234347048e-05,
      "loss": 1.9814,
      "step": 39000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1881565065822674e-05,
      "loss": 1.9826,
      "step": 39500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.165221778817485e-05,
      "loss": 1.98,
      "step": 40000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.1422870510527044e-05,
      "loss": 1.9785,
      "step": 40500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.119352323287923e-05,
      "loss": 1.9761,
      "step": 41000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.0964175955231414e-05,
      "loss": 1.9724,
      "step": 41500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.07348286775836e-05,
      "loss": 1.9746,
      "step": 42000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0505481399935787e-05,
      "loss": 1.972,
      "step": 42500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.027613412228797e-05,
      "loss": 1.9707,
      "step": 43000
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.0046786844640157e-05,
      "loss": 1.9685,
      "step": 43500
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.981743956699234e-05,
      "loss": 1.9605,
      "step": 44000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9588092289344527e-05,
      "loss": 1.9596,
      "step": 44500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9358745011696716e-05,
      "loss": 1.9608,
      "step": 45000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9129397734048897e-05,
      "loss": 1.9598,
      "step": 45500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8900050456401086e-05,
      "loss": 1.9564,
      "step": 46000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.867070317875327e-05,
      "loss": 1.9557,
      "step": 46500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8441355901105456e-05,
      "loss": 1.9526,
      "step": 47000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.821200862345764e-05,
      "loss": 1.9472,
      "step": 47500
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.7982661345809826e-05,
      "loss": 1.9466,
      "step": 48000
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.775331406816201e-05,
      "loss": 1.9479,
      "step": 48500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.75239667905142e-05,
      "loss": 1.9448,
      "step": 49000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.729461951286638e-05,
      "loss": 1.9451,
      "step": 49500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.706527223521857e-05,
      "loss": 1.9458,
      "step": 50000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6835924957570758e-05,
      "loss": 1.9373,
      "step": 50500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.660657767992294e-05,
      "loss": 1.9364,
      "step": 51000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6377230402275128e-05,
      "loss": 1.9345,
      "step": 51500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.614788312462731e-05,
      "loss": 1.9366,
      "step": 52000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.5918535846979498e-05,
      "loss": 1.9348,
      "step": 52500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5689188569331686e-05,
      "loss": 1.9336,
      "step": 53000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5459841291683868e-05,
      "loss": 1.932,
      "step": 53500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5230494014036056e-05,
      "loss": 1.9333,
      "step": 54000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.500114673638824e-05,
      "loss": 1.9303,
      "step": 54500
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4771799458740426e-05,
      "loss": 1.9276,
      "step": 55000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.454245218109261e-05,
      "loss": 1.9268,
      "step": 55500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4313104903444796e-05,
      "loss": 1.9244,
      "step": 56000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.408375762579698e-05,
      "loss": 1.9256,
      "step": 56500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.385441034814917e-05,
      "loss": 1.9191,
      "step": 57000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3625063070501355e-05,
      "loss": 1.9186,
      "step": 57500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.339571579285354e-05,
      "loss": 1.9188,
      "step": 58000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3166368515205725e-05,
      "loss": 1.9179,
      "step": 58500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.2937021237557913e-05,
      "loss": 1.9176,
      "step": 59000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.27076739599101e-05,
      "loss": 1.9154,
      "step": 59500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2478326682262283e-05,
      "loss": 1.9119,
      "step": 60000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.224897940461447e-05,
      "loss": 1.9138,
      "step": 60500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.2019632126966653e-05,
      "loss": 1.9129,
      "step": 61000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.179028484931884e-05,
      "loss": 1.9114,
      "step": 61500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1560937571671023e-05,
      "loss": 1.909,
      "step": 62000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.133159029402321e-05,
      "loss": 1.9054,
      "step": 62500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1102243016375397e-05,
      "loss": 1.905,
      "step": 63000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0872895738727582e-05,
      "loss": 1.9037,
      "step": 63500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0643548461079767e-05,
      "loss": 1.9061,
      "step": 64000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0414201183431952e-05,
      "loss": 1.902,
      "step": 64500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.018485390578414e-05,
      "loss": 1.8995,
      "step": 65000
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9955506628136325e-05,
      "loss": 1.8987,
      "step": 65500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.972615935048851e-05,
      "loss": 1.8988,
      "step": 66000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9496812072840695e-05,
      "loss": 1.9013,
      "step": 66500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9267464795192884e-05,
      "loss": 1.8992,
      "step": 67000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.903811751754507e-05,
      "loss": 1.8974,
      "step": 67500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8808770239897254e-05,
      "loss": 1.8943,
      "step": 68000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.857942296224944e-05,
      "loss": 1.8954,
      "step": 68500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8350075684601624e-05,
      "loss": 1.8942,
      "step": 69000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.812072840695381e-05,
      "loss": 1.8904,
      "step": 69500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7891381129305994e-05,
      "loss": 1.8915,
      "step": 70000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7662033851658182e-05,
      "loss": 1.8871,
      "step": 70500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7432686574010367e-05,
      "loss": 1.8872,
      "step": 71000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7203339296362552e-05,
      "loss": 1.8885,
      "step": 71500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6973992018714737e-05,
      "loss": 1.8835,
      "step": 72000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6744644741066926e-05,
      "loss": 1.8854,
      "step": 72500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.651529746341911e-05,
      "loss": 1.8856,
      "step": 73000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6285950185771296e-05,
      "loss": 1.8836,
      "step": 73500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.605660290812348e-05,
      "loss": 1.8834,
      "step": 74000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.582725563047567e-05,
      "loss": 1.8814,
      "step": 74500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5597908352827854e-05,
      "loss": 1.8795,
      "step": 75000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.536856107518004e-05,
      "loss": 1.8823,
      "step": 75500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5139213797532223e-05,
      "loss": 1.8776,
      "step": 76000
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4909866519884411e-05,
      "loss": 1.8741,
      "step": 76500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4680519242236596e-05,
      "loss": 1.8779,
      "step": 77000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4451171964588781e-05,
      "loss": 1.8771,
      "step": 77500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4221824686940966e-05,
      "loss": 1.8791,
      "step": 78000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3992477409293153e-05,
      "loss": 1.873,
      "step": 78500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3763130131645338e-05,
      "loss": 1.8713,
      "step": 79000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3533782853997523e-05,
      "loss": 1.8717,
      "step": 79500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3304435576349708e-05,
      "loss": 1.8699,
      "step": 80000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3075088298701896e-05,
      "loss": 1.8729,
      "step": 80500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2845741021054082e-05,
      "loss": 1.8708,
      "step": 81000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2616393743406265e-05,
      "loss": 1.869,
      "step": 81500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2387046465758452e-05,
      "loss": 1.8693,
      "step": 82000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2157699188110638e-05,
      "loss": 1.8697,
      "step": 82500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.1928351910462823e-05,
      "loss": 1.8694,
      "step": 83000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.169900463281501e-05,
      "loss": 1.8692,
      "step": 83500
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1469657355167195e-05,
      "loss": 1.871,
      "step": 84000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1240310077519382e-05,
      "loss": 1.8667,
      "step": 84500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1010962799871565e-05,
      "loss": 1.8603,
      "step": 85000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0781615522223752e-05,
      "loss": 1.8638,
      "step": 85500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0552268244575937e-05,
      "loss": 1.8632,
      "step": 86000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0322920966928124e-05,
      "loss": 1.8652,
      "step": 86500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0093573689280309e-05,
      "loss": 1.8638,
      "step": 87000
    },
    {
      "epoch": 2.39,
      "step": 87000,
      "total_flos": 5.8634472383906e+18,
      "train_loss": 0.0,
      "train_runtime": 3.9321,
      "train_samples_per_second": 7096699.346,
      "train_steps_per_second": 13860.374
    }
  ],
  "logging_steps": 500,
  "max_steps": 54501,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 5.8634472383906e+18,
  "trial_name": null,
  "trial_params": null
}