{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9999330508717734,
  "eval_steps": 500,
  "global_step": 117624,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9787458341835e-05,
      "loss": 4.4684,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.957491668367e-05,
      "loss": 3.4213,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9362375025505e-05,
      "loss": 2.9387,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.914983336734e-05,
      "loss": 2.6616,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.8937291709175e-05,
      "loss": 2.4563,
      "step": 2500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.872475005101e-05,
      "loss": 2.2849,
      "step": 3000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8512208392845e-05,
      "loss": 2.1594,
      "step": 3500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.829966673468e-05,
      "loss": 2.0474,
      "step": 4000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8087125076515e-05,
      "loss": 1.9558,
      "step": 4500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.787458341835e-05,
      "loss": 1.877,
      "step": 5000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7662041760185e-05,
      "loss": 1.8105,
      "step": 5500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.744950010202e-05,
      "loss": 1.7536,
      "step": 6000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.7236958443855e-05,
      "loss": 1.7081,
      "step": 6500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.7024416785689995e-05,
      "loss": 1.6638,
      "step": 7000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6811875127524994e-05,
      "loss": 1.6277,
      "step": 7500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6599333469359994e-05,
      "loss": 1.5962,
      "step": 8000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.638679181119499e-05,
      "loss": 1.5655,
      "step": 8500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.617425015303e-05,
      "loss": 1.54,
      "step": 9000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5961708494865e-05,
      "loss": 1.5168,
      "step": 9500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.57491668367e-05,
      "loss": 1.4923,
      "step": 10000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5536625178535e-05,
      "loss": 1.475,
      "step": 10500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.532408352036999e-05,
      "loss": 1.4551,
      "step": 11000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.511154186220499e-05,
      "loss": 1.4404,
      "step": 11500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.489900020403999e-05,
      "loss": 1.4239,
      "step": 12000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4686458545874996e-05,
      "loss": 1.4138,
      "step": 12500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4473916887709995e-05,
      "loss": 1.3989,
      "step": 13000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4261375229544995e-05,
      "loss": 1.3857,
      "step": 13500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4048833571379994e-05,
      "loss": 1.3811,
      "step": 14000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3836291913214994e-05,
      "loss": 1.3652,
      "step": 14500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.362375025504999e-05,
      "loss": 1.3575,
      "step": 15000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3411208596884986e-05,
      "loss": 1.3437,
      "step": 15500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.319866693871999e-05,
      "loss": 1.3325,
      "step": 16000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.298612528055499e-05,
      "loss": 1.3273,
      "step": 16500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.277358362238999e-05,
      "loss": 1.3199,
      "step": 17000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.256104196422499e-05,
      "loss": 1.3086,
      "step": 17500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.234850030605999e-05,
      "loss": 1.2987,
      "step": 18000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.213595864789499e-05,
      "loss": 1.2919,
      "step": 18500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.192341698972999e-05,
      "loss": 1.2866,
      "step": 19000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.171087533156499e-05,
      "loss": 1.2785,
      "step": 19500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.149833367339999e-05,
      "loss": 1.2691,
      "step": 20000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.128579201523499e-05,
      "loss": 1.2641,
      "step": 20500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.1073250357069987e-05,
      "loss": 1.2592,
      "step": 21000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0860708698904986e-05,
      "loss": 1.2557,
      "step": 21500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0648167040739986e-05,
      "loss": 1.2464,
      "step": 22000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0435625382574985e-05,
      "loss": 1.2397,
      "step": 22500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.022308372440999e-05,
      "loss": 1.2366,
      "step": 23000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.001054206624499e-05,
      "loss": 1.2306,
      "step": 23500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.979800040807998e-05,
      "loss": 1.2231,
      "step": 24000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.958545874991498e-05,
      "loss": 1.2238,
      "step": 24500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.937291709174998e-05,
      "loss": 1.2159,
      "step": 25000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.916037543358498e-05,
      "loss": 1.213,
      "step": 25500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.894783377541998e-05,
      "loss": 1.2077,
      "step": 26000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.873529211725499e-05,
      "loss": 1.1987,
      "step": 26500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.852275045908999e-05,
      "loss": 1.1973,
      "step": 27000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8310208800924986e-05,
      "loss": 1.1994,
      "step": 27500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8097667142759986e-05,
      "loss": 1.1927,
      "step": 28000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.788512548459498e-05,
      "loss": 1.1876,
      "step": 28500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.767258382642998e-05,
      "loss": 1.1826,
      "step": 29000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.746004216826498e-05,
      "loss": 1.182,
      "step": 29500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7247500510099984e-05,
      "loss": 1.1743,
      "step": 30000
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.703495885193498e-05,
      "loss": 1.17,
      "step": 30500
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.682241719376998e-05,
      "loss": 1.1697,
      "step": 31000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.660987553560498e-05,
      "loss": 1.1643,
      "step": 31500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.639733387743998e-05,
      "loss": 1.1603,
      "step": 32000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.618479221927498e-05,
      "loss": 1.1545,
      "step": 32500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.5972250561109974e-05,
      "loss": 1.1541,
      "step": 33000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.575970890294498e-05,
      "loss": 1.1526,
      "step": 33500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.554716724477998e-05,
      "loss": 1.1493,
      "step": 34000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.533462558661498e-05,
      "loss": 1.1445,
      "step": 34500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.512208392844998e-05,
      "loss": 1.1448,
      "step": 35000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.490954227028498e-05,
      "loss": 1.1402,
      "step": 35500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.469700061211998e-05,
      "loss": 1.1383,
      "step": 36000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.448445895395498e-05,
      "loss": 1.1321,
      "step": 36500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4271917295789976e-05,
      "loss": 1.1289,
      "step": 37000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.4059375637624976e-05,
      "loss": 1.1267,
      "step": 37500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3846833979459975e-05,
      "loss": 1.1277,
      "step": 38000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3634292321294975e-05,
      "loss": 1.1262,
      "step": 38500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3421750663129974e-05,
      "loss": 1.1191,
      "step": 39000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3209209004964973e-05,
      "loss": 1.1184,
      "step": 39500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.299666734679997e-05,
      "loss": 1.1128,
      "step": 40000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.278412568863497e-05,
      "loss": 1.1132,
      "step": 40500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.257158403046998e-05,
      "loss": 1.1112,
      "step": 41000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.235904237230497e-05,
      "loss": 1.1079,
      "step": 41500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.214650071413997e-05,
      "loss": 1.104,
      "step": 42000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.193395905597497e-05,
      "loss": 1.1002,
      "step": 42500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.172141739780997e-05,
      "loss": 1.1009,
      "step": 43000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.150887573964497e-05,
      "loss": 1.0992,
      "step": 43500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1296334081479975e-05,
      "loss": 1.0965,
      "step": 44000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1083792423314975e-05,
      "loss": 1.0962,
      "step": 44500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0871250765149974e-05,
      "loss": 1.0914,
      "step": 45000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0658709106984974e-05,
      "loss": 1.0928,
      "step": 45500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0446167448819966e-05,
      "loss": 1.0898,
      "step": 46000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.023362579065497e-05,
      "loss": 1.0877,
      "step": 46500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.002108413248997e-05,
      "loss": 1.0868,
      "step": 47000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9808542474324968e-05,
      "loss": 1.0834,
      "step": 47500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9596000816159968e-05,
      "loss": 1.0795,
      "step": 48000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.938345915799497e-05,
      "loss": 1.0769,
      "step": 48500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.917091749982997e-05,
      "loss": 1.0741,
      "step": 49000
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.895837584166497e-05,
      "loss": 1.0746,
      "step": 49500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.874583418349997e-05,
      "loss": 1.0726,
      "step": 50000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8533292525334965e-05,
      "loss": 1.0739,
      "step": 50500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8320750867169965e-05,
      "loss": 1.0666,
      "step": 51000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8108209209004964e-05,
      "loss": 1.0668,
      "step": 51500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7895667550839967e-05,
      "loss": 1.0654,
      "step": 52000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7683125892674966e-05,
      "loss": 1.0652,
      "step": 52500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7470584234509966e-05,
      "loss": 1.065,
      "step": 53000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7258042576344965e-05,
      "loss": 1.0675,
      "step": 53500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.7045500918179968e-05,
      "loss": 1.0609,
      "step": 54000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.683295926001496e-05,
      "loss": 1.0625,
      "step": 54500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.662041760184996e-05,
      "loss": 1.058,
      "step": 55000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6407875943684963e-05,
      "loss": 1.0542,
      "step": 55500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6195334285519963e-05,
      "loss": 1.0533,
      "step": 56000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.5982792627354962e-05,
      "loss": 1.0482,
      "step": 56500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.577025096918996e-05,
      "loss": 1.052,
      "step": 57000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5557709311024964e-05,
      "loss": 1.0514,
      "step": 57500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5345167652859964e-05,
      "loss": 1.0481,
      "step": 58000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5132625994694963e-05,
      "loss": 1.0474,
      "step": 58500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.4920084336529963e-05,
      "loss": 1.0426,
      "step": 59000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4707542678364962e-05,
      "loss": 1.042,
      "step": 59500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4495001020199958e-05,
      "loss": 1.0408,
      "step": 60000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.428245936203496e-05,
      "loss": 1.043,
      "step": 60500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.406991770386996e-05,
      "loss": 1.0384,
      "step": 61000
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.385737604570496e-05,
      "loss": 1.0373,
      "step": 61500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.364483438753996e-05,
      "loss": 1.0343,
      "step": 62000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.343229272937496e-05,
      "loss": 1.0348,
      "step": 62500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.321975107120996e-05,
      "loss": 1.0339,
      "step": 63000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3007209413044958e-05,
      "loss": 1.0326,
      "step": 63500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2794667754879957e-05,
      "loss": 1.0329,
      "step": 64000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2582126096714957e-05,
      "loss": 1.0273,
      "step": 64500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2369584438549956e-05,
      "loss": 1.027,
      "step": 65000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2157042780384956e-05,
      "loss": 1.0304,
      "step": 65500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.194450112221996e-05,
      "loss": 1.0279,
      "step": 66000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1731959464054955e-05,
      "loss": 1.0274,
      "step": 66500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1519417805889954e-05,
      "loss": 1.0244,
      "step": 67000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1306876147724954e-05,
      "loss": 1.023,
      "step": 67500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1094334489559956e-05,
      "loss": 1.0218,
      "step": 68000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0881792831394956e-05,
      "loss": 1.0233,
      "step": 68500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0669251173229952e-05,
      "loss": 1.0195,
      "step": 69000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0456709515064955e-05,
      "loss": 1.0172,
      "step": 69500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0244167856899954e-05,
      "loss": 1.0144,
      "step": 70000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.0031626198734954e-05,
      "loss": 1.0188,
      "step": 70500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9819084540569953e-05,
      "loss": 1.019,
      "step": 71000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9606542882404953e-05,
      "loss": 1.014,
      "step": 71500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9394001224239952e-05,
      "loss": 1.0112,
      "step": 72000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.918145956607495e-05,
      "loss": 1.012,
      "step": 72500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.896891790790995e-05,
      "loss": 1.0117,
      "step": 73000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.875637624974495e-05,
      "loss": 1.0103,
      "step": 73500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.854383459157995e-05,
      "loss": 1.0099,
      "step": 74000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.833129293341495e-05,
      "loss": 1.0073,
      "step": 74500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8118751275249952e-05,
      "loss": 1.0091,
      "step": 75000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.790620961708495e-05,
      "loss": 1.0059,
      "step": 75500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7693667958919948e-05,
      "loss": 1.001,
      "step": 76000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.748112630075495e-05,
      "loss": 1.0034,
      "step": 76500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.726858464258995e-05,
      "loss": 1.0015,
      "step": 77000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7056042984424946e-05,
      "loss": 1.0036,
      "step": 77500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6843501326259946e-05,
      "loss": 1.0014,
      "step": 78000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.663095966809495e-05,
      "loss": 1.0012,
      "step": 78500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6418418009929948e-05,
      "loss": 1.0002,
      "step": 79000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6205876351764944e-05,
      "loss": 0.9992,
      "step": 79500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5993334693599947e-05,
      "loss": 0.996,
      "step": 80000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5780793035434946e-05,
      "loss": 0.9908,
      "step": 80500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5568251377269946e-05,
      "loss": 0.9953,
      "step": 81000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5355709719104945e-05,
      "loss": 0.9921,
      "step": 81500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5143168060939945e-05,
      "loss": 0.9906,
      "step": 82000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4930626402774944e-05,
      "loss": 0.9919,
      "step": 82500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4718084744609945e-05,
      "loss": 0.9903,
      "step": 83000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4505543086444945e-05,
      "loss": 0.9882,
      "step": 83500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4293001428279943e-05,
      "loss": 0.9889,
      "step": 84000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4080459770114942e-05,
      "loss": 0.9855,
      "step": 84500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3867918111949943e-05,
      "loss": 0.9882,
      "step": 85000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3655376453784943e-05,
      "loss": 0.9851,
      "step": 85500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3442834795619944e-05,
      "loss": 0.9853,
      "step": 86000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3230293137454942e-05,
      "loss": 0.986,
      "step": 86500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3017751479289941e-05,
      "loss": 0.9861,
      "step": 87000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2805209821124942e-05,
      "loss": 0.9837,
      "step": 87500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2592668162959942e-05,
      "loss": 0.9819,
      "step": 88000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2380126504794941e-05,
      "loss": 0.983,
      "step": 88500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2167584846629939e-05,
      "loss": 0.9832,
      "step": 89000
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.195504318846494e-05,
      "loss": 0.9788,
      "step": 89500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.174250153029994e-05,
      "loss": 0.982,
      "step": 90000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1529959872134939e-05,
      "loss": 0.9786,
      "step": 90500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1317418213969938e-05,
      "loss": 0.98,
      "step": 91000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1104876555804938e-05,
      "loss": 0.9789,
      "step": 91500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0892334897639939e-05,
      "loss": 0.9767,
      "step": 92000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0679793239474937e-05,
      "loss": 0.9762,
      "step": 92500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0467251581309938e-05,
      "loss": 0.9766,
      "step": 93000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0254709923144937e-05,
      "loss": 0.9749,
      "step": 93500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0042168264979937e-05,
      "loss": 0.9752,
      "step": 94000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.829626606814936e-06,
      "loss": 0.9751,
      "step": 94500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.617084948649936e-06,
      "loss": 0.9702,
      "step": 95000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.404543290484935e-06,
      "loss": 0.9744,
      "step": 95500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.192001632319935e-06,
      "loss": 0.9701,
      "step": 96000
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.979459974154934e-06,
      "loss": 0.9705,
      "step": 96500
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.766918315989935e-06,
      "loss": 0.9708,
      "step": 97000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.554376657824933e-06,
      "loss": 0.9712,
      "step": 97500
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.341834999659934e-06,
      "loss": 0.9689,
      "step": 98000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.129293341494934e-06,
      "loss": 0.9672,
      "step": 98500
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.916751683329933e-06,
      "loss": 0.967,
      "step": 99000
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.704210025164933e-06,
      "loss": 0.9665,
      "step": 99500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.491668366999932e-06,
      "loss": 0.9654,
      "step": 100000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.2791267088349325e-06,
      "loss": 0.967,
      "step": 100500
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.066585050669931e-06,
      "loss": 0.9666,
      "step": 101000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.854043392504931e-06,
      "loss": 0.9635,
      "step": 101500
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.641501734339931e-06,
      "loss": 0.9681,
      "step": 102000
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.428960076174931e-06,
      "loss": 0.9631,
      "step": 102500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.216418418009931e-06,
      "loss": 0.9653,
      "step": 103000
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.00387675984493e-06,
      "loss": 0.9612,
      "step": 103500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.7913351016799295e-06,
      "loss": 0.9633,
      "step": 104000
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.578793443514929e-06,
      "loss": 0.9595,
      "step": 104500
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.3662517853499285e-06,
      "loss": 0.9617,
      "step": 105000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.153710127184928e-06,
      "loss": 0.9609,
      "step": 105500
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.941168469019928e-06,
      "loss": 0.9588,
      "step": 106000
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.728626810854928e-06,
      "loss": 0.9613,
      "step": 106500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.516085152689927e-06,
      "loss": 0.9595,
      "step": 107000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.3035434945249275e-06,
      "loss": 0.9592,
      "step": 107500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.091001836359927e-06,
      "loss": 0.959,
      "step": 108000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.878460178194926e-06,
      "loss": 0.9579,
      "step": 108500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6659185200299263e-06,
      "loss": 0.9581,
      "step": 109000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.4533768618649257e-06,
      "loss": 0.9574,
      "step": 109500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.240835203699925e-06,
      "loss": 0.9567,
      "step": 110000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.028293545534925e-06,
      "loss": 0.9584,
      "step": 110500
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.8157518873699246e-06,
      "loss": 0.9519,
      "step": 111000
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.603210229204924e-06,
      "loss": 0.9539,
      "step": 111500
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.390668571039924e-06,
      "loss": 0.9568,
      "step": 112000
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.1781269128749238e-06,
      "loss": 0.9525,
      "step": 112500
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.9655852547099233e-06,
      "loss": 0.958,
      "step": 113000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.753043596544923e-06,
      "loss": 0.9556,
      "step": 113500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.5405019383799224e-06,
      "loss": 0.9521,
      "step": 114000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3279602802149223e-06,
      "loss": 0.9542,
      "step": 114500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.1154186220499217e-06,
      "loss": 0.953,
      "step": 115000
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.028769638849214e-07,
      "loss": 0.9548,
      "step": 115500
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.903353057199211e-07,
      "loss": 0.9539,
      "step": 116000
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.777936475549208e-07,
      "loss": 0.9532,
      "step": 116500
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.6525198938992043e-07,
      "loss": 0.9539,
      "step": 117000
    },
    {
      "epoch": 3.0,
      "learning_rate": 5.271033122492008e-08,
      "loss": 0.9543,
      "step": 117500
    },
    {
      "epoch": 3.0,
      "step": 117624,
      "total_flos": 3.510933473606687e+19,
      "train_loss": 0.762959968830496,
      "train_runtime": 1022407.089,
      "train_samples_per_second": 36.815,
      "train_steps_per_second": 0.115
    }
  ],
  "logging_steps": 500,
  "max_steps": 117624,
  "num_train_epochs": 3,
  "save_steps": 10000,
  "total_flos": 3.510933473606687e+19,
  "trial_name": null,
  "trial_params": null
}