{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 84.95695514272768,
  "global_step": 187500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.45,
      "learning_rate": 7.864000000000001e-06,
      "loss": 8.2983,
      "step": 1000
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.5856e-05,
      "loss": 6.5434,
      "step": 2000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.3848e-05,
      "loss": 6.1765,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "learning_rate": 3.184000000000001e-05,
      "loss": 5.9601,
      "step": 4000
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.9832e-05,
      "loss": 5.8266,
      "step": 5000
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.7824e-05,
      "loss": 5.7332,
      "step": 6000
    },
    {
      "epoch": 3.17,
      "learning_rate": 5.5816e-05,
      "loss": 5.6554,
      "step": 7000
    },
    {
      "epoch": 3.62,
      "learning_rate": 6.380800000000001e-05,
      "loss": 5.5975,
      "step": 8000
    },
    {
      "epoch": 4.08,
      "learning_rate": 7.18e-05,
      "loss": 5.5568,
      "step": 9000
    },
    {
      "epoch": 4.53,
      "learning_rate": 7.9792e-05,
      "loss": 5.5224,
      "step": 10000
    },
    {
      "epoch": 4.98,
      "learning_rate": 7.95614647887324e-05,
      "loss": 5.4944,
      "step": 11000
    },
    {
      "epoch": 5.44,
      "learning_rate": 7.911121126760565e-05,
      "loss": 5.473,
      "step": 12000
    },
    {
      "epoch": 5.89,
      "learning_rate": 7.866095774647888e-05,
      "loss": 5.4557,
      "step": 13000
    },
    {
      "epoch": 6.34,
      "learning_rate": 7.821070422535212e-05,
      "loss": 5.4392,
      "step": 14000
    },
    {
      "epoch": 6.8,
      "learning_rate": 7.776045070422535e-05,
      "loss": 5.4274,
      "step": 15000
    },
    {
      "epoch": 7.25,
      "learning_rate": 7.73101971830986e-05,
      "loss": 5.4168,
      "step": 16000
    },
    {
      "epoch": 7.7,
      "learning_rate": 7.685994366197184e-05,
      "loss": 5.4051,
      "step": 17000
    },
    {
      "epoch": 8.16,
      "learning_rate": 7.640969014084507e-05,
      "loss": 5.3963,
      "step": 18000
    },
    {
      "epoch": 8.61,
      "learning_rate": 7.595943661971832e-05,
      "loss": 5.3901,
      "step": 19000
    },
    {
      "epoch": 9.06,
      "learning_rate": 7.550918309859156e-05,
      "loss": 5.3848,
      "step": 20000
    },
    {
      "epoch": 9.52,
      "learning_rate": 7.505892957746479e-05,
      "loss": 5.2904,
      "step": 21000
    },
    {
      "epoch": 9.97,
      "learning_rate": 7.460867605633804e-05,
      "loss": 4.6695,
      "step": 22000
    },
    {
      "epoch": 10.42,
      "learning_rate": 7.415842253521126e-05,
      "loss": 3.6226,
      "step": 23000
    },
    {
      "epoch": 10.87,
      "learning_rate": 7.370816901408451e-05,
      "loss": 2.5281,
      "step": 24000
    },
    {
      "epoch": 11.33,
      "learning_rate": 7.325791549295775e-05,
      "loss": 2.2386,
      "step": 25000
    },
    {
      "epoch": 11.78,
      "learning_rate": 7.2807661971831e-05,
      "loss": 2.0823,
      "step": 26000
    },
    {
      "epoch": 12.23,
      "learning_rate": 7.235740845070423e-05,
      "loss": 1.9728,
      "step": 27000
    },
    {
      "epoch": 12.69,
      "learning_rate": 7.190715492957747e-05,
      "loss": 1.8923,
      "step": 28000
    },
    {
      "epoch": 13.14,
      "learning_rate": 7.145690140845072e-05,
      "loss": 1.8241,
      "step": 29000
    },
    {
      "epoch": 13.59,
      "learning_rate": 7.100664788732395e-05,
      "loss": 1.7682,
      "step": 30000
    },
    {
      "epoch": 14.05,
      "learning_rate": 7.055639436619719e-05,
      "loss": 1.7244,
      "step": 31000
    },
    {
      "epoch": 14.5,
      "learning_rate": 7.010614084507043e-05,
      "loss": 1.6804,
      "step": 32000
    },
    {
      "epoch": 14.95,
      "learning_rate": 6.965588732394366e-05,
      "loss": 1.6465,
      "step": 33000
    },
    {
      "epoch": 15.41,
      "learning_rate": 6.920563380281691e-05,
      "loss": 1.6159,
      "step": 34000
    },
    {
      "epoch": 15.86,
      "learning_rate": 6.875538028169015e-05,
      "loss": 1.5909,
      "step": 35000
    },
    {
      "epoch": 16.31,
      "learning_rate": 6.830512676056338e-05,
      "loss": 1.5664,
      "step": 36000
    },
    {
      "epoch": 16.76,
      "learning_rate": 6.785487323943663e-05,
      "loss": 1.5481,
      "step": 37000
    },
    {
      "epoch": 17.22,
      "learning_rate": 6.740461971830987e-05,
      "loss": 1.5282,
      "step": 38000
    },
    {
      "epoch": 17.67,
      "learning_rate": 6.69543661971831e-05,
      "loss": 1.5103,
      "step": 39000
    },
    {
      "epoch": 18.12,
      "learning_rate": 6.650411267605634e-05,
      "loss": 1.4946,
      "step": 40000
    },
    {
      "epoch": 18.58,
      "learning_rate": 6.605385915492959e-05,
      "loss": 1.4793,
      "step": 41000
    },
    {
      "epoch": 19.03,
      "learning_rate": 6.560360563380282e-05,
      "loss": 1.4647,
      "step": 42000
    },
    {
      "epoch": 19.48,
      "learning_rate": 6.515335211267606e-05,
      "loss": 1.4525,
      "step": 43000
    },
    {
      "epoch": 19.94,
      "learning_rate": 6.47030985915493e-05,
      "loss": 1.4408,
      "step": 44000
    },
    {
      "epoch": 20.39,
      "learning_rate": 6.425284507042254e-05,
      "loss": 1.429,
      "step": 45000
    },
    {
      "epoch": 20.84,
      "learning_rate": 6.380259154929578e-05,
      "loss": 1.4191,
      "step": 46000
    },
    {
      "epoch": 21.3,
      "learning_rate": 6.335233802816903e-05,
      "loss": 1.4093,
      "step": 47000
    },
    {
      "epoch": 21.75,
      "learning_rate": 6.290208450704226e-05,
      "loss": 1.399,
      "step": 48000
    },
    {
      "epoch": 22.2,
      "learning_rate": 6.24518309859155e-05,
      "loss": 1.3902,
      "step": 49000
    },
    {
      "epoch": 22.66,
      "learning_rate": 6.200157746478873e-05,
      "loss": 1.3811,
      "step": 50000
    },
    {
      "epoch": 23.11,
      "learning_rate": 6.155132394366198e-05,
      "loss": 1.3738,
      "step": 51000
    },
    {
      "epoch": 23.56,
      "learning_rate": 6.110107042253522e-05,
      "loss": 1.3651,
      "step": 52000
    },
    {
      "epoch": 24.01,
      "learning_rate": 6.0650816901408453e-05,
      "loss": 1.361,
      "step": 53000
    },
    {
      "epoch": 24.47,
      "learning_rate": 6.0200563380281696e-05,
      "loss": 1.3519,
      "step": 54000
    },
    {
      "epoch": 24.92,
      "learning_rate": 5.975030985915493e-05,
      "loss": 1.3456,
      "step": 55000
    },
    {
      "epoch": 25.37,
      "learning_rate": 5.9300056338028174e-05,
      "loss": 1.3394,
      "step": 56000
    },
    {
      "epoch": 25.83,
      "learning_rate": 5.8849802816901416e-05,
      "loss": 1.3332,
      "step": 57000
    },
    {
      "epoch": 26.28,
      "learning_rate": 5.839954929577465e-05,
      "loss": 1.3273,
      "step": 58000
    },
    {
      "epoch": 26.73,
      "learning_rate": 5.7949295774647894e-05,
      "loss": 1.3222,
      "step": 59000
    },
    {
      "epoch": 27.19,
      "learning_rate": 5.7499042253521136e-05,
      "loss": 1.3159,
      "step": 60000
    },
    {
      "epoch": 27.64,
      "learning_rate": 5.7048788732394365e-05,
      "loss": 1.3117,
      "step": 61000
    },
    {
      "epoch": 28.09,
      "learning_rate": 5.659853521126761e-05,
      "loss": 1.3067,
      "step": 62000
    },
    {
      "epoch": 28.55,
      "learning_rate": 5.614828169014085e-05,
      "loss": 1.3015,
      "step": 63000
    },
    {
      "epoch": 29.0,
      "learning_rate": 5.569802816901409e-05,
      "loss": 1.2972,
      "step": 64000
    },
    {
      "epoch": 29.45,
      "learning_rate": 5.524777464788733e-05,
      "loss": 1.291,
      "step": 65000
    },
    {
      "epoch": 29.9,
      "learning_rate": 5.479752112676057e-05,
      "loss": 1.2877,
      "step": 66000
    },
    {
      "epoch": 30.36,
      "learning_rate": 5.434726760563381e-05,
      "loss": 1.2836,
      "step": 67000
    },
    {
      "epoch": 30.81,
      "learning_rate": 5.389701408450704e-05,
      "loss": 1.2798,
      "step": 68000
    },
    {
      "epoch": 31.26,
      "learning_rate": 5.344721126760564e-05,
      "loss": 1.276,
      "step": 69000
    },
    {
      "epoch": 31.72,
      "learning_rate": 5.2996507042253526e-05,
      "loss": 1.2723,
      "step": 70000
    },
    {
      "epoch": 32.17,
      "learning_rate": 5.254670422535211e-05,
      "loss": 1.2676,
      "step": 71000
    },
    {
      "epoch": 32.62,
      "learning_rate": 5.2096000000000004e-05,
      "loss": 1.2645,
      "step": 72000
    },
    {
      "epoch": 33.08,
      "learning_rate": 5.1645746478873246e-05,
      "loss": 1.2604,
      "step": 73000
    },
    {
      "epoch": 33.53,
      "learning_rate": 5.119549295774648e-05,
      "loss": 1.2577,
      "step": 74000
    },
    {
      "epoch": 33.98,
      "learning_rate": 5.0745239436619724e-05,
      "loss": 1.2552,
      "step": 75000
    },
    {
      "epoch": 34.44,
      "learning_rate": 5.029498591549297e-05,
      "loss": 1.2516,
      "step": 76000
    },
    {
      "epoch": 34.89,
      "learning_rate": 4.9844732394366195e-05,
      "loss": 1.2488,
      "step": 77000
    },
    {
      "epoch": 35.34,
      "learning_rate": 4.939447887323944e-05,
      "loss": 1.2458,
      "step": 78000
    },
    {
      "epoch": 35.8,
      "learning_rate": 4.894467605633804e-05,
      "loss": 1.2443,
      "step": 79000
    },
    {
      "epoch": 36.25,
      "learning_rate": 4.8493971830985916e-05,
      "loss": 1.2398,
      "step": 80000
    },
    {
      "epoch": 36.7,
      "learning_rate": 4.804371830985916e-05,
      "loss": 1.2374,
      "step": 81000
    },
    {
      "epoch": 37.15,
      "learning_rate": 4.75934647887324e-05,
      "loss": 1.2341,
      "step": 82000
    },
    {
      "epoch": 37.61,
      "learning_rate": 4.714321126760564e-05,
      "loss": 1.2321,
      "step": 83000
    },
    {
      "epoch": 38.06,
      "learning_rate": 4.669295774647888e-05,
      "loss": 1.23,
      "step": 84000
    },
    {
      "epoch": 38.51,
      "learning_rate": 4.624270422535212e-05,
      "loss": 1.2265,
      "step": 85000
    },
    {
      "epoch": 38.97,
      "learning_rate": 4.5792450704225356e-05,
      "loss": 1.2253,
      "step": 86000
    },
    {
      "epoch": 39.42,
      "learning_rate": 4.534219718309859e-05,
      "loss": 1.2227,
      "step": 87000
    },
    {
      "epoch": 39.87,
      "learning_rate": 4.4891943661971834e-05,
      "loss": 1.2204,
      "step": 88000
    },
    {
      "epoch": 40.33,
      "learning_rate": 4.4441690140845077e-05,
      "loss": 1.2171,
      "step": 89000
    },
    {
      "epoch": 40.78,
      "learning_rate": 4.399143661971831e-05,
      "loss": 1.2154,
      "step": 90000
    },
    {
      "epoch": 41.23,
      "learning_rate": 4.3541183098591555e-05,
      "loss": 1.2132,
      "step": 91000
    },
    {
      "epoch": 41.69,
      "learning_rate": 4.30909295774648e-05,
      "loss": 1.2118,
      "step": 92000
    },
    {
      "epoch": 42.14,
      "learning_rate": 4.2640676056338026e-05,
      "loss": 1.2088,
      "step": 93000
    },
    {
      "epoch": 42.59,
      "learning_rate": 4.2189971830985916e-05,
      "loss": 1.2068,
      "step": 94000
    },
    {
      "epoch": 43.04,
      "learning_rate": 4.173971830985916e-05,
      "loss": 1.2063,
      "step": 95000
    },
    {
      "epoch": 43.5,
      "learning_rate": 4.12894647887324e-05,
      "loss": 1.2025,
      "step": 96000
    },
    {
      "epoch": 43.95,
      "learning_rate": 4.083921126760564e-05,
      "loss": 1.2013,
      "step": 97000
    },
    {
      "epoch": 44.4,
      "learning_rate": 4.038895774647888e-05,
      "loss": 1.2002,
      "step": 98000
    },
    {
      "epoch": 44.86,
      "learning_rate": 3.993825352112676e-05,
      "loss": 1.198,
      "step": 99000
    },
    {
      "epoch": 45.31,
      "learning_rate": 3.9488e-05,
      "loss": 1.1962,
      "step": 100000
    },
    {
      "epoch": 45.76,
      "learning_rate": 3.903774647887324e-05,
      "loss": 1.1953,
      "step": 101000
    },
    {
      "epoch": 46.22,
      "learning_rate": 3.8587492957746483e-05,
      "loss": 1.1927,
      "step": 102000
    },
    {
      "epoch": 46.67,
      "learning_rate": 3.813723943661972e-05,
      "loss": 1.1909,
      "step": 103000
    },
    {
      "epoch": 47.12,
      "learning_rate": 3.768698591549296e-05,
      "loss": 1.1891,
      "step": 104000
    },
    {
      "epoch": 47.58,
      "learning_rate": 3.72367323943662e-05,
      "loss": 1.1876,
      "step": 105000
    },
    {
      "epoch": 48.03,
      "learning_rate": 3.678647887323944e-05,
      "loss": 1.1868,
      "step": 106000
    },
    {
      "epoch": 48.48,
      "learning_rate": 3.633622535211268e-05,
      "loss": 1.1841,
      "step": 107000
    },
    {
      "epoch": 48.94,
      "learning_rate": 3.588597183098592e-05,
      "loss": 1.1844,
      "step": 108000
    },
    {
      "epoch": 49.39,
      "learning_rate": 3.543571830985916e-05,
      "loss": 1.1815,
      "step": 109000
    },
    {
      "epoch": 49.84,
      "learning_rate": 3.4985464788732395e-05,
      "loss": 1.1809,
      "step": 110000
    },
    {
      "epoch": 50.29,
      "learning_rate": 3.453521126760564e-05,
      "loss": 1.1784,
      "step": 111000
    },
    {
      "epoch": 50.75,
      "learning_rate": 3.408495774647888e-05,
      "loss": 1.1774,
      "step": 112000
    },
    {
      "epoch": 51.2,
      "learning_rate": 3.3634704225352115e-05,
      "loss": 1.1761,
      "step": 113000
    },
    {
      "epoch": 51.65,
      "learning_rate": 3.318445070422535e-05,
      "loss": 1.175,
      "step": 114000
    },
    {
      "epoch": 52.11,
      "learning_rate": 3.273419718309859e-05,
      "loss": 1.1742,
      "step": 115000
    },
    {
      "epoch": 52.56,
      "learning_rate": 3.2283943661971836e-05,
      "loss": 1.1726,
      "step": 116000
    },
    {
      "epoch": 53.01,
      "learning_rate": 3.183323943661972e-05,
      "loss": 1.1719,
      "step": 117000
    },
    {
      "epoch": 53.47,
      "learning_rate": 3.1382985915492955e-05,
      "loss": 1.1699,
      "step": 118000
    },
    {
      "epoch": 53.92,
      "learning_rate": 3.09327323943662e-05,
      "loss": 1.1684,
      "step": 119000
    },
    {
      "epoch": 54.37,
      "learning_rate": 3.048247887323944e-05,
      "loss": 1.1676,
      "step": 120000
    },
    {
      "epoch": 54.83,
      "learning_rate": 3.003222535211268e-05,
      "loss": 1.1662,
      "step": 121000
    },
    {
      "epoch": 55.28,
      "learning_rate": 2.9581971830985918e-05,
      "loss": 1.1647,
      "step": 122000
    },
    {
      "epoch": 55.73,
      "learning_rate": 2.9131718309859157e-05,
      "loss": 1.1643,
      "step": 123000
    },
    {
      "epoch": 56.18,
      "learning_rate": 2.86814647887324e-05,
      "loss": 1.1639,
      "step": 124000
    },
    {
      "epoch": 56.64,
      "learning_rate": 2.8231211267605635e-05,
      "loss": 1.1617,
      "step": 125000
    },
    {
      "epoch": 57.09,
      "learning_rate": 2.7780957746478874e-05,
      "loss": 1.1606,
      "step": 126000
    },
    {
      "epoch": 57.54,
      "learning_rate": 2.7330704225352116e-05,
      "loss": 1.1608,
      "step": 127000
    },
    {
      "epoch": 58.0,
      "learning_rate": 2.6880450704225355e-05,
      "loss": 1.159,
      "step": 128000
    },
    {
      "epoch": 58.45,
      "learning_rate": 2.643019718309859e-05,
      "loss": 1.1572,
      "step": 129000
    },
    {
      "epoch": 58.9,
      "learning_rate": 2.5979943661971833e-05,
      "loss": 1.1572,
      "step": 130000
    },
    {
      "epoch": 59.36,
      "learning_rate": 2.5529690140845072e-05,
      "loss": 1.1562,
      "step": 131000
    },
    {
      "epoch": 59.81,
      "learning_rate": 2.5079436619718314e-05,
      "loss": 1.1551,
      "step": 132000
    },
    {
      "epoch": 60.26,
      "learning_rate": 2.462918309859155e-05,
      "loss": 1.1547,
      "step": 133000
    },
    {
      "epoch": 60.72,
      "learning_rate": 2.417892957746479e-05,
      "loss": 1.153,
      "step": 134000
    },
    {
      "epoch": 61.17,
      "learning_rate": 2.372867605633803e-05,
      "loss": 1.1526,
      "step": 135000
    },
    {
      "epoch": 61.62,
      "learning_rate": 2.327842253521127e-05,
      "loss": 1.1522,
      "step": 136000
    },
    {
      "epoch": 62.08,
      "learning_rate": 2.2828169014084506e-05,
      "loss": 1.1505,
      "step": 137000
    },
    {
      "epoch": 62.53,
      "learning_rate": 2.2377915492957748e-05,
      "loss": 1.1505,
      "step": 138000
    },
    {
      "epoch": 62.98,
      "learning_rate": 2.1927661971830987e-05,
      "loss": 1.1485,
      "step": 139000
    },
    {
      "epoch": 63.43,
      "learning_rate": 2.1477859154929578e-05,
      "loss": 1.1482,
      "step": 140000
    },
    {
      "epoch": 63.89,
      "learning_rate": 2.102715492957747e-05,
      "loss": 1.1466,
      "step": 141000
    },
    {
      "epoch": 64.34,
      "learning_rate": 2.0576901408450704e-05,
      "loss": 1.1467,
      "step": 142000
    },
    {
      "epoch": 64.79,
      "learning_rate": 2.0126647887323946e-05,
      "loss": 1.146,
      "step": 143000
    },
    {
      "epoch": 65.25,
      "learning_rate": 1.9676394366197185e-05,
      "loss": 1.1452,
      "step": 144000
    },
    {
      "epoch": 65.7,
      "learning_rate": 1.9226140845070424e-05,
      "loss": 1.1447,
      "step": 145000
    },
    {
      "epoch": 66.15,
      "learning_rate": 1.8775887323943663e-05,
      "loss": 1.1441,
      "step": 146000
    },
    {
      "epoch": 66.61,
      "learning_rate": 1.8325633802816902e-05,
      "loss": 1.143,
      "step": 147000
    },
    {
      "epoch": 67.06,
      "learning_rate": 1.787538028169014e-05,
      "loss": 1.1433,
      "step": 148000
    },
    {
      "epoch": 67.51,
      "learning_rate": 1.7425126760563384e-05,
      "loss": 1.1414,
      "step": 149000
    },
    {
      "epoch": 67.97,
      "learning_rate": 1.697487323943662e-05,
      "loss": 1.1413,
      "step": 150000
    },
    {
      "epoch": 68.42,
      "learning_rate": 1.652461971830986e-05,
      "loss": 1.14,
      "step": 151000
    },
    {
      "epoch": 68.87,
      "learning_rate": 1.60743661971831e-05,
      "loss": 1.14,
      "step": 152000
    },
    {
      "epoch": 69.32,
      "learning_rate": 1.562411267605634e-05,
      "loss": 1.139,
      "step": 153000
    },
    {
      "epoch": 69.78,
      "learning_rate": 1.5173859154929578e-05,
      "loss": 1.1385,
      "step": 154000
    },
    {
      "epoch": 70.23,
      "learning_rate": 1.4723605633802817e-05,
      "loss": 1.1385,
      "step": 155000
    },
    {
      "epoch": 70.68,
      "learning_rate": 1.4272901408450706e-05,
      "loss": 1.1374,
      "step": 156000
    },
    {
      "epoch": 71.14,
      "learning_rate": 1.3822647887323945e-05,
      "loss": 1.1362,
      "step": 157000
    },
    {
      "epoch": 71.59,
      "learning_rate": 1.3372394366197183e-05,
      "loss": 1.137,
      "step": 158000
    },
    {
      "epoch": 72.04,
      "learning_rate": 1.2922140845070423e-05,
      "loss": 1.1355,
      "step": 159000
    },
    {
      "epoch": 72.5,
      "learning_rate": 1.2471887323943664e-05,
      "loss": 1.1342,
      "step": 160000
    },
    {
      "epoch": 72.95,
      "learning_rate": 1.2021633802816903e-05,
      "loss": 1.1347,
      "step": 161000
    },
    {
      "epoch": 73.4,
      "learning_rate": 1.157138028169014e-05,
      "loss": 1.133,
      "step": 162000
    },
    {
      "epoch": 73.86,
      "learning_rate": 1.1121126760563381e-05,
      "loss": 1.1339,
      "step": 163000
    },
    {
      "epoch": 74.31,
      "learning_rate": 1.0670873239436622e-05,
      "loss": 1.1332,
      "step": 164000
    },
    {
      "epoch": 74.76,
      "learning_rate": 1.022061971830986e-05,
      "loss": 1.1319,
      "step": 165000
    },
    {
      "epoch": 75.22,
      "learning_rate": 9.7703661971831e-06,
      "loss": 1.1311,
      "step": 166000
    },
    {
      "epoch": 75.67,
      "learning_rate": 9.32056338028169e-06,
      "loss": 1.1303,
      "step": 167000
    },
    {
      "epoch": 76.12,
      "learning_rate": 8.869859154929579e-06,
      "loss": 1.1318,
      "step": 168000
    },
    {
      "epoch": 76.57,
      "learning_rate": 8.419605633802818e-06,
      "loss": 1.1304,
      "step": 169000
    },
    {
      "epoch": 77.03,
      "learning_rate": 7.969352112676057e-06,
      "loss": 1.1297,
      "step": 170000
    },
    {
      "epoch": 77.48,
      "learning_rate": 7.519098591549297e-06,
      "loss": 1.1295,
      "step": 171000
    },
    {
      "epoch": 77.93,
      "learning_rate": 7.068845070422535e-06,
      "loss": 1.1291,
      "step": 172000
    },
    {
      "epoch": 78.39,
      "learning_rate": 6.618591549295776e-06,
      "loss": 1.1287,
      "step": 173000
    },
    {
      "epoch": 78.84,
      "learning_rate": 6.167887323943662e-06,
      "loss": 1.128,
      "step": 174000
    },
    {
      "epoch": 79.29,
      "learning_rate": 5.717633802816902e-06,
      "loss": 1.1283,
      "step": 175000
    },
    {
      "epoch": 79.75,
      "learning_rate": 5.267380281690141e-06,
      "loss": 1.1272,
      "step": 176000
    },
    {
      "epoch": 80.2,
      "learning_rate": 4.817126760563381e-06,
      "loss": 1.1273,
      "step": 177000
    },
    {
      "epoch": 80.65,
      "learning_rate": 4.366873239436621e-06,
      "loss": 1.1271,
      "step": 178000
    },
    {
      "epoch": 81.11,
      "learning_rate": 3.9166197183098595e-06,
      "loss": 1.126,
      "step": 179000
    },
    {
      "epoch": 81.56,
      "learning_rate": 3.4663661971830985e-06,
      "loss": 1.1263,
      "step": 180000
    },
    {
      "epoch": 82.01,
      "learning_rate": 3.016112676056338e-06,
      "loss": 1.1261,
      "step": 181000
    },
    {
      "epoch": 82.46,
      "learning_rate": 2.5658591549295773e-06,
      "loss": 1.1247,
      "step": 182000
    },
    {
      "epoch": 82.92,
      "learning_rate": 2.115605633802817e-06,
      "loss": 1.1267,
      "step": 183000
    },
    {
      "epoch": 83.37,
      "learning_rate": 1.6653521126760563e-06,
      "loss": 1.1246,
      "step": 184000
    },
    {
      "epoch": 83.82,
      "learning_rate": 1.2150985915492959e-06,
      "loss": 1.1251,
      "step": 185000
    },
    {
      "epoch": 84.28,
      "learning_rate": 7.648450704225354e-07,
      "loss": 1.1251,
      "step": 186000
    },
    {
      "epoch": 84.73,
      "learning_rate": 3.145915492957747e-07,
      "loss": 1.1246,
      "step": 187000
    },
    {
      "epoch": 84.96,
      "step": 187500,
      "total_flos": 1.0104234636302352e+20,
      "train_loss": 0.10515284895833334,
      "train_runtime": 4947.3494,
      "train_samples_per_second": 77617.32,
      "train_steps_per_second": 37.899
    }
  ],
  "max_steps": 187500,
  "num_train_epochs": 85,
  "total_flos": 1.0104234636302352e+20,
  "trial_name": null,
  "trial_params": null
}