{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 128.95460797799174,
  "global_step": 187500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.69,
      "learning_rate": 7.864000000000001e-06,
      "loss": 8.2609,
      "step": 1000
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.5856e-05,
      "loss": 6.5066,
      "step": 2000
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.3848e-05,
      "loss": 6.1594,
      "step": 3000
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.184000000000001e-05,
      "loss": 5.9447,
      "step": 4000
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.9832e-05,
      "loss": 5.798,
      "step": 5000
    },
    {
      "epoch": 4.13,
      "learning_rate": 4.7824e-05,
      "loss": 5.6962,
      "step": 6000
    },
    {
      "epoch": 4.81,
      "learning_rate": 5.5816e-05,
      "loss": 5.621,
      "step": 7000
    },
    {
      "epoch": 5.5,
      "learning_rate": 6.380800000000001e-05,
      "loss": 5.5589,
      "step": 8000
    },
    {
      "epoch": 6.19,
      "learning_rate": 7.18e-05,
      "loss": 5.5164,
      "step": 9000
    },
    {
      "epoch": 6.88,
      "learning_rate": 7.9792e-05,
      "loss": 5.4815,
      "step": 10000
    },
    {
      "epoch": 7.57,
      "learning_rate": 7.95614647887324e-05,
      "loss": 5.4511,
      "step": 11000
    },
    {
      "epoch": 8.25,
      "learning_rate": 7.911121126760565e-05,
      "loss": 5.4282,
      "step": 12000
    },
    {
      "epoch": 8.94,
      "learning_rate": 7.866095774647888e-05,
      "loss": 5.4113,
      "step": 13000
    },
    {
      "epoch": 9.63,
      "learning_rate": 7.821070422535212e-05,
      "loss": 5.396,
      "step": 14000
    },
    {
      "epoch": 10.32,
      "learning_rate": 7.776045070422535e-05,
      "loss": 5.3815,
      "step": 15000
    },
    {
      "epoch": 11.0,
      "learning_rate": 7.73101971830986e-05,
      "loss": 5.3724,
      "step": 16000
    },
    {
      "epoch": 11.69,
      "learning_rate": 7.685994366197184e-05,
      "loss": 5.3629,
      "step": 17000
    },
    {
      "epoch": 12.38,
      "learning_rate": 7.640969014084507e-05,
      "loss": 5.3524,
      "step": 18000
    },
    {
      "epoch": 13.07,
      "learning_rate": 7.595943661971832e-05,
      "loss": 5.3473,
      "step": 19000
    },
    {
      "epoch": 13.76,
      "learning_rate": 7.550918309859156e-05,
      "loss": 5.3371,
      "step": 20000
    },
    {
      "epoch": 14.44,
      "learning_rate": 7.505892957746479e-05,
      "loss": 5.3312,
      "step": 21000
    },
    {
      "epoch": 15.13,
      "learning_rate": 7.460867605633804e-05,
      "loss": 5.328,
      "step": 22000
    },
    {
      "epoch": 15.82,
      "learning_rate": 7.415842253521126e-05,
      "loss": 5.3214,
      "step": 23000
    },
    {
      "epoch": 16.51,
      "learning_rate": 7.370816901408451e-05,
      "loss": 5.2554,
      "step": 24000
    },
    {
      "epoch": 17.19,
      "learning_rate": 7.325791549295775e-05,
      "loss": 4.7986,
      "step": 25000
    },
    {
      "epoch": 17.88,
      "learning_rate": 7.2807661971831e-05,
      "loss": 3.9095,
      "step": 26000
    },
    {
      "epoch": 18.57,
      "learning_rate": 7.235740845070423e-05,
      "loss": 2.6754,
      "step": 27000
    },
    {
      "epoch": 19.26,
      "learning_rate": 7.190715492957747e-05,
      "loss": 2.3377,
      "step": 28000
    },
    {
      "epoch": 19.94,
      "learning_rate": 7.145690140845072e-05,
      "loss": 2.1672,
      "step": 29000
    },
    {
      "epoch": 20.63,
      "learning_rate": 7.100664788732395e-05,
      "loss": 2.0414,
      "step": 30000
    },
    {
      "epoch": 21.32,
      "learning_rate": 7.055639436619719e-05,
      "loss": 1.9491,
      "step": 31000
    },
    {
      "epoch": 22.01,
      "learning_rate": 7.010614084507043e-05,
      "loss": 1.8796,
      "step": 32000
    },
    {
      "epoch": 22.7,
      "learning_rate": 6.965588732394366e-05,
      "loss": 1.8244,
      "step": 33000
    },
    {
      "epoch": 23.38,
      "learning_rate": 6.920563380281691e-05,
      "loss": 1.7778,
      "step": 34000
    },
    {
      "epoch": 24.07,
      "learning_rate": 6.875538028169015e-05,
      "loss": 1.7414,
      "step": 35000
    },
    {
      "epoch": 24.76,
      "learning_rate": 6.830512676056338e-05,
      "loss": 1.7093,
      "step": 36000
    },
    {
      "epoch": 25.45,
      "learning_rate": 6.785487323943663e-05,
      "loss": 1.68,
      "step": 37000
    },
    {
      "epoch": 26.13,
      "learning_rate": 6.740461971830987e-05,
      "loss": 1.6544,
      "step": 38000
    },
    {
      "epoch": 26.82,
      "learning_rate": 6.69543661971831e-05,
      "loss": 1.6316,
      "step": 39000
    },
    {
      "epoch": 27.51,
      "learning_rate": 6.650411267605634e-05,
      "loss": 1.611,
      "step": 40000
    },
    {
      "epoch": 28.2,
      "learning_rate": 6.605385915492959e-05,
      "loss": 1.5917,
      "step": 41000
    },
    {
      "epoch": 28.89,
      "learning_rate": 6.560360563380282e-05,
      "loss": 1.5737,
      "step": 42000
    },
    {
      "epoch": 29.57,
      "learning_rate": 6.515335211267606e-05,
      "loss": 1.5566,
      "step": 43000
    },
    {
      "epoch": 30.26,
      "learning_rate": 6.47030985915493e-05,
      "loss": 1.5425,
      "step": 44000
    },
    {
      "epoch": 30.95,
      "learning_rate": 6.425284507042254e-05,
      "loss": 1.5281,
      "step": 45000
    },
    {
      "epoch": 31.64,
      "learning_rate": 6.380259154929578e-05,
      "loss": 1.5159,
      "step": 46000
    },
    {
      "epoch": 32.32,
      "learning_rate": 6.335233802816903e-05,
      "loss": 1.504,
      "step": 47000
    },
    {
      "epoch": 33.01,
      "learning_rate": 6.290208450704226e-05,
      "loss": 1.4925,
      "step": 48000
    },
    {
      "epoch": 33.7,
      "learning_rate": 6.24518309859155e-05,
      "loss": 1.4811,
      "step": 49000
    },
    {
      "epoch": 34.39,
      "learning_rate": 6.200157746478873e-05,
      "loss": 1.4724,
      "step": 50000
    },
    {
      "epoch": 35.08,
      "learning_rate": 6.155132394366198e-05,
      "loss": 1.4627,
      "step": 51000
    },
    {
      "epoch": 35.76,
      "learning_rate": 6.110107042253522e-05,
      "loss": 1.4542,
      "step": 52000
    },
    {
      "epoch": 36.45,
      "learning_rate": 6.0650816901408453e-05,
      "loss": 1.4449,
      "step": 53000
    },
    {
      "epoch": 37.14,
      "learning_rate": 6.0200563380281696e-05,
      "loss": 1.4365,
      "step": 54000
    },
    {
      "epoch": 37.83,
      "learning_rate": 5.975030985915493e-05,
      "loss": 1.4287,
      "step": 55000
    },
    {
      "epoch": 38.51,
      "learning_rate": 5.9300056338028174e-05,
      "loss": 1.4216,
      "step": 56000
    },
    {
      "epoch": 39.2,
      "learning_rate": 5.8849802816901416e-05,
      "loss": 1.414,
      "step": 57000
    },
    {
      "epoch": 39.89,
      "learning_rate": 5.839954929577465e-05,
      "loss": 1.4082,
      "step": 58000
    },
    {
      "epoch": 40.58,
      "learning_rate": 5.7949295774647894e-05,
      "loss": 1.4009,
      "step": 59000
    },
    {
      "epoch": 41.27,
      "learning_rate": 5.7499042253521136e-05,
      "loss": 1.3951,
      "step": 60000
    },
    {
      "epoch": 41.95,
      "learning_rate": 5.7048788732394365e-05,
      "loss": 1.3894,
      "step": 61000
    },
    {
      "epoch": 42.64,
      "learning_rate": 5.659853521126761e-05,
      "loss": 1.3837,
      "step": 62000
    },
    {
      "epoch": 43.33,
      "learning_rate": 5.614828169014085e-05,
      "loss": 1.3782,
      "step": 63000
    },
    {
      "epoch": 44.02,
      "learning_rate": 5.569802816901409e-05,
      "loss": 1.3711,
      "step": 64000
    },
    {
      "epoch": 44.7,
      "learning_rate": 5.524777464788733e-05,
      "loss": 1.3684,
      "step": 65000
    },
    {
      "epoch": 45.39,
      "learning_rate": 5.479752112676057e-05,
      "loss": 1.362,
      "step": 66000
    },
    {
      "epoch": 46.08,
      "learning_rate": 5.434726760563381e-05,
      "loss": 1.3588,
      "step": 67000
    },
    {
      "epoch": 46.77,
      "learning_rate": 5.389701408450704e-05,
      "loss": 1.3527,
      "step": 68000
    },
    {
      "epoch": 47.46,
      "learning_rate": 5.3446760563380284e-05,
      "loss": 1.3484,
      "step": 69000
    },
    {
      "epoch": 48.14,
      "learning_rate": 5.2996507042253526e-05,
      "loss": 1.3441,
      "step": 70000
    },
    {
      "epoch": 48.83,
      "learning_rate": 5.254625352112676e-05,
      "loss": 1.3403,
      "step": 71000
    },
    {
      "epoch": 49.52,
      "learning_rate": 5.2096000000000004e-05,
      "loss": 1.3362,
      "step": 72000
    },
    {
      "epoch": 50.21,
      "learning_rate": 5.1645746478873246e-05,
      "loss": 1.3325,
      "step": 73000
    },
    {
      "epoch": 50.89,
      "learning_rate": 5.119549295774648e-05,
      "loss": 1.3282,
      "step": 74000
    },
    {
      "epoch": 51.58,
      "learning_rate": 5.0745239436619724e-05,
      "loss": 1.3243,
      "step": 75000
    },
    {
      "epoch": 52.27,
      "learning_rate": 5.029498591549297e-05,
      "loss": 1.3211,
      "step": 76000
    },
    {
      "epoch": 52.96,
      "learning_rate": 4.9844732394366195e-05,
      "loss": 1.3179,
      "step": 77000
    },
    {
      "epoch": 53.65,
      "learning_rate": 4.939447887323944e-05,
      "loss": 1.3145,
      "step": 78000
    },
    {
      "epoch": 54.33,
      "learning_rate": 4.894422535211268e-05,
      "loss": 1.3101,
      "step": 79000
    },
    {
      "epoch": 55.02,
      "learning_rate": 4.8493971830985916e-05,
      "loss": 1.3087,
      "step": 80000
    },
    {
      "epoch": 55.71,
      "learning_rate": 4.804371830985916e-05,
      "loss": 1.3034,
      "step": 81000
    },
    {
      "epoch": 56.4,
      "learning_rate": 4.75934647887324e-05,
      "loss": 1.3019,
      "step": 82000
    },
    {
      "epoch": 57.08,
      "learning_rate": 4.714321126760564e-05,
      "loss": 1.2984,
      "step": 83000
    },
    {
      "epoch": 57.77,
      "learning_rate": 4.669295774647888e-05,
      "loss": 1.2969,
      "step": 84000
    },
    {
      "epoch": 58.46,
      "learning_rate": 4.624270422535212e-05,
      "loss": 1.2925,
      "step": 85000
    },
    {
      "epoch": 59.15,
      "learning_rate": 4.5792450704225356e-05,
      "loss": 1.2898,
      "step": 86000
    },
    {
      "epoch": 59.83,
      "learning_rate": 4.534219718309859e-05,
      "loss": 1.2878,
      "step": 87000
    },
    {
      "epoch": 60.52,
      "learning_rate": 4.4891943661971834e-05,
      "loss": 1.2842,
      "step": 88000
    },
    {
      "epoch": 61.21,
      "learning_rate": 4.4441690140845077e-05,
      "loss": 1.2826,
      "step": 89000
    },
    {
      "epoch": 61.9,
      "learning_rate": 4.399143661971831e-05,
      "loss": 1.2804,
      "step": 90000
    },
    {
      "epoch": 62.59,
      "learning_rate": 4.3541183098591555e-05,
      "loss": 1.2781,
      "step": 91000
    },
    {
      "epoch": 63.27,
      "learning_rate": 4.30909295774648e-05,
      "loss": 1.2757,
      "step": 92000
    },
    {
      "epoch": 63.96,
      "learning_rate": 4.2640676056338026e-05,
      "loss": 1.2727,
      "step": 93000
    },
    {
      "epoch": 64.65,
      "learning_rate": 4.219042253521127e-05,
      "loss": 1.2703,
      "step": 94000
    },
    {
      "epoch": 65.34,
      "learning_rate": 4.174016901408451e-05,
      "loss": 1.2689,
      "step": 95000
    },
    {
      "epoch": 66.02,
      "learning_rate": 4.1289915492957746e-05,
      "loss": 1.2669,
      "step": 96000
    },
    {
      "epoch": 66.71,
      "learning_rate": 4.083966197183099e-05,
      "loss": 1.2649,
      "step": 97000
    },
    {
      "epoch": 67.4,
      "learning_rate": 4.038940845070423e-05,
      "loss": 1.2624,
      "step": 98000
    },
    {
      "epoch": 68.09,
      "learning_rate": 3.9939154929577466e-05,
      "loss": 1.2606,
      "step": 99000
    },
    {
      "epoch": 68.78,
      "learning_rate": 3.948890140845071e-05,
      "loss": 1.2589,
      "step": 100000
    },
    {
      "epoch": 69.46,
      "learning_rate": 3.903864788732395e-05,
      "loss": 1.2574,
      "step": 101000
    },
    {
      "epoch": 70.15,
      "learning_rate": 3.8588394366197187e-05,
      "loss": 1.2548,
      "step": 102000
    },
    {
      "epoch": 70.84,
      "learning_rate": 3.813814084507042e-05,
      "loss": 1.2534,
      "step": 103000
    },
    {
      "epoch": 71.53,
      "learning_rate": 3.7687887323943664e-05,
      "loss": 1.252,
      "step": 104000
    },
    {
      "epoch": 72.21,
      "learning_rate": 3.723763380281691e-05,
      "loss": 1.2505,
      "step": 105000
    },
    {
      "epoch": 72.9,
      "learning_rate": 3.678738028169015e-05,
      "loss": 1.2479,
      "step": 106000
    },
    {
      "epoch": 73.59,
      "learning_rate": 3.6337126760563385e-05,
      "loss": 1.2455,
      "step": 107000
    },
    {
      "epoch": 74.28,
      "learning_rate": 3.588687323943662e-05,
      "loss": 1.2454,
      "step": 108000
    },
    {
      "epoch": 74.97,
      "learning_rate": 3.543661971830986e-05,
      "loss": 1.2433,
      "step": 109000
    },
    {
      "epoch": 75.65,
      "learning_rate": 3.49863661971831e-05,
      "loss": 1.2406,
      "step": 110000
    },
    {
      "epoch": 76.34,
      "learning_rate": 3.453611267605634e-05,
      "loss": 1.239,
      "step": 111000
    },
    {
      "epoch": 77.03,
      "learning_rate": 3.408585915492958e-05,
      "loss": 1.2383,
      "step": 112000
    },
    {
      "epoch": 77.72,
      "learning_rate": 3.363560563380282e-05,
      "loss": 1.2364,
      "step": 113000
    },
    {
      "epoch": 78.4,
      "learning_rate": 3.3185352112676054e-05,
      "loss": 1.236,
      "step": 114000
    },
    {
      "epoch": 79.09,
      "learning_rate": 3.2735098591549296e-05,
      "loss": 1.2341,
      "step": 115000
    },
    {
      "epoch": 79.78,
      "learning_rate": 3.228484507042254e-05,
      "loss": 1.2328,
      "step": 116000
    },
    {
      "epoch": 80.47,
      "learning_rate": 3.183459154929578e-05,
      "loss": 1.2316,
      "step": 117000
    },
    {
      "epoch": 81.16,
      "learning_rate": 3.138433802816902e-05,
      "loss": 1.2297,
      "step": 118000
    },
    {
      "epoch": 81.84,
      "learning_rate": 3.093408450704225e-05,
      "loss": 1.2283,
      "step": 119000
    },
    {
      "epoch": 82.53,
      "learning_rate": 3.0483830985915498e-05,
      "loss": 1.2266,
      "step": 120000
    },
    {
      "epoch": 83.22,
      "learning_rate": 3.0033577464788734e-05,
      "loss": 1.2267,
      "step": 121000
    },
    {
      "epoch": 83.91,
      "learning_rate": 2.9583323943661973e-05,
      "loss": 1.2253,
      "step": 122000
    },
    {
      "epoch": 84.59,
      "learning_rate": 2.9133070422535215e-05,
      "loss": 1.2237,
      "step": 123000
    },
    {
      "epoch": 85.28,
      "learning_rate": 2.8682816901408454e-05,
      "loss": 1.2228,
      "step": 124000
    },
    {
      "epoch": 85.97,
      "learning_rate": 2.8232563380281693e-05,
      "loss": 1.2214,
      "step": 125000
    },
    {
      "epoch": 86.66,
      "learning_rate": 2.7782309859154932e-05,
      "loss": 1.2214,
      "step": 126000
    },
    {
      "epoch": 87.35,
      "learning_rate": 2.733205633802817e-05,
      "loss": 1.2193,
      "step": 127000
    },
    {
      "epoch": 88.03,
      "learning_rate": 2.6881802816901413e-05,
      "loss": 1.2171,
      "step": 128000
    },
    {
      "epoch": 88.72,
      "learning_rate": 2.643154929577465e-05,
      "loss": 1.2166,
      "step": 129000
    },
    {
      "epoch": 89.41,
      "learning_rate": 2.5981295774647888e-05,
      "loss": 1.2173,
      "step": 130000
    },
    {
      "epoch": 90.1,
      "learning_rate": 2.553104225352113e-05,
      "loss": 1.2138,
      "step": 131000
    },
    {
      "epoch": 90.78,
      "learning_rate": 2.508078873239437e-05,
      "loss": 1.2139,
      "step": 132000
    },
    {
      "epoch": 91.47,
      "learning_rate": 2.4630535211267605e-05,
      "loss": 1.2133,
      "step": 133000
    },
    {
      "epoch": 92.16,
      "learning_rate": 2.4180281690140847e-05,
      "loss": 1.2114,
      "step": 134000
    },
    {
      "epoch": 92.85,
      "learning_rate": 2.3730028169014086e-05,
      "loss": 1.2107,
      "step": 135000
    },
    {
      "epoch": 93.54,
      "learning_rate": 2.327977464788733e-05,
      "loss": 1.2104,
      "step": 136000
    },
    {
      "epoch": 94.22,
      "learning_rate": 2.2829521126760564e-05,
      "loss": 1.2097,
      "step": 137000
    },
    {
      "epoch": 94.91,
      "learning_rate": 2.2379267605633803e-05,
      "loss": 1.2085,
      "step": 138000
    },
    {
      "epoch": 95.6,
      "learning_rate": 2.1929014084507045e-05,
      "loss": 1.2066,
      "step": 139000
    },
    {
      "epoch": 96.29,
      "learning_rate": 2.1478760563380284e-05,
      "loss": 1.2061,
      "step": 140000
    },
    {
      "epoch": 96.97,
      "learning_rate": 2.102850704225352e-05,
      "loss": 1.206,
      "step": 141000
    },
    {
      "epoch": 97.66,
      "learning_rate": 2.0578253521126762e-05,
      "loss": 1.2041,
      "step": 142000
    },
    {
      "epoch": 98.35,
      "learning_rate": 2.0128e-05,
      "loss": 1.2045,
      "step": 143000
    },
    {
      "epoch": 99.04,
      "learning_rate": 1.967774647887324e-05,
      "loss": 1.2035,
      "step": 144000
    },
    {
      "epoch": 99.72,
      "learning_rate": 1.9227492957746482e-05,
      "loss": 1.2024,
      "step": 145000
    },
    {
      "epoch": 100.41,
      "learning_rate": 1.877723943661972e-05,
      "loss": 1.2015,
      "step": 146000
    },
    {
      "epoch": 101.1,
      "learning_rate": 1.8326985915492957e-05,
      "loss": 1.202,
      "step": 147000
    },
    {
      "epoch": 101.79,
      "learning_rate": 1.78767323943662e-05,
      "loss": 1.2004,
      "step": 148000
    },
    {
      "epoch": 102.48,
      "learning_rate": 1.7426478873239438e-05,
      "loss": 1.1994,
      "step": 149000
    },
    {
      "epoch": 103.16,
      "learning_rate": 1.6976225352112677e-05,
      "loss": 1.1981,
      "step": 150000
    },
    {
      "epoch": 103.85,
      "learning_rate": 1.6525971830985916e-05,
      "loss": 1.1981,
      "step": 151000
    },
    {
      "epoch": 104.54,
      "learning_rate": 1.6075718309859155e-05,
      "loss": 1.1964,
      "step": 152000
    },
    {
      "epoch": 105.23,
      "learning_rate": 1.5625464788732398e-05,
      "loss": 1.1976,
      "step": 153000
    },
    {
      "epoch": 105.91,
      "learning_rate": 1.5175211267605635e-05,
      "loss": 1.1964,
      "step": 154000
    },
    {
      "epoch": 106.6,
      "learning_rate": 1.4724957746478874e-05,
      "loss": 1.1957,
      "step": 155000
    },
    {
      "epoch": 107.29,
      "learning_rate": 1.4274704225352114e-05,
      "loss": 1.1953,
      "step": 156000
    },
    {
      "epoch": 107.98,
      "learning_rate": 1.3824450704225353e-05,
      "loss": 1.1939,
      "step": 157000
    },
    {
      "epoch": 108.67,
      "learning_rate": 1.3374197183098592e-05,
      "loss": 1.1926,
      "step": 158000
    },
    {
      "epoch": 109.35,
      "learning_rate": 1.2923943661971831e-05,
      "loss": 1.1921,
      "step": 159000
    },
    {
      "epoch": 110.04,
      "learning_rate": 1.2473690140845072e-05,
      "loss": 1.1924,
      "step": 160000
    },
    {
      "epoch": 110.73,
      "learning_rate": 1.2023436619718311e-05,
      "loss": 1.1912,
      "step": 161000
    },
    {
      "epoch": 111.42,
      "learning_rate": 1.157318309859155e-05,
      "loss": 1.1909,
      "step": 162000
    },
    {
      "epoch": 112.1,
      "learning_rate": 1.1122929577464789e-05,
      "loss": 1.1908,
      "step": 163000
    },
    {
      "epoch": 112.79,
      "learning_rate": 1.067267605633803e-05,
      "loss": 1.1897,
      "step": 164000
    },
    {
      "epoch": 113.48,
      "learning_rate": 1.022242253521127e-05,
      "loss": 1.1891,
      "step": 165000
    },
    {
      "epoch": 114.17,
      "learning_rate": 9.772169014084507e-06,
      "loss": 1.1891,
      "step": 166000
    },
    {
      "epoch": 114.86,
      "learning_rate": 9.321915492957746e-06,
      "loss": 1.1884,
      "step": 167000
    },
    {
      "epoch": 115.54,
      "learning_rate": 8.871661971830987e-06,
      "loss": 1.189,
      "step": 168000
    },
    {
      "epoch": 116.23,
      "learning_rate": 8.421408450704226e-06,
      "loss": 1.1874,
      "step": 169000
    },
    {
      "epoch": 116.92,
      "learning_rate": 7.971154929577467e-06,
      "loss": 1.187,
      "step": 170000
    },
    {
      "epoch": 117.61,
      "learning_rate": 7.520901408450705e-06,
      "loss": 1.1861,
      "step": 171000
    },
    {
      "epoch": 118.29,
      "learning_rate": 7.070647887323944e-06,
      "loss": 1.1856,
      "step": 172000
    },
    {
      "epoch": 118.98,
      "learning_rate": 6.620394366197184e-06,
      "loss": 1.1854,
      "step": 173000
    },
    {
      "epoch": 119.67,
      "learning_rate": 6.170140845070423e-06,
      "loss": 1.1849,
      "step": 174000
    },
    {
      "epoch": 120.36,
      "learning_rate": 5.719887323943662e-06,
      "loss": 1.1848,
      "step": 175000
    },
    {
      "epoch": 121.05,
      "learning_rate": 5.269633802816901e-06,
      "loss": 1.1852,
      "step": 176000
    },
    {
      "epoch": 121.73,
      "learning_rate": 4.819380281690141e-06,
      "loss": 1.1839,
      "step": 177000
    },
    {
      "epoch": 122.42,
      "learning_rate": 4.369126760563381e-06,
      "loss": 1.1843,
      "step": 178000
    },
    {
      "epoch": 123.11,
      "learning_rate": 3.91887323943662e-06,
      "loss": 1.1834,
      "step": 179000
    },
    {
      "epoch": 123.8,
      "learning_rate": 3.4686197183098598e-06,
      "loss": 1.183,
      "step": 180000
    },
    {
      "epoch": 124.48,
      "learning_rate": 3.018366197183099e-06,
      "loss": 1.183,
      "step": 181000
    },
    {
      "epoch": 125.17,
      "learning_rate": 2.568112676056338e-06,
      "loss": 1.1828,
      "step": 182000
    },
    {
      "epoch": 125.86,
      "learning_rate": 2.1178591549295775e-06,
      "loss": 1.1813,
      "step": 183000
    },
    {
      "epoch": 126.55,
      "learning_rate": 1.6676056338028171e-06,
      "loss": 1.1819,
      "step": 184000
    },
    {
      "epoch": 127.24,
      "learning_rate": 1.2173521126760563e-06,
      "loss": 1.1819,
      "step": 185000
    },
    {
      "epoch": 127.92,
      "learning_rate": 7.670985915492958e-07,
      "loss": 1.1809,
      "step": 186000
    },
    {
      "epoch": 128.61,
      "learning_rate": 3.1684507042253523e-07,
      "loss": 1.1809,
      "step": 187000
    },
    {
      "epoch": 128.95,
      "step": 187500,
      "total_flos": 1.0103830689474203e+20,
      "train_loss": 0.110439400390625,
      "train_runtime": 5222.9883,
      "train_samples_per_second": 73521.13,
      "train_steps_per_second": 35.899
    }
  ],
  "max_steps": 187500,
  "num_train_epochs": 129,
  "total_flos": 1.0103830689474203e+20,
  "trial_name": null,
  "trial_params": null
}