{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 939,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
| { | |
| "epoch": 0.003194888178913738, | |
| "grad_norm": 6.409920785382119, | |
| "learning_rate": 2.1276595744680852e-07, | |
| "loss": 0.9383, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.006389776357827476, | |
| "grad_norm": 5.882817659413674, | |
| "learning_rate": 4.2553191489361704e-07, | |
| "loss": 0.8501, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.009584664536741214, | |
| "grad_norm": 5.949158200484977, | |
| "learning_rate": 6.382978723404255e-07, | |
| "loss": 0.8872, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.012779552715654952, | |
| "grad_norm": 5.705459169654703, | |
| "learning_rate": 8.510638297872341e-07, | |
| "loss": 0.8123, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.01597444089456869, | |
| "grad_norm": 6.165398865418825, | |
| "learning_rate": 1.0638297872340427e-06, | |
| "loss": 0.8867, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.019169329073482427, | |
| "grad_norm": 5.901621861739117, | |
| "learning_rate": 1.276595744680851e-06, | |
| "loss": 0.848, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.022364217252396165, | |
| "grad_norm": 5.729605975390999, | |
| "learning_rate": 1.4893617021276596e-06, | |
| "loss": 0.8766, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.025559105431309903, | |
| "grad_norm": 4.7109637488813965, | |
| "learning_rate": 1.7021276595744682e-06, | |
| "loss": 0.8249, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.02875399361022364, | |
| "grad_norm": 4.440852197395593, | |
| "learning_rate": 1.9148936170212767e-06, | |
| "loss": 0.8056, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.03194888178913738, | |
| "grad_norm": 4.068255335955712, | |
| "learning_rate": 2.1276595744680853e-06, | |
| "loss": 0.7646, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.03514376996805112, | |
| "grad_norm": 2.9662994476369344, | |
| "learning_rate": 2.340425531914894e-06, | |
| "loss": 0.7915, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.038338658146964855, | |
| "grad_norm": 2.5528675082475676, | |
| "learning_rate": 2.553191489361702e-06, | |
| "loss": 0.8249, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.04153354632587859, | |
| "grad_norm": 2.442136937760188, | |
| "learning_rate": 2.765957446808511e-06, | |
| "loss": 0.767, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.04472843450479233, | |
| "grad_norm": 3.009485060078495, | |
| "learning_rate": 2.978723404255319e-06, | |
| "loss": 0.7684, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.04792332268370607, | |
| "grad_norm": 3.530505323803651, | |
| "learning_rate": 3.191489361702128e-06, | |
| "loss": 0.7301, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.051118210862619806, | |
| "grad_norm": 3.5912586753687012, | |
| "learning_rate": 3.4042553191489363e-06, | |
| "loss": 0.7483, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.054313099041533544, | |
| "grad_norm": 3.242695851912524, | |
| "learning_rate": 3.6170212765957453e-06, | |
| "loss": 0.7003, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.05750798722044728, | |
| "grad_norm": 3.1020257848632498, | |
| "learning_rate": 3.8297872340425535e-06, | |
| "loss": 0.7508, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.06070287539936102, | |
| "grad_norm": 3.1595177932395546, | |
| "learning_rate": 4.042553191489362e-06, | |
| "loss": 0.7397, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.06389776357827476, | |
| "grad_norm": 2.2887672112178294, | |
| "learning_rate": 4.255319148936171e-06, | |
| "loss": 0.669, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.0670926517571885, | |
| "grad_norm": 1.8739235547973887, | |
| "learning_rate": 4.468085106382979e-06, | |
| "loss": 0.6473, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.07028753993610223, | |
| "grad_norm": 1.6043319479062044, | |
| "learning_rate": 4.680851063829788e-06, | |
| "loss": 0.5916, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.07348242811501597, | |
| "grad_norm": 1.4903043423735904, | |
| "learning_rate": 4.893617021276596e-06, | |
| "loss": 0.7035, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.07667731629392971, | |
| "grad_norm": 1.5284966886424023, | |
| "learning_rate": 5.106382978723404e-06, | |
| "loss": 0.6841, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.07987220447284345, | |
| "grad_norm": 1.7400544873250572, | |
| "learning_rate": 5.319148936170213e-06, | |
| "loss": 0.608, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.08306709265175719, | |
| "grad_norm": 1.823355346961419, | |
| "learning_rate": 5.531914893617022e-06, | |
| "loss": 0.6812, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.08626198083067092, | |
| "grad_norm": 1.5434873639853426, | |
| "learning_rate": 5.744680851063831e-06, | |
| "loss": 0.6243, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.08945686900958466, | |
| "grad_norm": 1.4366139254965609, | |
| "learning_rate": 5.957446808510638e-06, | |
| "loss": 0.6559, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.0926517571884984, | |
| "grad_norm": 1.2885186500127617, | |
| "learning_rate": 6.170212765957447e-06, | |
| "loss": 0.6615, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.09584664536741214, | |
| "grad_norm": 1.24950215759162, | |
| "learning_rate": 6.382978723404256e-06, | |
| "loss": 0.6502, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.09904153354632587, | |
| "grad_norm": 1.3498220957297518, | |
| "learning_rate": 6.595744680851064e-06, | |
| "loss": 0.6197, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.10223642172523961, | |
| "grad_norm": 1.3715708826482595, | |
| "learning_rate": 6.808510638297873e-06, | |
| "loss": 0.648, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.10543130990415335, | |
| "grad_norm": 1.2724124105045853, | |
| "learning_rate": 7.021276595744682e-06, | |
| "loss": 0.5946, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.10862619808306709, | |
| "grad_norm": 1.0470319859101942, | |
| "learning_rate": 7.234042553191491e-06, | |
| "loss": 0.6183, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.11182108626198083, | |
| "grad_norm": 1.0168611398932235, | |
| "learning_rate": 7.446808510638298e-06, | |
| "loss": 0.6104, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.11501597444089456, | |
| "grad_norm": 1.0363638467793455, | |
| "learning_rate": 7.659574468085107e-06, | |
| "loss": 0.5497, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.1182108626198083, | |
| "grad_norm": 1.0065437324012412, | |
| "learning_rate": 7.872340425531916e-06, | |
| "loss": 0.6082, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.12140575079872204, | |
| "grad_norm": 0.9397900918794712, | |
| "learning_rate": 8.085106382978723e-06, | |
| "loss": 0.607, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.12460063897763578, | |
| "grad_norm": 0.8971589534820209, | |
| "learning_rate": 8.297872340425532e-06, | |
| "loss": 0.5706, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.12779552715654952, | |
| "grad_norm": 0.9733107673169904, | |
| "learning_rate": 8.510638297872341e-06, | |
| "loss": 0.5445, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.13099041533546327, | |
| "grad_norm": 1.0242616537914584, | |
| "learning_rate": 8.72340425531915e-06, | |
| "loss": 0.5692, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.134185303514377, | |
| "grad_norm": 0.9024153216202099, | |
| "learning_rate": 8.936170212765958e-06, | |
| "loss": 0.5916, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.13738019169329074, | |
| "grad_norm": 0.9635264772129024, | |
| "learning_rate": 9.148936170212767e-06, | |
| "loss": 0.5842, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.14057507987220447, | |
| "grad_norm": 1.0056120995653648, | |
| "learning_rate": 9.361702127659576e-06, | |
| "loss": 0.5588, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.14376996805111822, | |
| "grad_norm": 0.9275308523019801, | |
| "learning_rate": 9.574468085106385e-06, | |
| "loss": 0.5985, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.14696485623003194, | |
| "grad_norm": 1.0395462293521542, | |
| "learning_rate": 9.787234042553192e-06, | |
| "loss": 0.5751, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.1501597444089457, | |
| "grad_norm": 0.8523118843095494, | |
| "learning_rate": 1e-05, | |
| "loss": 0.6098, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.15335463258785942, | |
| "grad_norm": 0.8942183665281777, | |
| "learning_rate": 1.0212765957446808e-05, | |
| "loss": 0.5898, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.15654952076677317, | |
| "grad_norm": 0.8818105787041829, | |
| "learning_rate": 1.0425531914893619e-05, | |
| "loss": 0.5829, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.1597444089456869, | |
| "grad_norm": 1.0300441845112693, | |
| "learning_rate": 1.0638297872340426e-05, | |
| "loss": 0.5514, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.16293929712460065, | |
| "grad_norm": 1.0423440565604452, | |
| "learning_rate": 1.0851063829787233e-05, | |
| "loss": 0.593, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.16613418530351437, | |
| "grad_norm": 0.949481155031613, | |
| "learning_rate": 1.1063829787234044e-05, | |
| "loss": 0.6092, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.16932907348242812, | |
| "grad_norm": 0.9369849481377671, | |
| "learning_rate": 1.1276595744680851e-05, | |
| "loss": 0.5577, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.17252396166134185, | |
| "grad_norm": 0.9618399687304353, | |
| "learning_rate": 1.1489361702127662e-05, | |
| "loss": 0.551, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.1757188498402556, | |
| "grad_norm": 0.9655092891280169, | |
| "learning_rate": 1.170212765957447e-05, | |
| "loss": 0.5664, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.17891373801916932, | |
| "grad_norm": 1.0520390138780007, | |
| "learning_rate": 1.1914893617021277e-05, | |
| "loss": 0.5341, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.18210862619808307, | |
| "grad_norm": 0.969457321374093, | |
| "learning_rate": 1.2127659574468087e-05, | |
| "loss": 0.5643, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.1853035143769968, | |
| "grad_norm": 0.8852786189340646, | |
| "learning_rate": 1.2340425531914895e-05, | |
| "loss": 0.543, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.18849840255591055, | |
| "grad_norm": 1.1488496953158756, | |
| "learning_rate": 1.2553191489361702e-05, | |
| "loss": 0.5653, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.19169329073482427, | |
| "grad_norm": 1.1296682096329418, | |
| "learning_rate": 1.2765957446808513e-05, | |
| "loss": 0.5565, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.19488817891373802, | |
| "grad_norm": 0.8854908232020008, | |
| "learning_rate": 1.297872340425532e-05, | |
| "loss": 0.5551, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.19808306709265175, | |
| "grad_norm": 0.8324763984138673, | |
| "learning_rate": 1.3191489361702127e-05, | |
| "loss": 0.5148, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.2012779552715655, | |
| "grad_norm": 1.1990838238795918, | |
| "learning_rate": 1.3404255319148938e-05, | |
| "loss": 0.5541, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.20447284345047922, | |
| "grad_norm": 0.9277279636520797, | |
| "learning_rate": 1.3617021276595745e-05, | |
| "loss": 0.5484, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.20766773162939298, | |
| "grad_norm": 1.0648177480107957, | |
| "learning_rate": 1.3829787234042556e-05, | |
| "loss": 0.5834, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.2108626198083067, | |
| "grad_norm": 1.1696287149009121, | |
| "learning_rate": 1.4042553191489363e-05, | |
| "loss": 0.5202, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.21405750798722045, | |
| "grad_norm": 1.0169526918813456, | |
| "learning_rate": 1.425531914893617e-05, | |
| "loss": 0.5276, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.21725239616613418, | |
| "grad_norm": 1.0568993802356397, | |
| "learning_rate": 1.4468085106382981e-05, | |
| "loss": 0.5184, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.22044728434504793, | |
| "grad_norm": 1.1725904166905932, | |
| "learning_rate": 1.4680851063829789e-05, | |
| "loss": 0.5715, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.22364217252396165, | |
| "grad_norm": 1.0117321152787573, | |
| "learning_rate": 1.4893617021276596e-05, | |
| "loss": 0.5911, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.2268370607028754, | |
| "grad_norm": 0.8952457851305932, | |
| "learning_rate": 1.5106382978723407e-05, | |
| "loss": 0.5053, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.23003194888178913, | |
| "grad_norm": 1.0847433664961181, | |
| "learning_rate": 1.5319148936170214e-05, | |
| "loss": 0.5301, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.23322683706070288, | |
| "grad_norm": 0.9381541668113672, | |
| "learning_rate": 1.5531914893617023e-05, | |
| "loss": 0.5325, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.2364217252396166, | |
| "grad_norm": 0.9870753843267154, | |
| "learning_rate": 1.5744680851063832e-05, | |
| "loss": 0.5132, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.23961661341853036, | |
| "grad_norm": 0.8860262179526839, | |
| "learning_rate": 1.595744680851064e-05, | |
| "loss": 0.5175, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.24281150159744408, | |
| "grad_norm": 0.838264095104911, | |
| "learning_rate": 1.6170212765957446e-05, | |
| "loss": 0.5411, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.24600638977635783, | |
| "grad_norm": 0.9067720513160165, | |
| "learning_rate": 1.6382978723404255e-05, | |
| "loss": 0.4906, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.24920127795527156, | |
| "grad_norm": 0.9230169949949911, | |
| "learning_rate": 1.6595744680851064e-05, | |
| "loss": 0.5305, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.2523961661341853, | |
| "grad_norm": 1.0482361365213537, | |
| "learning_rate": 1.6808510638297873e-05, | |
| "loss": 0.5442, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.25559105431309903, | |
| "grad_norm": 0.8237515236418056, | |
| "learning_rate": 1.7021276595744682e-05, | |
| "loss": 0.5029, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.25878594249201275, | |
| "grad_norm": 0.8569463159961227, | |
| "learning_rate": 1.723404255319149e-05, | |
| "loss": 0.5536, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.26198083067092653, | |
| "grad_norm": 0.9656094274228123, | |
| "learning_rate": 1.74468085106383e-05, | |
| "loss": 0.5666, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.26517571884984026, | |
| "grad_norm": 0.879784737531998, | |
| "learning_rate": 1.765957446808511e-05, | |
| "loss": 0.5171, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.268370607028754, | |
| "grad_norm": 0.8745385454305299, | |
| "learning_rate": 1.7872340425531915e-05, | |
| "loss": 0.4754, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.2715654952076677, | |
| "grad_norm": 0.8610209256691467, | |
| "learning_rate": 1.8085106382978724e-05, | |
| "loss": 0.5713, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.2747603833865815, | |
| "grad_norm": 1.0200117897200043, | |
| "learning_rate": 1.8297872340425533e-05, | |
| "loss": 0.549, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.2779552715654952, | |
| "grad_norm": 1.0381714037531296, | |
| "learning_rate": 1.8510638297872342e-05, | |
| "loss": 0.5522, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.28115015974440893, | |
| "grad_norm": 0.9237569605348899, | |
| "learning_rate": 1.872340425531915e-05, | |
| "loss": 0.5825, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.28434504792332266, | |
| "grad_norm": 1.0109855714809834, | |
| "learning_rate": 1.893617021276596e-05, | |
| "loss": 0.4851, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.28753993610223644, | |
| "grad_norm": 0.9947036818367249, | |
| "learning_rate": 1.914893617021277e-05, | |
| "loss": 0.5195, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.29073482428115016, | |
| "grad_norm": 1.188164995511023, | |
| "learning_rate": 1.9361702127659575e-05, | |
| "loss": 0.5328, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.2939297124600639, | |
| "grad_norm": 0.9985112774386741, | |
| "learning_rate": 1.9574468085106384e-05, | |
| "loss": 0.4985, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.2971246006389776, | |
| "grad_norm": 1.147610921973416, | |
| "learning_rate": 1.9787234042553193e-05, | |
| "loss": 0.5353, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.3003194888178914, | |
| "grad_norm": 1.2244624011374305, | |
| "learning_rate": 2e-05, | |
| "loss": 0.5853, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.3035143769968051, | |
| "grad_norm": 1.0237954246117738, | |
| "learning_rate": 1.9999930887622756e-05, | |
| "loss": 0.5223, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.30670926517571884, | |
| "grad_norm": 0.9347818625754574, | |
| "learning_rate": 1.9999723551446323e-05, | |
| "loss": 0.5499, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.30990415335463256, | |
| "grad_norm": 0.9931906754170975, | |
| "learning_rate": 1.9999377994336602e-05, | |
| "loss": 0.5153, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.31309904153354634, | |
| "grad_norm": 1.100664016646544, | |
| "learning_rate": 1.9998894221070052e-05, | |
| "loss": 0.5421, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.31629392971246006, | |
| "grad_norm": 1.072898943505642, | |
| "learning_rate": 1.9998272238333606e-05, | |
| "loss": 0.522, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.3194888178913738, | |
| "grad_norm": 0.8788562431890847, | |
| "learning_rate": 1.9997512054724616e-05, | |
| "loss": 0.5768, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.3226837060702875, | |
| "grad_norm": 1.005114294076789, | |
| "learning_rate": 1.99966136807507e-05, | |
| "loss": 0.5366, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.3258785942492013, | |
| "grad_norm": 0.9818208632299064, | |
| "learning_rate": 1.99955771288296e-05, | |
| "loss": 0.5629, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.329073482428115, | |
| "grad_norm": 0.9550602655061554, | |
| "learning_rate": 1.9994402413289044e-05, | |
| "loss": 0.5057, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.33226837060702874, | |
| "grad_norm": 0.8574474170220133, | |
| "learning_rate": 1.99930895503665e-05, | |
| "loss": 0.5436, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.3354632587859425, | |
| "grad_norm": 0.8630661623280808, | |
| "learning_rate": 1.9991638558208987e-05, | |
| "loss": 0.5043, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.33865814696485624, | |
| "grad_norm": 0.9861864624707406, | |
| "learning_rate": 1.9990049456872805e-05, | |
| "loss": 0.4998, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.34185303514376997, | |
| "grad_norm": 1.1731144874278645, | |
| "learning_rate": 1.998832226832327e-05, | |
| "loss": 0.5621, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.3450479233226837, | |
| "grad_norm": 0.9198118564044715, | |
| "learning_rate": 1.9986457016434402e-05, | |
| "loss": 0.5209, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.34824281150159747, | |
| "grad_norm": 0.9818139368666934, | |
| "learning_rate": 1.99844537269886e-05, | |
| "loss": 0.5304, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.3514376996805112, | |
| "grad_norm": 0.871284930323514, | |
| "learning_rate": 1.9982312427676286e-05, | |
| "loss": 0.5295, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.3546325878594249, | |
| "grad_norm": 1.1442536730263613, | |
| "learning_rate": 1.9980033148095514e-05, | |
| "loss": 0.6233, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.35782747603833864, | |
| "grad_norm": 1.0303042499178714, | |
| "learning_rate": 1.997761591975157e-05, | |
| "loss": 0.5829, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.3610223642172524, | |
| "grad_norm": 0.9699824744336917, | |
| "learning_rate": 1.9975060776056538e-05, | |
| "loss": 0.5688, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.36421725239616615, | |
| "grad_norm": 0.9059578869436408, | |
| "learning_rate": 1.9972367752328824e-05, | |
| "loss": 0.6128, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.36741214057507987, | |
| "grad_norm": 1.0030044320881064, | |
| "learning_rate": 1.9969536885792685e-05, | |
| "loss": 0.491, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.3706070287539936, | |
| "grad_norm": 0.880432268699565, | |
| "learning_rate": 1.9966568215577704e-05, | |
| "loss": 0.525, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.3738019169329074, | |
| "grad_norm": 0.9288360039901852, | |
| "learning_rate": 1.9963461782718247e-05, | |
| "loss": 0.4981, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.3769968051118211, | |
| "grad_norm": 0.8685285833946184, | |
| "learning_rate": 1.996021763015291e-05, | |
| "loss": 0.5174, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.3801916932907348, | |
| "grad_norm": 0.9456704365811888, | |
| "learning_rate": 1.9956835802723916e-05, | |
| "loss": 0.5116, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.38338658146964855, | |
| "grad_norm": 0.8672982876474767, | |
| "learning_rate": 1.995331634717649e-05, | |
| "loss": 0.5191, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.3865814696485623, | |
| "grad_norm": 1.2439896447102605, | |
| "learning_rate": 1.9949659312158212e-05, | |
| "loss": 0.5418, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.38977635782747605, | |
| "grad_norm": 1.1971242384225746, | |
| "learning_rate": 1.9945864748218368e-05, | |
| "loss": 0.5135, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.3929712460063898, | |
| "grad_norm": 0.8834188032057455, | |
| "learning_rate": 1.994193270780722e-05, | |
| "loss": 0.555, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.3961661341853035, | |
| "grad_norm": 0.9505972320015716, | |
| "learning_rate": 1.9937863245275303e-05, | |
| "loss": 0.5506, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.3993610223642173, | |
| "grad_norm": 1.2212739220097744, | |
| "learning_rate": 1.9933656416872664e-05, | |
| "loss": 0.5404, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.402555910543131, | |
| "grad_norm": 0.9304190644575443, | |
| "learning_rate": 1.992931228074808e-05, | |
| "loss": 0.6051, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.4057507987220447, | |
| "grad_norm": 0.9409095688366127, | |
| "learning_rate": 1.9924830896948268e-05, | |
| "loss": 0.5515, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.40894568690095845, | |
| "grad_norm": 0.8699906408491971, | |
| "learning_rate": 1.9920212327417044e-05, | |
| "loss": 0.5022, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.41214057507987223, | |
| "grad_norm": 0.9946776707428178, | |
| "learning_rate": 1.991545663599448e-05, | |
| "loss": 0.5309, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.41533546325878595, | |
| "grad_norm": 1.01014181570919, | |
| "learning_rate": 1.9910563888415996e-05, | |
| "loss": 0.5681, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.4185303514376997, | |
| "grad_norm": 0.93629937085659, | |
| "learning_rate": 1.990553415231148e-05, | |
| "loss": 0.5576, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.4217252396166134, | |
| "grad_norm": 1.1184865559025652, | |
| "learning_rate": 1.9900367497204328e-05, | |
| "loss": 0.5088, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.4249201277955272, | |
| "grad_norm": 1.0388424246571961, | |
| "learning_rate": 1.9895063994510512e-05, | |
| "loss": 0.5269, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.4281150159744409, | |
| "grad_norm": 0.9588973054432186, | |
| "learning_rate": 1.9889623717537564e-05, | |
| "loss": 0.5914, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.43130990415335463, | |
| "grad_norm": 1.0047992755730388, | |
| "learning_rate": 1.988404674148358e-05, | |
| "loss": 0.5335, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.43450479233226835, | |
| "grad_norm": 0.9513706884109058, | |
| "learning_rate": 1.9878333143436172e-05, | |
| "loss": 0.5252, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.43769968051118213, | |
| "grad_norm": 0.9491047212617997, | |
| "learning_rate": 1.987248300237141e-05, | |
| "loss": 0.5289, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.44089456869009586, | |
| "grad_norm": 1.033338903728736, | |
| "learning_rate": 1.9866496399152732e-05, | |
| "loss": 0.548, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.4440894568690096, | |
| "grad_norm": 0.981330967452213, | |
| "learning_rate": 1.9860373416529804e-05, | |
| "loss": 0.5533, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.4472843450479233, | |
| "grad_norm": 0.9771678046126266, | |
| "learning_rate": 1.9854114139137408e-05, | |
| "loss": 0.5165, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.4504792332268371, | |
| "grad_norm": 0.9318509073079462, | |
| "learning_rate": 1.984771865349425e-05, | |
| "loss": 0.5897, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.4536741214057508, | |
| "grad_norm": 0.9749289154723085, | |
| "learning_rate": 1.9841187048001774e-05, | |
| "loss": 0.5209, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.45686900958466453, | |
| "grad_norm": 0.9585929892181612, | |
| "learning_rate": 1.9834519412942938e-05, | |
| "loss": 0.5318, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.46006389776357826, | |
| "grad_norm": 0.9990483753288364, | |
| "learning_rate": 1.9827715840480962e-05, | |
| "loss": 0.5166, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.46325878594249204, | |
| "grad_norm": 1.0335238778784028, | |
| "learning_rate": 1.982077642465806e-05, | |
| "loss": 0.5423, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.46645367412140576, | |
| "grad_norm": 1.0867942922933842, | |
| "learning_rate": 1.9813701261394136e-05, | |
| "loss": 0.525, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.4696485623003195, | |
| "grad_norm": 1.116159290481366, | |
| "learning_rate": 1.9806490448485463e-05, | |
| "loss": 0.5217, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.4728434504792332, | |
| "grad_norm": 1.0276431277812408, | |
| "learning_rate": 1.9799144085603323e-05, | |
| "loss": 0.5613, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.476038338658147, | |
| "grad_norm": 0.9607572461751404, | |
| "learning_rate": 1.9791662274292638e-05, | |
| "loss": 0.5696, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.4792332268370607, | |
| "grad_norm": 0.7743742651857493, | |
| "learning_rate": 1.978404511797056e-05, | |
| "loss": 0.4712, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.48242811501597443, | |
| "grad_norm": 0.9379781740009565, | |
| "learning_rate": 1.9776292721925046e-05, | |
| "loss": 0.5456, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.48562300319488816, | |
| "grad_norm": 0.9500162315992806, | |
| "learning_rate": 1.97684051933134e-05, | |
| "loss": 0.5093, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.48881789137380194, | |
| "grad_norm": 0.9463427069451845, | |
| "learning_rate": 1.9760382641160792e-05, | |
| "loss": 0.523, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.49201277955271566, | |
| "grad_norm": 0.964902547990394, | |
| "learning_rate": 1.9752225176358757e-05, | |
| "loss": 0.5154, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.4952076677316294, | |
| "grad_norm": 0.9266397117812072, | |
| "learning_rate": 1.974393291166364e-05, | |
| "loss": 0.5242, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.4984025559105431, | |
| "grad_norm": 1.0373423802821105, | |
| "learning_rate": 1.973550596169508e-05, | |
| "loss": 0.5526, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.5015974440894568, | |
| "grad_norm": 0.8331184703908079, | |
| "learning_rate": 1.9726944442934378e-05, | |
| "loss": 0.4775, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.5047923322683706, | |
| "grad_norm": 0.8424437954994451, | |
| "learning_rate": 1.971824847372292e-05, | |
| "loss": 0.4946, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.5079872204472844, | |
| "grad_norm": 0.8924532125572432, | |
| "learning_rate": 1.9709418174260523e-05, | |
| "loss": 0.5213, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.5111821086261981, | |
| "grad_norm": 1.0238137951842978, | |
| "learning_rate": 1.9700453666603786e-05, | |
| "loss": 0.504, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.5143769968051118, | |
| "grad_norm": 0.8696826053287715, | |
| "learning_rate": 1.96913550746644e-05, | |
| "loss": 0.5285, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.5175718849840255, | |
| "grad_norm": 1.1419625361492167, | |
| "learning_rate": 1.9682122524207426e-05, | |
| "loss": 0.5258, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.5207667731629393, | |
| "grad_norm": 1.0717562011005641, | |
| "learning_rate": 1.9672756142849564e-05, | |
| "loss": 0.5622, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.5239616613418531, | |
| "grad_norm": 1.082362120758369, | |
| "learning_rate": 1.9663256060057395e-05, | |
| "loss": 0.5459, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.5271565495207667, | |
| "grad_norm": 0.9058526491846522, | |
| "learning_rate": 1.965362240714557e-05, | |
| "loss": 0.535, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.5303514376996805, | |
| "grad_norm": 1.0636353754052472, | |
| "learning_rate": 1.9643855317275036e-05, | |
| "loss": 0.5187, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.5335463258785943, | |
| "grad_norm": 0.8841768134611658, | |
| "learning_rate": 1.963395492545114e-05, | |
| "loss": 0.4926, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.536741214057508, | |
| "grad_norm": 0.8454615826601668, | |
| "learning_rate": 1.9623921368521814e-05, | |
| "loss": 0.4925, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.5399361022364217, | |
| "grad_norm": 0.8525089279579146, | |
| "learning_rate": 1.961375478517564e-05, | |
| "loss": 0.554, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.5431309904153354, | |
| "grad_norm": 0.9507938815359402, | |
| "learning_rate": 1.9603455315939976e-05, | |
| "loss": 0.5381, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.5463258785942492, | |
| "grad_norm": 0.8663401759485011, | |
| "learning_rate": 1.9593023103178984e-05, | |
| "loss": 0.5398, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.549520766773163, | |
| "grad_norm": 0.9927086656087859, | |
| "learning_rate": 1.9582458291091664e-05, | |
| "loss": 0.5363, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.5527156549520766, | |
| "grad_norm": 0.8142362736099577, | |
| "learning_rate": 1.9571761025709874e-05, | |
| "loss": 0.5441, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.5559105431309904, | |
| "grad_norm": 0.9363468107783036, | |
| "learning_rate": 1.95609314548963e-05, | |
| "loss": 0.5771, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.5591054313099042, | |
| "grad_norm": 0.7489334767030862, | |
| "learning_rate": 1.954996972834242e-05, | |
| "loss": 0.5208, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.5623003194888179, | |
| "grad_norm": 0.934302740617694, | |
| "learning_rate": 1.953887599756643e-05, | |
| "loss": 0.5107, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.5654952076677316, | |
| "grad_norm": 0.8012783490329485, | |
| "learning_rate": 1.9527650415911154e-05, | |
| "loss": 0.5377, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.5686900958466453, | |
| "grad_norm": 0.9181068406934503, | |
| "learning_rate": 1.9516293138541914e-05, | |
| "loss": 0.5111, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.5718849840255591, | |
| "grad_norm": 0.8305500501619189, | |
| "learning_rate": 1.95048043224444e-05, | |
| "loss": 0.5088, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.5750798722044729, | |
| "grad_norm": 1.066440074003196, | |
| "learning_rate": 1.9493184126422494e-05, | |
| "loss": 0.5453, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.5782747603833865, | |
| "grad_norm": 1.107675123138524, | |
| "learning_rate": 1.948143271109606e-05, | |
| "loss": 0.5425, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.5814696485623003, | |
| "grad_norm": 0.8978276499968547, | |
| "learning_rate": 1.946955023889876e-05, | |
| "loss": 0.527, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.5846645367412141, | |
| "grad_norm": 0.7949008147878583, | |
| "learning_rate": 1.9457536874075765e-05, | |
| "loss": 0.5247, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.5878594249201278, | |
| "grad_norm": 0.974636844868348, | |
| "learning_rate": 1.9445392782681523e-05, | |
| "loss": 0.5075, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.5910543130990416, | |
| "grad_norm": 0.9163909881555694, | |
| "learning_rate": 1.9433118132577432e-05, | |
| "loss": 0.5242, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.5942492012779552, | |
| "grad_norm": 0.9615667703817601, | |
| "learning_rate": 1.9420713093429548e-05, | |
| "loss": 0.4878, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.597444089456869, | |
| "grad_norm": 0.8959730227876819, | |
| "learning_rate": 1.9408177836706215e-05, | |
| "loss": 0.5006, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.6006389776357828, | |
| "grad_norm": 0.9791864357100734, | |
| "learning_rate": 1.939551253567571e-05, | |
| "loss": 0.508, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.6038338658146964, | |
| "grad_norm": 0.8656035894165334, | |
| "learning_rate": 1.9382717365403854e-05, | |
| "loss": 0.4921, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.6070287539936102, | |
| "grad_norm": 1.0378802554109634, | |
| "learning_rate": 1.9369792502751568e-05, | |
| "loss": 0.5269, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.610223642172524, | |
| "grad_norm": 0.843711222780129, | |
| "learning_rate": 1.9356738126372448e-05, | |
| "loss": 0.4818, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.6134185303514377, | |
| "grad_norm": 0.9506506682319595, | |
| "learning_rate": 1.9343554416710293e-05, | |
| "loss": 0.5222, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.6166134185303515, | |
| "grad_norm": 0.9158618299618471, | |
| "learning_rate": 1.9330241555996606e-05, | |
| "loss": 0.5458, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.6198083067092651, | |
| "grad_norm": 0.9450209037071783, | |
| "learning_rate": 1.9316799728248074e-05, | |
| "loss": 0.5328, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.6230031948881789, | |
| "grad_norm": 0.9060814081186569, | |
| "learning_rate": 1.930322911926404e-05, | |
| "loss": 0.5213, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.6261980830670927, | |
| "grad_norm": 0.9414520961878969, | |
| "learning_rate": 1.92895299166239e-05, | |
| "loss": 0.5231, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.6293929712460063, | |
| "grad_norm": 0.7921738327634565, | |
| "learning_rate": 1.9275702309684557e-05, | |
| "loss": 0.5229, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.6325878594249201, | |
| "grad_norm": 0.8314517204939224, | |
| "learning_rate": 1.9261746489577767e-05, | |
| "loss": 0.4838, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.6357827476038339, | |
| "grad_norm": 0.849639036650939, | |
| "learning_rate": 1.924766264920751e-05, | |
| "loss": 0.4933, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.6389776357827476, | |
| "grad_norm": 0.847794453627695, | |
| "learning_rate": 1.923345098324732e-05, | |
| "loss": 0.4876, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.6421725239616614, | |
| "grad_norm": 0.9533248917368539, | |
| "learning_rate": 1.92191116881376e-05, | |
| "loss": 0.527, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.645367412140575, | |
| "grad_norm": 0.8517039461818795, | |
| "learning_rate": 1.9204644962082915e-05, | |
| "loss": 0.54, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.6485623003194888, | |
| "grad_norm": 0.9145485201406112, | |
| "learning_rate": 1.9190051005049218e-05, | |
| "loss": 0.4843, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.6517571884984026, | |
| "grad_norm": 1.0820771145546035, | |
| "learning_rate": 1.917533001876113e-05, | |
| "loss": 0.5369, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.6549520766773163, | |
| "grad_norm": 0.9383437313008715, | |
| "learning_rate": 1.9160482206699124e-05, | |
| "loss": 0.4931, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.65814696485623, | |
| "grad_norm": 0.9268071001516084, | |
| "learning_rate": 1.9145507774096706e-05, | |
| "loss": 0.5258, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.6613418530351438, | |
| "grad_norm": 1.0204489467118907, | |
| "learning_rate": 1.9130406927937616e-05, | |
| "loss": 0.4991, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.6645367412140575, | |
| "grad_norm": 0.9779602496725159, | |
| "learning_rate": 1.9115179876952925e-05, | |
| "loss": 0.5631, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.6677316293929713, | |
| "grad_norm": 0.8699335339360664, | |
| "learning_rate": 1.9099826831618168e-05, | |
| "loss": 0.5526, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.670926517571885, | |
| "grad_norm": 0.8695595019031475, | |
| "learning_rate": 1.9084348004150442e-05, | |
| "loss": 0.4992, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.6741214057507987, | |
| "grad_norm": 0.9392019689943826, | |
| "learning_rate": 1.9068743608505454e-05, | |
| "loss": 0.521, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.6773162939297125, | |
| "grad_norm": 0.8143307536090024, | |
| "learning_rate": 1.9053013860374588e-05, | |
| "loss": 0.5414, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.6805111821086262, | |
| "grad_norm": 0.9222591688630268, | |
| "learning_rate": 1.9037158977181894e-05, | |
| "loss": 0.5494, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.6837060702875399, | |
| "grad_norm": 0.9812040866307158, | |
| "learning_rate": 1.9021179178081107e-05, | |
| "loss": 0.5621, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.6869009584664537, | |
| "grad_norm": 0.8094811465097489, | |
| "learning_rate": 1.900507468395261e-05, | |
| "loss": 0.4696, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.6900958466453674, | |
| "grad_norm": 0.9772505283003321, | |
| "learning_rate": 1.8988845717400375e-05, | |
| "loss": 0.5201, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.6932907348242812, | |
| "grad_norm": 0.9431810275346089, | |
| "learning_rate": 1.89724925027489e-05, | |
| "loss": 0.5601, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.6964856230031949, | |
| "grad_norm": 0.9561884071389894, | |
| "learning_rate": 1.8956015266040086e-05, | |
| "loss": 0.5517, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.6996805111821086, | |
| "grad_norm": 1.058918826761409, | |
| "learning_rate": 1.8939414235030137e-05, | |
| "loss": 0.5498, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.7028753993610224, | |
| "grad_norm": 0.953107551204602, | |
| "learning_rate": 1.8922689639186387e-05, | |
| "loss": 0.5442, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.7060702875399361, | |
| "grad_norm": 1.0633099150216911, | |
| "learning_rate": 1.890584170968417e-05, | |
| "loss": 0.516, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.7092651757188498, | |
| "grad_norm": 0.8612205651102657, | |
| "learning_rate": 1.888887067940356e-05, | |
| "loss": 0.5438, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.7124600638977636, | |
| "grad_norm": 0.9596989186750442, | |
| "learning_rate": 1.8871776782926216e-05, | |
| "loss": 0.487, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.7156549520766773, | |
| "grad_norm": 0.8744406461422123, | |
| "learning_rate": 1.8854560256532098e-05, | |
| "loss": 0.4993, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.7188498402555911, | |
| "grad_norm": 0.8321794065646121, | |
| "learning_rate": 1.8837221338196228e-05, | |
| "loss": 0.5049, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.7220447284345048, | |
| "grad_norm": 0.983227338821679, | |
| "learning_rate": 1.8819760267585368e-05, | |
| "loss": 0.5522, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.7252396166134185, | |
| "grad_norm": 0.8335553459135192, | |
| "learning_rate": 1.880217728605474e-05, | |
| "loss": 0.5468, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.7284345047923323, | |
| "grad_norm": 0.8749858950751359, | |
| "learning_rate": 1.8784472636644677e-05, | |
| "loss": 0.5146, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.731629392971246, | |
| "grad_norm": 0.9530182539257116, | |
| "learning_rate": 1.8766646564077265e-05, | |
| "loss": 0.52, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.7348242811501597, | |
| "grad_norm": 0.8423941851636887, | |
| "learning_rate": 1.8748699314752947e-05, | |
| "loss": 0.5076, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.7380191693290735, | |
| "grad_norm": 1.0674983177020294, | |
| "learning_rate": 1.8730631136747138e-05, | |
| "loss": 0.5162, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.7412140575079872, | |
| "grad_norm": 0.7547725014119157, | |
| "learning_rate": 1.871244227980679e-05, | |
| "loss": 0.5258, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.744408945686901, | |
| "grad_norm": 0.8478932101355969, | |
| "learning_rate": 1.8694132995346924e-05, | |
| "loss": 0.5008, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.7476038338658147, | |
| "grad_norm": 0.7976275751937845, | |
| "learning_rate": 1.8675703536447178e-05, | |
| "loss": 0.4979, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.7507987220447284, | |
| "grad_norm": 0.9815318332469554, | |
| "learning_rate": 1.8657154157848302e-05, | |
| "loss": 0.5145, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.7539936102236422, | |
| "grad_norm": 0.7607009059819897, | |
| "learning_rate": 1.8638485115948612e-05, | |
| "loss": 0.4847, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.7571884984025559, | |
| "grad_norm": 0.99749471399467, | |
| "learning_rate": 1.8619696668800494e-05, | |
| "loss": 0.5308, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.7603833865814696, | |
| "grad_norm": 0.8240339314330095, | |
| "learning_rate": 1.860078907610679e-05, | |
| "loss": 0.5046, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.7635782747603834, | |
| "grad_norm": 1.1284168756143547, | |
| "learning_rate": 1.858176259921724e-05, | |
| "loss": 0.5171, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.7667731629392971, | |
| "grad_norm": 0.8803422345999297, | |
| "learning_rate": 1.856261750112485e-05, | |
| "loss": 0.4918, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.7699680511182109, | |
| "grad_norm": 0.982879274763817, | |
| "learning_rate": 1.8543354046462275e-05, | |
| "loss": 0.5022, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.7731629392971247, | |
| "grad_norm": 0.9891187325137296, | |
| "learning_rate": 1.8523972501498136e-05, | |
| "loss": 0.48, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.7763578274760383, | |
| "grad_norm": 0.9467043035719503, | |
| "learning_rate": 1.850447313413337e-05, | |
| "loss": 0.4905, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.7795527156549521, | |
| "grad_norm": 1.0150262185499397, | |
| "learning_rate": 1.8484856213897496e-05, | |
| "loss": 0.5968, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.7827476038338658, | |
| "grad_norm": 0.8248018852173341, | |
| "learning_rate": 1.846512201194492e-05, | |
| "loss": 0.5219, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.7859424920127795, | |
| "grad_norm": 0.8832290166642166, | |
| "learning_rate": 1.844527080105116e-05, | |
| "loss": 0.5187, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.7891373801916933, | |
| "grad_norm": 1.009035400264542, | |
| "learning_rate": 1.8425302855609088e-05, | |
| "loss": 0.5627, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.792332268370607, | |
| "grad_norm": 0.8594597717171026, | |
| "learning_rate": 1.8405218451625144e-05, | |
| "loss": 0.514, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.7955271565495208, | |
| "grad_norm": 0.7078775094856737, | |
| "learning_rate": 1.8385017866715507e-05, | |
| "loss": 0.4998, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.7987220447284346, | |
| "grad_norm": 0.8371833475631262, | |
| "learning_rate": 1.8364701380102267e-05, | |
| "loss": 0.5127, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.8019169329073482, | |
| "grad_norm": 0.787045292407146, | |
| "learning_rate": 1.8344269272609567e-05, | |
| "loss": 0.4652, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.805111821086262, | |
| "grad_norm": 0.7958452197463651, | |
| "learning_rate": 1.83237218266597e-05, | |
| "loss": 0.5054, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.8083067092651757, | |
| "grad_norm": 0.8631229364890092, | |
| "learning_rate": 1.830305932626924e-05, | |
| "loss": 0.4939, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.8115015974440895, | |
| "grad_norm": 0.8057179814750826, | |
| "learning_rate": 1.8282282057045087e-05, | |
| "loss": 0.4639, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.8146964856230032, | |
| "grad_norm": 0.8064404110602067, | |
| "learning_rate": 1.8261390306180544e-05, | |
| "loss": 0.4844, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.8178913738019169, | |
| "grad_norm": 0.7816773313940623, | |
| "learning_rate": 1.8240384362451316e-05, | |
| "loss": 0.5076, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.8210862619808307, | |
| "grad_norm": 0.9362930862782485, | |
| "learning_rate": 1.8219264516211543e-05, | |
| "loss": 0.4893, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.8242811501597445, | |
| "grad_norm": 0.9138184401157835, | |
| "learning_rate": 1.819803105938979e-05, | |
| "loss": 0.4813, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.8274760383386581, | |
| "grad_norm": 0.9494662952500172, | |
| "learning_rate": 1.8176684285484985e-05, | |
| "loss": 0.4997, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.8306709265175719, | |
| "grad_norm": 0.8364808839715582, | |
| "learning_rate": 1.815522448956239e-05, | |
| "loss": 0.5245, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.8338658146964856, | |
| "grad_norm": 1.0245391927888499, | |
| "learning_rate": 1.8133651968249503e-05, | |
| "loss": 0.511, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.8370607028753994, | |
| "grad_norm": 0.8892995493967196, | |
| "learning_rate": 1.8111967019731977e-05, | |
| "loss": 0.5461, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.8402555910543131, | |
| "grad_norm": 0.8390501367891469, | |
| "learning_rate": 1.8090169943749477e-05, | |
| "loss": 0.533, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.8434504792332268, | |
| "grad_norm": 0.9936383454596885, | |
| "learning_rate": 1.8068261041591548e-05, | |
| "loss": 0.5595, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.8466453674121406, | |
| "grad_norm": 0.8843876722914896, | |
| "learning_rate": 1.8046240616093452e-05, | |
| "loss": 0.5091, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.8498402555910544, | |
| "grad_norm": 0.8462745657107433, | |
| "learning_rate": 1.802410897163199e-05, | |
| "loss": 0.4691, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.853035143769968, | |
| "grad_norm": 0.8659375070431975, | |
| "learning_rate": 1.800186641412126e-05, | |
| "loss": 0.495, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.8562300319488818, | |
| "grad_norm": 0.9944074080731445, | |
| "learning_rate": 1.7979513251008475e-05, | |
| "loss": 0.4873, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.8594249201277955, | |
| "grad_norm": 0.9728054270611681, | |
| "learning_rate": 1.7957049791269684e-05, | |
| "loss": 0.5081, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.8626198083067093, | |
| "grad_norm": 0.8692424606646845, | |
| "learning_rate": 1.7934476345405508e-05, | |
| "loss": 0.5499, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.865814696485623, | |
| "grad_norm": 1.1943727229261953, | |
| "learning_rate": 1.791179322543684e-05, | |
| "loss": 0.5376, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.8690095846645367, | |
| "grad_norm": 1.0506722546815361, | |
| "learning_rate": 1.788900074490056e-05, | |
| "loss": 0.5016, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.8722044728434505, | |
| "grad_norm": 0.9965124501430621, | |
| "learning_rate": 1.7866099218845164e-05, | |
| "loss": 0.4912, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.8753993610223643, | |
| "grad_norm": 1.0256398553570618, | |
| "learning_rate": 1.7843088963826437e-05, | |
| "loss": 0.5407, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.8785942492012779, | |
| "grad_norm": 0.8102238962254681, | |
| "learning_rate": 1.7819970297903056e-05, | |
| "loss": 0.5202, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.8817891373801917, | |
| "grad_norm": 1.1591745272192227, | |
| "learning_rate": 1.7796743540632226e-05, | |
| "loss": 0.5356, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.8849840255591054, | |
| "grad_norm": 1.0153401381137175, | |
| "learning_rate": 1.7773409013065222e-05, | |
| "loss": 0.5088, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.8881789137380192, | |
| "grad_norm": 0.8212195883494724, | |
| "learning_rate": 1.7749967037742976e-05, | |
| "loss": 0.4674, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.8913738019169329, | |
| "grad_norm": 1.0157100696911838, | |
| "learning_rate": 1.772641793869162e-05, | |
| "loss": 0.5141, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.8945686900958466, | |
| "grad_norm": 0.9808941118590042, | |
| "learning_rate": 1.7702762041418e-05, | |
| "loss": 0.5436, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.8977635782747604, | |
| "grad_norm": 0.9347484669225299, | |
| "learning_rate": 1.7678999672905167e-05, | |
| "loss": 0.5469, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.9009584664536742, | |
| "grad_norm": 0.7877300604114963, | |
| "learning_rate": 1.7655131161607887e-05, | |
| "loss": 0.477, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.9041533546325878, | |
| "grad_norm": 0.9228991931025834, | |
| "learning_rate": 1.7631156837448063e-05, | |
| "loss": 0.4949, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.9073482428115016, | |
| "grad_norm": 0.8738629943355437, | |
| "learning_rate": 1.7607077031810204e-05, | |
| "loss": 0.4885, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.9105431309904153, | |
| "grad_norm": 0.9228825501229833, | |
| "learning_rate": 1.7582892077536832e-05, | |
| "loss": 0.5402, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.9137380191693291, | |
| "grad_norm": 0.7810257376041579, | |
| "learning_rate": 1.755860230892389e-05, | |
| "loss": 0.5139, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.9169329073482428, | |
| "grad_norm": 0.8073649232141846, | |
| "learning_rate": 1.75342080617161e-05, | |
| "loss": 0.5388, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.9201277955271565, | |
| "grad_norm": 0.865996445702188, | |
| "learning_rate": 1.750970967310235e-05, | |
| "loss": 0.5097, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.9233226837060703, | |
| "grad_norm": 0.7542868702308059, | |
| "learning_rate": 1.7485107481711014e-05, | |
| "loss": 0.4671, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.9265175718849841, | |
| "grad_norm": 1.0625341412401543, | |
| "learning_rate": 1.7460401827605277e-05, | |
| "loss": 0.5505, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.9297124600638977, | |
| "grad_norm": 0.8122297211204726, | |
| "learning_rate": 1.7435593052278437e-05, | |
| "loss": 0.4982, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.9329073482428115, | |
| "grad_norm": 0.946574361770195, | |
| "learning_rate": 1.741068149864918e-05, | |
| "loss": 0.5131, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.9361022364217252, | |
| "grad_norm": 1.0835158989817582, | |
| "learning_rate": 1.7385667511056853e-05, | |
| "loss": 0.5345, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.939297124600639, | |
| "grad_norm": 0.8670067651200516, | |
| "learning_rate": 1.7360551435256673e-05, | |
| "loss": 0.508, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.9424920127795527, | |
| "grad_norm": 0.9387855819886006, | |
| "learning_rate": 1.733533361841499e-05, | |
| "loss": 0.5033, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.9456869009584664, | |
| "grad_norm": 0.9542224381022377, | |
| "learning_rate": 1.7310014409104456e-05, | |
| "loss": 0.5782, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.9488817891373802, | |
| "grad_norm": 0.87126390248402, | |
| "learning_rate": 1.728459415729922e-05, | |
| "loss": 0.4991, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.952076677316294, | |
| "grad_norm": 0.8979696763884574, | |
| "learning_rate": 1.7259073214370085e-05, | |
| "loss": 0.5262, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.9552715654952076, | |
| "grad_norm": 0.8097687637292601, | |
| "learning_rate": 1.7233451933079663e-05, | |
| "loss": 0.5198, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.9584664536741214, | |
| "grad_norm": 0.904220596008717, | |
| "learning_rate": 1.720773066757748e-05, | |
| "loss": 0.5044, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.9616613418530351, | |
| "grad_norm": 0.9163146597366306, | |
| "learning_rate": 1.7181909773395108e-05, | |
| "loss": 0.4913, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.9648562300319489, | |
| "grad_norm": 0.8142143382005672, | |
| "learning_rate": 1.715598960744121e-05, | |
| "loss": 0.5032, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.9680511182108626, | |
| "grad_norm": 0.8738304555552013, | |
| "learning_rate": 1.7129970527996654e-05, | |
| "loss": 0.4819, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.9712460063897763, | |
| "grad_norm": 0.7808301311984981, | |
| "learning_rate": 1.7103852894709517e-05, | |
| "loss": 0.4706, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.9744408945686901, | |
| "grad_norm": 0.9343948345313137, | |
| "learning_rate": 1.707763706859015e-05, | |
| "loss": 0.4925, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.9776357827476039, | |
| "grad_norm": 0.8406849688228994, | |
| "learning_rate": 1.7051323412006167e-05, | |
| "loss": 0.4857, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.9808306709265175, | |
| "grad_norm": 0.8030422977982409, | |
| "learning_rate": 1.7024912288677434e-05, | |
| "loss": 0.5001, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.9840255591054313, | |
| "grad_norm": 0.7637836848154631, | |
| "learning_rate": 1.6998404063671064e-05, | |
| "loss": 0.5012, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.987220447284345, | |
| "grad_norm": 0.8696703757631259, | |
| "learning_rate": 1.6971799103396332e-05, | |
| "loss": 0.512, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.9904153354632588, | |
| "grad_norm": 0.926358777744064, | |
| "learning_rate": 1.6945097775599667e-05, | |
| "loss": 0.4864, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.9936102236421726, | |
| "grad_norm": 0.7337895110591559, | |
| "learning_rate": 1.69183004493595e-05, | |
| "loss": 0.4719, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.9968051118210862, | |
| "grad_norm": 0.9274672714820749, | |
| "learning_rate": 1.6891407495081228e-05, | |
| "loss": 0.5792, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.8821002895164746, | |
| "learning_rate": 1.686441928449204e-05, | |
| "loss": 0.5718, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.0031948881789137, | |
| "grad_norm": 0.9737433871203176, | |
| "learning_rate": 1.6837336190635824e-05, | |
| "loss": 0.4178, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.0063897763578276, | |
| "grad_norm": 0.8184630714297582, | |
| "learning_rate": 1.6810158587867973e-05, | |
| "loss": 0.423, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.0095846645367412, | |
| "grad_norm": 0.7010267787641967, | |
| "learning_rate": 1.6782886851850237e-05, | |
| "loss": 0.3532, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.012779552715655, | |
| "grad_norm": 0.7552621709700076, | |
| "learning_rate": 1.675552135954552e-05, | |
| "loss": 0.3665, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.0159744408945688, | |
| "grad_norm": 0.8245979183281297, | |
| "learning_rate": 1.672806248921266e-05, | |
| "loss": 0.3836, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.0191693290734825, | |
| "grad_norm": 0.8683334721491652, | |
| "learning_rate": 1.6700510620401223e-05, | |
| "loss": 0.3768, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.0223642172523961, | |
| "grad_norm": 0.9751233496086866, | |
| "learning_rate": 1.6672866133946244e-05, | |
| "loss": 0.4059, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.0255591054313098, | |
| "grad_norm": 0.838948811266222, | |
| "learning_rate": 1.664512941196295e-05, | |
| "loss": 0.4057, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.0287539936102237, | |
| "grad_norm": 0.9443479654525763, | |
| "learning_rate": 1.6617300837841502e-05, | |
| "loss": 0.4334, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.0319488817891374, | |
| "grad_norm": 0.8658369811689361, | |
| "learning_rate": 1.6589380796241687e-05, | |
| "loss": 0.3821, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.035143769968051, | |
| "grad_norm": 0.8860556738851113, | |
| "learning_rate": 1.6561369673087588e-05, | |
| "loss": 0.3899, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.038338658146965, | |
| "grad_norm": 0.6682829096727095, | |
| "learning_rate": 1.653326785556227e-05, | |
| "loss": 0.3522, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.0415335463258786, | |
| "grad_norm": 0.7410215144498764, | |
| "learning_rate": 1.650507573210242e-05, | |
| "loss": 0.3838, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.0447284345047922, | |
| "grad_norm": 0.9609136708860833, | |
| "learning_rate": 1.6476793692392966e-05, | |
| "loss": 0.4566, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.0479233226837061, | |
| "grad_norm": 0.6908849716070313, | |
| "learning_rate": 1.6448422127361707e-05, | |
| "loss": 0.3501, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.0511182108626198, | |
| "grad_norm": 0.837313532223091, | |
| "learning_rate": 1.641996142917391e-05, | |
| "loss": 0.382, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.0543130990415335, | |
| "grad_norm": 0.834950559417814, | |
| "learning_rate": 1.6391411991226873e-05, | |
| "loss": 0.4044, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.0575079872204474, | |
| "grad_norm": 0.8560681611142513, | |
| "learning_rate": 1.6362774208144504e-05, | |
| "loss": 0.435, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.060702875399361, | |
| "grad_norm": 0.8646105738148943, | |
| "learning_rate": 1.6334048475771855e-05, | |
| "loss": 0.3865, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.0638977635782747, | |
| "grad_norm": 0.9206127193709772, | |
| "learning_rate": 1.6305235191169654e-05, | |
| "loss": 0.4109, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.0670926517571886, | |
| "grad_norm": 0.7719035201516966, | |
| "learning_rate": 1.6276334752608823e-05, | |
| "loss": 0.356, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.0702875399361023, | |
| "grad_norm": 1.1225193958448179, | |
| "learning_rate": 1.6247347559564962e-05, | |
| "loss": 0.3787, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.073482428115016, | |
| "grad_norm": 0.8380968241340917, | |
| "learning_rate": 1.621827401271284e-05, | |
| "loss": 0.3143, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.0766773162939298, | |
| "grad_norm": 0.8303781325962523, | |
| "learning_rate": 1.6189114513920838e-05, | |
| "loss": 0.384, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.0798722044728435, | |
| "grad_norm": 1.129732310070753, | |
| "learning_rate": 1.6159869466245416e-05, | |
| "loss": 0.3807, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.0830670926517572, | |
| "grad_norm": 0.7401369861082813, | |
| "learning_rate": 1.613053927392553e-05, | |
| "loss": 0.4185, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.0862619808306708, | |
| "grad_norm": 0.795100265885747, | |
| "learning_rate": 1.610112434237704e-05, | |
| "loss": 0.4263, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.0894568690095847, | |
| "grad_norm": 0.9130074370141869, | |
| "learning_rate": 1.6071625078187113e-05, | |
| "loss": 0.4352, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.0926517571884984, | |
| "grad_norm": 0.7978114041961164, | |
| "learning_rate": 1.604204188910861e-05, | |
| "loss": 0.3718, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.095846645367412, | |
| "grad_norm": 0.7906733557235865, | |
| "learning_rate": 1.601237518405443e-05, | |
| "loss": 0.4199, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.099041533546326, | |
| "grad_norm": 0.7427188221038984, | |
| "learning_rate": 1.5982625373091877e-05, | |
| "loss": 0.3778, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.1022364217252396, | |
| "grad_norm": 0.8721565333693361, | |
| "learning_rate": 1.5952792867436984e-05, | |
| "loss": 0.3701, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.1054313099041533, | |
| "grad_norm": 0.8525717279830611, | |
| "learning_rate": 1.5922878079448827e-05, | |
| "loss": 0.4079, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.1086261980830672, | |
| "grad_norm": 0.9085260822651806, | |
| "learning_rate": 1.5892881422623826e-05, | |
| "loss": 0.3416, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.1118210862619808, | |
| "grad_norm": 0.844899253921564, | |
| "learning_rate": 1.5862803311590043e-05, | |
| "loss": 0.3841, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.1150159744408945, | |
| "grad_norm": 0.8533360961376679, | |
| "learning_rate": 1.5832644162101417e-05, | |
| "loss": 0.4002, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.1182108626198084, | |
| "grad_norm": 0.8830687311970081, | |
| "learning_rate": 1.580240439103206e-05, | |
| "loss": 0.3471, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.121405750798722, | |
| "grad_norm": 0.7529407391712328, | |
| "learning_rate": 1.577208441637046e-05, | |
| "loss": 0.3819, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.1246006389776357, | |
| "grad_norm": 0.926454792722682, | |
| "learning_rate": 1.5741684657213726e-05, | |
| "loss": 0.373, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.1277955271565494, | |
| "grad_norm": 0.8653791591805564, | |
| "learning_rate": 1.571120553376178e-05, | |
| "loss": 0.3994, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.1309904153354633, | |
| "grad_norm": 0.6366340699520703, | |
| "learning_rate": 1.568064746731156e-05, | |
| "loss": 0.3566, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.134185303514377, | |
| "grad_norm": 0.8133151809880976, | |
| "learning_rate": 1.565001088025119e-05, | |
| "loss": 0.3836, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.1373801916932909, | |
| "grad_norm": 0.795384552885173, | |
| "learning_rate": 1.5619296196054135e-05, | |
| "loss": 0.3711, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.1405750798722045, | |
| "grad_norm": 0.7607341893849747, | |
| "learning_rate": 1.5588503839273373e-05, | |
| "loss": 0.4241, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.1437699680511182, | |
| "grad_norm": 0.7704534797439848, | |
| "learning_rate": 1.5557634235535496e-05, | |
| "loss": 0.3941, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.1469648562300319, | |
| "grad_norm": 0.7128626541107346, | |
| "learning_rate": 1.552668781153484e-05, | |
| "loss": 0.341, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.1501597444089458, | |
| "grad_norm": 0.763750359311604, | |
| "learning_rate": 1.5495664995027594e-05, | |
| "loss": 0.3727, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.1533546325878594, | |
| "grad_norm": 0.7417308830642706, | |
| "learning_rate": 1.5464566214825876e-05, | |
| "loss": 0.4052, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.156549520766773, | |
| "grad_norm": 0.7993216873695975, | |
| "learning_rate": 1.543339190079182e-05, | |
| "loss": 0.3959, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.159744408945687, | |
| "grad_norm": 0.7715779361907723, | |
| "learning_rate": 1.5402142483831607e-05, | |
| "loss": 0.403, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.1629392971246006, | |
| "grad_norm": 0.8331596188661798, | |
| "learning_rate": 1.5370818395889536e-05, | |
| "loss": 0.3796, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.1661341853035143, | |
| "grad_norm": 0.8303258308472176, | |
| "learning_rate": 1.533942006994205e-05, | |
| "loss": 0.3828, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.1693290734824282, | |
| "grad_norm": 0.7308405138430362, | |
| "learning_rate": 1.530794793999173e-05, | |
| "loss": 0.3739, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.1725239616613419, | |
| "grad_norm": 0.6670562893393245, | |
| "learning_rate": 1.527640244106133e-05, | |
| "loss": 0.4019, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.1757188498402555, | |
| "grad_norm": 0.8948726572245147, | |
| "learning_rate": 1.5244784009187724e-05, | |
| "loss": 0.3774, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1789137380191694, | |
| "grad_norm": 0.7894907155797702, | |
| "learning_rate": 1.521309308141592e-05, | |
| "loss": 0.3486, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.182108626198083, | |
| "grad_norm": 0.7556853574798593, | |
| "learning_rate": 1.518133009579298e-05, | |
| "loss": 0.3935, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1853035143769968, | |
| "grad_norm": 0.8278844793998316, | |
| "learning_rate": 1.5149495491362e-05, | |
| "loss": 0.4155, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.1884984025559104, | |
| "grad_norm": 0.8208958496385201, | |
| "learning_rate": 1.5117589708156013e-05, | |
| "loss": 0.3603, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1916932907348243, | |
| "grad_norm": 0.7236738730838786, | |
| "learning_rate": 1.5085613187191926e-05, | |
| "loss": 0.3986, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.194888178913738, | |
| "grad_norm": 0.8755456163986371, | |
| "learning_rate": 1.5053566370464416e-05, | |
| "loss": 0.3945, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.1980830670926517, | |
| "grad_norm": 0.7911158266689877, | |
| "learning_rate": 1.502144970093982e-05, | |
| "loss": 0.3812, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.2012779552715656, | |
| "grad_norm": 0.7551583697235154, | |
| "learning_rate": 1.4989263622550017e-05, | |
| "loss": 0.3637, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.2044728434504792, | |
| "grad_norm": 0.7703326368866282, | |
| "learning_rate": 1.4957008580186276e-05, | |
| "loss": 0.3958, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.207667731629393, | |
| "grad_norm": 0.8019207400958424, | |
| "learning_rate": 1.4924685019693142e-05, | |
| "loss": 0.3753, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.2108626198083068, | |
| "grad_norm": 0.7447320316646009, | |
| "learning_rate": 1.4892293387862221e-05, | |
| "loss": 0.3678, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.2140575079872205, | |
| "grad_norm": 0.8236833165393697, | |
| "learning_rate": 1.485983413242606e-05, | |
| "loss": 0.4608, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.2172523961661341, | |
| "grad_norm": 0.809350983162518, | |
| "learning_rate": 1.4827307702051919e-05, | |
| "loss": 0.4427, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.220447284345048, | |
| "grad_norm": 0.6761221914505472, | |
| "learning_rate": 1.4794714546335578e-05, | |
| "loss": 0.4402, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.2236421725239617, | |
| "grad_norm": 0.7020134281897162, | |
| "learning_rate": 1.4762055115795136e-05, | |
| "loss": 0.3388, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.2268370607028753, | |
| "grad_norm": 0.8020099653178805, | |
| "learning_rate": 1.472932986186477e-05, | |
| "loss": 0.4078, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.230031948881789, | |
| "grad_norm": 0.8929243606671824, | |
| "learning_rate": 1.4696539236888495e-05, | |
| "loss": 0.4397, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.233226837060703, | |
| "grad_norm": 0.854936507274674, | |
| "learning_rate": 1.4663683694113924e-05, | |
| "loss": 0.4025, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.2364217252396166, | |
| "grad_norm": 0.8116806635693596, | |
| "learning_rate": 1.463076368768599e-05, | |
| "loss": 0.357, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.2396166134185305, | |
| "grad_norm": 0.7525728570276254, | |
| "learning_rate": 1.4597779672640668e-05, | |
| "loss": 0.4191, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.2428115015974441, | |
| "grad_norm": 0.8072471116119602, | |
| "learning_rate": 1.4564732104898702e-05, | |
| "loss": 0.3903, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.2460063897763578, | |
| "grad_norm": 0.7161886277690318, | |
| "learning_rate": 1.4531621441259285e-05, | |
| "loss": 0.4142, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.2492012779552715, | |
| "grad_norm": 0.7547658320106545, | |
| "learning_rate": 1.4498448139393752e-05, | |
| "loss": 0.398, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.2523961661341854, | |
| "grad_norm": 0.7773591617963772, | |
| "learning_rate": 1.4465212657839254e-05, | |
| "loss": 0.4327, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.255591054313099, | |
| "grad_norm": 0.6904639860069021, | |
| "learning_rate": 1.4431915455992416e-05, | |
| "loss": 0.3591, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.2587859424920127, | |
| "grad_norm": 0.7049006865325205, | |
| "learning_rate": 1.4398556994102996e-05, | |
| "loss": 0.4282, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.2619808306709266, | |
| "grad_norm": 0.7175700995734923, | |
| "learning_rate": 1.4365137733267514e-05, | |
| "loss": 0.3281, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.2651757188498403, | |
| "grad_norm": 0.8073941319432364, | |
| "learning_rate": 1.433165813542288e-05, | |
| "loss": 0.3913, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.268370607028754, | |
| "grad_norm": 0.7003414727566679, | |
| "learning_rate": 1.4298118663340018e-05, | |
| "loss": 0.3638, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.2715654952076676, | |
| "grad_norm": 0.7435882291411996, | |
| "learning_rate": 1.4264519780617452e-05, | |
| "loss": 0.3795, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.2747603833865815, | |
| "grad_norm": 0.9193494825848232, | |
| "learning_rate": 1.4230861951674914e-05, | |
| "loss": 0.3923, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.2779552715654952, | |
| "grad_norm": 0.7316900213493632, | |
| "learning_rate": 1.4197145641746923e-05, | |
| "loss": 0.4167, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.281150159744409, | |
| "grad_norm": 0.7980774035403849, | |
| "learning_rate": 1.4163371316876347e-05, | |
| "loss": 0.3917, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2843450479233227, | |
| "grad_norm": 0.8480589998659835, | |
| "learning_rate": 1.412953944390795e-05, | |
| "loss": 0.4241, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2875399361022364, | |
| "grad_norm": 0.7315237373460427, | |
| "learning_rate": 1.4095650490481978e-05, | |
| "loss": 0.4207, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.29073482428115, | |
| "grad_norm": 0.6375027271180014, | |
| "learning_rate": 1.4061704925027653e-05, | |
| "loss": 0.4, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.293929712460064, | |
| "grad_norm": 0.8134150199474265, | |
| "learning_rate": 1.4027703216756718e-05, | |
| "loss": 0.3669, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.2971246006389776, | |
| "grad_norm": 0.7898379126973761, | |
| "learning_rate": 1.3993645835656955e-05, | |
| "loss": 0.4186, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.3003194888178915, | |
| "grad_norm": 0.7703707080336293, | |
| "learning_rate": 1.3959533252485678e-05, | |
| "loss": 0.3739, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.3035143769968052, | |
| "grad_norm": 0.7611127075634048, | |
| "learning_rate": 1.3925365938763227e-05, | |
| "loss": 0.3894, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.3067092651757188, | |
| "grad_norm": 0.5958132128301956, | |
| "learning_rate": 1.3891144366766457e-05, | |
| "loss": 0.3959, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.3099041533546325, | |
| "grad_norm": 0.6334352951724201, | |
| "learning_rate": 1.3856869009522212e-05, | |
| "loss": 0.3619, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.3130990415335464, | |
| "grad_norm": 0.7130135068724587, | |
| "learning_rate": 1.382254034080077e-05, | |
| "loss": 0.386, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.31629392971246, | |
| "grad_norm": 0.7032314829984485, | |
| "learning_rate": 1.3788158835109313e-05, | |
| "loss": 0.3893, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.3194888178913737, | |
| "grad_norm": 0.7185920347681584, | |
| "learning_rate": 1.3753724967685363e-05, | |
| "loss": 0.3601, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.3226837060702876, | |
| "grad_norm": 0.6332079804609233, | |
| "learning_rate": 1.3719239214490203e-05, | |
| "loss": 0.4075, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.3258785942492013, | |
| "grad_norm": 0.7404658527045683, | |
| "learning_rate": 1.368470205220231e-05, | |
| "loss": 0.3993, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.329073482428115, | |
| "grad_norm": 0.7103673991700076, | |
| "learning_rate": 1.3650113958210764e-05, | |
| "loss": 0.4013, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.3322683706070286, | |
| "grad_norm": 0.7999109629095718, | |
| "learning_rate": 1.3615475410608647e-05, | |
| "loss": 0.3968, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.3354632587859425, | |
| "grad_norm": 0.7066086147191345, | |
| "learning_rate": 1.3580786888186428e-05, | |
| "loss": 0.4061, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.3386581469648562, | |
| "grad_norm": 0.8014839647186196, | |
| "learning_rate": 1.3546048870425356e-05, | |
| "loss": 0.419, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.34185303514377, | |
| "grad_norm": 0.5901005264351044, | |
| "learning_rate": 1.3511261837490837e-05, | |
| "loss": 0.4084, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.3450479233226837, | |
| "grad_norm": 0.739391619088848, | |
| "learning_rate": 1.3476426270225768e-05, | |
| "loss": 0.3988, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.3482428115015974, | |
| "grad_norm": 0.8316348210677322, | |
| "learning_rate": 1.344154265014393e-05, | |
| "loss": 0.3769, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.351437699680511, | |
| "grad_norm": 0.7970493555353015, | |
| "learning_rate": 1.3406611459423306e-05, | |
| "loss": 0.3849, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.354632587859425, | |
| "grad_norm": 0.7564976916720336, | |
| "learning_rate": 1.3371633180899417e-05, | |
| "loss": 0.4197, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.3578274760383386, | |
| "grad_norm": 0.961478015601037, | |
| "learning_rate": 1.3336608298058662e-05, | |
| "loss": 0.3626, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.3610223642172525, | |
| "grad_norm": 0.6797153190498834, | |
| "learning_rate": 1.3301537295031626e-05, | |
| "loss": 0.3978, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.3642172523961662, | |
| "grad_norm": 0.7404534217433801, | |
| "learning_rate": 1.326642065658638e-05, | |
| "loss": 0.3743, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.3674121405750799, | |
| "grad_norm": 0.7466684080396095, | |
| "learning_rate": 1.3231258868121806e-05, | |
| "loss": 0.3951, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.3706070287539935, | |
| "grad_norm": 0.804922013834555, | |
| "learning_rate": 1.3196052415660856e-05, | |
| "loss": 0.3805, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.3738019169329074, | |
| "grad_norm": 0.7133222438264706, | |
| "learning_rate": 1.3160801785843857e-05, | |
| "loss": 0.365, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.376996805111821, | |
| "grad_norm": 0.6606882521977088, | |
| "learning_rate": 1.3125507465921775e-05, | |
| "loss": 0.3718, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.3801916932907348, | |
| "grad_norm": 0.8048485138423717, | |
| "learning_rate": 1.3090169943749475e-05, | |
| "loss": 0.348, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.3833865814696487, | |
| "grad_norm": 0.7057236132502058, | |
| "learning_rate": 1.3054789707778998e-05, | |
| "loss": 0.3768, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.3865814696485623, | |
| "grad_norm": 0.7427223594984532, | |
| "learning_rate": 1.3019367247052781e-05, | |
| "loss": 0.4046, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.389776357827476, | |
| "grad_norm": 0.777982348919025, | |
| "learning_rate": 1.2983903051196922e-05, | |
| "loss": 0.4084, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.3929712460063897, | |
| "grad_norm": 0.7920793454584819, | |
| "learning_rate": 1.2948397610414393e-05, | |
| "loss": 0.3788, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3961661341853036, | |
| "grad_norm": 0.8369304632263217, | |
| "learning_rate": 1.291285141547828e-05, | |
| "loss": 0.3867, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.3993610223642172, | |
| "grad_norm": 0.7008958369408834, | |
| "learning_rate": 1.287726495772499e-05, | |
| "loss": 0.357, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.4025559105431311, | |
| "grad_norm": 0.880961196911694, | |
| "learning_rate": 1.2841638729047463e-05, | |
| "loss": 0.3871, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.4057507987220448, | |
| "grad_norm": 0.7018004421621855, | |
| "learning_rate": 1.2805973221888366e-05, | |
| "loss": 0.392, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.4089456869009584, | |
| "grad_norm": 0.6646094458865319, | |
| "learning_rate": 1.2770268929233298e-05, | |
| "loss": 0.4033, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.4121405750798721, | |
| "grad_norm": 0.7343716165782984, | |
| "learning_rate": 1.273452634460397e-05, | |
| "loss": 0.3523, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.415335463258786, | |
| "grad_norm": 0.6140404980778273, | |
| "learning_rate": 1.2698745962051379e-05, | |
| "loss": 0.3827, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.4185303514376997, | |
| "grad_norm": 0.7817854886271103, | |
| "learning_rate": 1.2662928276148985e-05, | |
| "loss": 0.3811, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.4217252396166133, | |
| "grad_norm": 0.7446921251394508, | |
| "learning_rate": 1.262707378198587e-05, | |
| "loss": 0.347, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.4249201277955272, | |
| "grad_norm": 0.7985988097138982, | |
| "learning_rate": 1.25911829751599e-05, | |
| "loss": 0.4242, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.428115015974441, | |
| "grad_norm": 0.6490635913633626, | |
| "learning_rate": 1.2555256351770873e-05, | |
| "loss": 0.3819, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.4313099041533546, | |
| "grad_norm": 0.8958688000041772, | |
| "learning_rate": 1.2519294408413655e-05, | |
| "loss": 0.4268, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.4345047923322682, | |
| "grad_norm": 0.7760938214202414, | |
| "learning_rate": 1.2483297642171332e-05, | |
| "loss": 0.3781, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.4376996805111821, | |
| "grad_norm": 0.6672706606455567, | |
| "learning_rate": 1.2447266550608315e-05, | |
| "loss": 0.3916, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.4408945686900958, | |
| "grad_norm": 0.783741926506166, | |
| "learning_rate": 1.2411201631763483e-05, | |
| "loss": 0.3712, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.4440894568690097, | |
| "grad_norm": 0.9120231546467329, | |
| "learning_rate": 1.2375103384143297e-05, | |
| "loss": 0.3636, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.4472843450479234, | |
| "grad_norm": 0.7973665501327444, | |
| "learning_rate": 1.2338972306714889e-05, | |
| "loss": 0.3368, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.450479233226837, | |
| "grad_norm": 0.6655599307050887, | |
| "learning_rate": 1.23028088988992e-05, | |
| "loss": 0.3629, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.4536741214057507, | |
| "grad_norm": 0.7334761926748368, | |
| "learning_rate": 1.2266613660564042e-05, | |
| "loss": 0.4479, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.4568690095846646, | |
| "grad_norm": 0.7535020493818221, | |
| "learning_rate": 1.2230387092017204e-05, | |
| "loss": 0.393, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.4600638977635783, | |
| "grad_norm": 0.7490052786483912, | |
| "learning_rate": 1.2194129693999549e-05, | |
| "loss": 0.389, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.4632587859424921, | |
| "grad_norm": 0.7487324907432512, | |
| "learning_rate": 1.2157841967678064e-05, | |
| "loss": 0.3936, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.4664536741214058, | |
| "grad_norm": 0.6713166876605019, | |
| "learning_rate": 1.2121524414638958e-05, | |
| "loss": 0.4003, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.4696485623003195, | |
| "grad_norm": 0.6898492721518805, | |
| "learning_rate": 1.2085177536880717e-05, | |
| "loss": 0.3879, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.4728434504792332, | |
| "grad_norm": 0.6341478496791327, | |
| "learning_rate": 1.2048801836807162e-05, | |
| "loss": 0.3638, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.476038338658147, | |
| "grad_norm": 0.7101904301148986, | |
| "learning_rate": 1.2012397817220522e-05, | |
| "loss": 0.3399, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.4792332268370607, | |
| "grad_norm": 0.8232525976178354, | |
| "learning_rate": 1.1975965981314459e-05, | |
| "loss": 0.4104, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.4824281150159744, | |
| "grad_norm": 0.7043019842589794, | |
| "learning_rate": 1.1939506832667129e-05, | |
| "loss": 0.4181, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.4856230031948883, | |
| "grad_norm": 0.7083596672114337, | |
| "learning_rate": 1.190302087523422e-05, | |
| "loss": 0.3815, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.488817891373802, | |
| "grad_norm": 0.6847992152093964, | |
| "learning_rate": 1.186650861334199e-05, | |
| "loss": 0.3866, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.4920127795527156, | |
| "grad_norm": 0.6177262873949391, | |
| "learning_rate": 1.182997055168027e-05, | |
| "loss": 0.3975, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.4952076677316293, | |
| "grad_norm": 0.7517416052088943, | |
| "learning_rate": 1.179340719529553e-05, | |
| "loss": 0.3694, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4984025559105432, | |
| "grad_norm": 0.7414131237861542, | |
| "learning_rate": 1.1756819049583861e-05, | |
| "loss": 0.4014, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.5015974440894568, | |
| "grad_norm": 0.7428769885232025, | |
| "learning_rate": 1.1720206620284011e-05, | |
| "loss": 0.377, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.5047923322683707, | |
| "grad_norm": 0.7208343616237635, | |
| "learning_rate": 1.1683570413470384e-05, | |
| "loss": 0.3618, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.5079872204472844, | |
| "grad_norm": 0.6254200530335278, | |
| "learning_rate": 1.1646910935546055e-05, | |
| "loss": 0.354, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.511182108626198, | |
| "grad_norm": 0.7137137920366021, | |
| "learning_rate": 1.1610228693235748e-05, | |
| "loss": 0.391, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.5143769968051117, | |
| "grad_norm": 0.7835829374466873, | |
| "learning_rate": 1.1573524193578863e-05, | |
| "loss": 0.3794, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.5175718849840254, | |
| "grad_norm": 0.6785632963125671, | |
| "learning_rate": 1.1536797943922442e-05, | |
| "loss": 0.3203, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.5207667731629393, | |
| "grad_norm": 0.6482994241562808, | |
| "learning_rate": 1.1500050451914171e-05, | |
| "loss": 0.3735, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.5239616613418532, | |
| "grad_norm": 0.6552308867215125, | |
| "learning_rate": 1.1463282225495358e-05, | |
| "loss": 0.3885, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.5271565495207668, | |
| "grad_norm": 0.6081556542356011, | |
| "learning_rate": 1.1426493772893907e-05, | |
| "loss": 0.3824, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.5303514376996805, | |
| "grad_norm": 0.6578350633788698, | |
| "learning_rate": 1.1389685602617302e-05, | |
| "loss": 0.3937, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.5335463258785942, | |
| "grad_norm": 0.6548231568961034, | |
| "learning_rate": 1.1352858223445571e-05, | |
| "loss": 0.4007, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.5367412140575079, | |
| "grad_norm": 0.6863453973626347, | |
| "learning_rate": 1.1316012144424265e-05, | |
| "loss": 0.421, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.5399361022364217, | |
| "grad_norm": 0.6349909711548734, | |
| "learning_rate": 1.1279147874857397e-05, | |
| "loss": 0.4273, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.5431309904153354, | |
| "grad_norm": 0.6681450715179307, | |
| "learning_rate": 1.1242265924300433e-05, | |
| "loss": 0.3429, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.5463258785942493, | |
| "grad_norm": 0.6888043347361502, | |
| "learning_rate": 1.1205366802553231e-05, | |
| "loss": 0.3975, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.549520766773163, | |
| "grad_norm": 0.7223078250216108, | |
| "learning_rate": 1.1168451019652995e-05, | |
| "loss": 0.3613, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.5527156549520766, | |
| "grad_norm": 0.8636641582227061, | |
| "learning_rate": 1.1131519085867223e-05, | |
| "loss": 0.3823, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.5559105431309903, | |
| "grad_norm": 0.6802538435332052, | |
| "learning_rate": 1.1094571511686669e-05, | |
| "loss": 0.3921, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.5591054313099042, | |
| "grad_norm": 0.6821928369769188, | |
| "learning_rate": 1.1057608807818267e-05, | |
| "loss": 0.3705, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.5623003194888179, | |
| "grad_norm": 0.6868604612664317, | |
| "learning_rate": 1.1020631485178084e-05, | |
| "loss": 0.4362, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.5654952076677318, | |
| "grad_norm": 0.6814388248892118, | |
| "learning_rate": 1.0983640054884252e-05, | |
| "loss": 0.3445, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.5686900958466454, | |
| "grad_norm": 0.6238777532494983, | |
| "learning_rate": 1.0946635028249916e-05, | |
| "loss": 0.3926, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.571884984025559, | |
| "grad_norm": 0.7288697316229533, | |
| "learning_rate": 1.0909616916776138e-05, | |
| "loss": 0.325, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.5750798722044728, | |
| "grad_norm": 0.5919468997525036, | |
| "learning_rate": 1.0872586232144861e-05, | |
| "loss": 0.4112, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.5782747603833864, | |
| "grad_norm": 0.6404385296704492, | |
| "learning_rate": 1.0835543486211815e-05, | |
| "loss": 0.3608, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.5814696485623003, | |
| "grad_norm": 0.6699847382159787, | |
| "learning_rate": 1.0798489190999441e-05, | |
| "loss": 0.3565, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.5846645367412142, | |
| "grad_norm": 0.6843597801238872, | |
| "learning_rate": 1.076142385868983e-05, | |
| "loss": 0.3909, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.5878594249201279, | |
| "grad_norm": 0.634709992907837, | |
| "learning_rate": 1.0724348001617626e-05, | |
| "loss": 0.3873, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.5910543130990416, | |
| "grad_norm": 0.6548440586247501, | |
| "learning_rate": 1.0687262132262952e-05, | |
| "loss": 0.3983, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.5942492012779552, | |
| "grad_norm": 0.642604066379002, | |
| "learning_rate": 1.065016676324433e-05, | |
| "loss": 0.3682, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.5974440894568689, | |
| "grad_norm": 0.7073578469704601, | |
| "learning_rate": 1.061306240731158e-05, | |
| "loss": 0.4251, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.6006389776357828, | |
| "grad_norm": 0.777600887075542, | |
| "learning_rate": 1.057594957733876e-05, | |
| "loss": 0.4026, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.6038338658146964, | |
| "grad_norm": 0.7531810084419379, | |
| "learning_rate": 1.0538828786317046e-05, | |
| "loss": 0.3423, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.6070287539936103, | |
| "grad_norm": 0.6079896810219849, | |
| "learning_rate": 1.0501700547347662e-05, | |
| "loss": 0.3497, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.610223642172524, | |
| "grad_norm": 0.7036526411507877, | |
| "learning_rate": 1.0464565373634784e-05, | |
| "loss": 0.376, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.6134185303514377, | |
| "grad_norm": 0.6979697325962348, | |
| "learning_rate": 1.0427423778478428e-05, | |
| "loss": 0.3836, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.6166134185303513, | |
| "grad_norm": 0.7415189360097981, | |
| "learning_rate": 1.039027627526739e-05, | |
| "loss": 0.4158, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.619808306709265, | |
| "grad_norm": 0.6308217438942585, | |
| "learning_rate": 1.035312337747212e-05, | |
| "loss": 0.3668, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.623003194888179, | |
| "grad_norm": 0.7294035513207259, | |
| "learning_rate": 1.0315965598637634e-05, | |
| "loss": 0.3341, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.6261980830670928, | |
| "grad_norm": 0.6726328452330528, | |
| "learning_rate": 1.0278803452376416e-05, | |
| "loss": 0.353, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.6293929712460065, | |
| "grad_norm": 0.7190369852430928, | |
| "learning_rate": 1.0241637452361323e-05, | |
| "loss": 0.3465, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.6325878594249201, | |
| "grad_norm": 0.7705034557107943, | |
| "learning_rate": 1.0204468112318479e-05, | |
| "loss": 0.4002, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.6357827476038338, | |
| "grad_norm": 0.7767754657361531, | |
| "learning_rate": 1.016729594602017e-05, | |
| "loss": 0.3735, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.6389776357827475, | |
| "grad_norm": 0.7826578684750594, | |
| "learning_rate": 1.0130121467277755e-05, | |
| "loss": 0.4192, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.6421725239616614, | |
| "grad_norm": 0.7359999647065305, | |
| "learning_rate": 1.0092945189934558e-05, | |
| "loss": 0.3698, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.645367412140575, | |
| "grad_norm": 0.596609232423867, | |
| "learning_rate": 1.0055767627858748e-05, | |
| "loss": 0.3738, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.648562300319489, | |
| "grad_norm": 0.6849486475833665, | |
| "learning_rate": 1.0018589294936273e-05, | |
| "loss": 0.3703, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.6517571884984026, | |
| "grad_norm": 0.7070173939461314, | |
| "learning_rate": 9.981410705063728e-06, | |
| "loss": 0.3497, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.6549520766773163, | |
| "grad_norm": 0.6510068381611003, | |
| "learning_rate": 9.944232372141252e-06, | |
| "loss": 0.3779, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.65814696485623, | |
| "grad_norm": 0.6408548186573009, | |
| "learning_rate": 9.907054810065446e-06, | |
| "loss": 0.3545, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.6613418530351438, | |
| "grad_norm": 0.6992267985738108, | |
| "learning_rate": 9.869878532722246e-06, | |
| "loss": 0.3819, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.6645367412140575, | |
| "grad_norm": 0.8229340777031916, | |
| "learning_rate": 9.832704053979828e-06, | |
| "loss": 0.3974, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.6677316293929714, | |
| "grad_norm": 0.6624955649595131, | |
| "learning_rate": 9.795531887681523e-06, | |
| "loss": 0.367, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.670926517571885, | |
| "grad_norm": 0.7223508220259945, | |
| "learning_rate": 9.75836254763868e-06, | |
| "loss": 0.3762, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.6741214057507987, | |
| "grad_norm": 0.666821717668502, | |
| "learning_rate": 9.721196547623585e-06, | |
| "loss": 0.4124, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.6773162939297124, | |
| "grad_norm": 0.6667393357744014, | |
| "learning_rate": 9.68403440136237e-06, | |
| "loss": 0.3474, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.680511182108626, | |
| "grad_norm": 0.7990772639121522, | |
| "learning_rate": 9.646876622527886e-06, | |
| "loss": 0.421, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.68370607028754, | |
| "grad_norm": 0.602666552687939, | |
| "learning_rate": 9.609723724732611e-06, | |
| "loss": 0.3308, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.6869009584664538, | |
| "grad_norm": 0.6268003913810553, | |
| "learning_rate": 9.572576221521574e-06, | |
| "loss": 0.416, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.6900958466453675, | |
| "grad_norm": 0.5993146844292886, | |
| "learning_rate": 9.535434626365221e-06, | |
| "loss": 0.3952, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.6932907348242812, | |
| "grad_norm": 0.600874293925157, | |
| "learning_rate": 9.49829945265234e-06, | |
| "loss": 0.3887, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.6964856230031948, | |
| "grad_norm": 0.8734215117818372, | |
| "learning_rate": 9.461171213682957e-06, | |
| "loss": 0.386, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.6996805111821085, | |
| "grad_norm": 0.722628235693675, | |
| "learning_rate": 9.424050422661243e-06, | |
| "loss": 0.4161, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.7028753993610224, | |
| "grad_norm": 0.6988954063267449, | |
| "learning_rate": 9.386937592688422e-06, | |
| "loss": 0.4119, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.706070287539936, | |
| "grad_norm": 0.7384635614942465, | |
| "learning_rate": 9.349833236755675e-06, | |
| "loss": 0.4044, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.70926517571885, | |
| "grad_norm": 0.7882261202758374, | |
| "learning_rate": 9.31273786773705e-06, | |
| "loss": 0.3701, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.7124600638977636, | |
| "grad_norm": 0.7035288193657737, | |
| "learning_rate": 9.275651998382377e-06, | |
| "loss": 0.3825, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.7156549520766773, | |
| "grad_norm": 0.6439779352342753, | |
| "learning_rate": 9.238576141310172e-06, | |
| "loss": 0.3596, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.718849840255591, | |
| "grad_norm": 0.6669213876269842, | |
| "learning_rate": 9.201510809000562e-06, | |
| "loss": 0.3482, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.7220447284345048, | |
| "grad_norm": 0.6933389154618239, | |
| "learning_rate": 9.164456513788186e-06, | |
| "loss": 0.3781, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.7252396166134185, | |
| "grad_norm": 0.7376867128140121, | |
| "learning_rate": 9.12741376785514e-06, | |
| "loss": 0.3964, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.7284345047923324, | |
| "grad_norm": 0.6705508648989672, | |
| "learning_rate": 9.090383083223866e-06, | |
| "loss": 0.4017, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.731629392971246, | |
| "grad_norm": 0.6576975605420414, | |
| "learning_rate": 9.053364971750087e-06, | |
| "loss": 0.3975, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.7348242811501597, | |
| "grad_norm": 0.6410586797875636, | |
| "learning_rate": 9.01635994511575e-06, | |
| "loss": 0.4352, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.7380191693290734, | |
| "grad_norm": 0.660772708577865, | |
| "learning_rate": 8.979368514821917e-06, | |
| "loss": 0.364, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.741214057507987, | |
| "grad_norm": 0.7048979114742526, | |
| "learning_rate": 8.942391192181735e-06, | |
| "loss": 0.4064, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.744408945686901, | |
| "grad_norm": 0.6113060002775187, | |
| "learning_rate": 8.905428488313335e-06, | |
| "loss": 0.3934, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.7476038338658149, | |
| "grad_norm": 0.7545453314822455, | |
| "learning_rate": 8.868480914132777e-06, | |
| "loss": 0.4098, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.7507987220447285, | |
| "grad_norm": 0.6514600293747569, | |
| "learning_rate": 8.831548980347009e-06, | |
| "loss": 0.4287, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.7539936102236422, | |
| "grad_norm": 0.6646813541705668, | |
| "learning_rate": 8.79463319744677e-06, | |
| "loss": 0.3431, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.7571884984025559, | |
| "grad_norm": 0.6154846894981999, | |
| "learning_rate": 8.757734075699567e-06, | |
| "loss": 0.4097, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.7603833865814695, | |
| "grad_norm": 0.6954784339750693, | |
| "learning_rate": 8.720852125142606e-06, | |
| "loss": 0.3757, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.7635782747603834, | |
| "grad_norm": 0.745806458726026, | |
| "learning_rate": 8.683987855575742e-06, | |
| "loss": 0.4016, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.766773162939297, | |
| "grad_norm": 0.6265426574548529, | |
| "learning_rate": 8.64714177655443e-06, | |
| "loss": 0.3306, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.769968051118211, | |
| "grad_norm": 0.6529854645903413, | |
| "learning_rate": 8.610314397382701e-06, | |
| "loss": 0.373, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.7731629392971247, | |
| "grad_norm": 0.7241000354204435, | |
| "learning_rate": 8.573506227106098e-06, | |
| "loss": 0.435, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.7763578274760383, | |
| "grad_norm": 0.6613396360810041, | |
| "learning_rate": 8.536717774504644e-06, | |
| "loss": 0.37, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.779552715654952, | |
| "grad_norm": 0.7090691178398771, | |
| "learning_rate": 8.49994954808583e-06, | |
| "loss": 0.3259, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.7827476038338657, | |
| "grad_norm": 0.7010800080078065, | |
| "learning_rate": 8.463202056077562e-06, | |
| "loss": 0.3763, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.7859424920127795, | |
| "grad_norm": 0.8135213458217521, | |
| "learning_rate": 8.426475806421139e-06, | |
| "loss": 0.3633, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.7891373801916934, | |
| "grad_norm": 0.8057923307232946, | |
| "learning_rate": 8.389771306764255e-06, | |
| "loss": 0.387, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.792332268370607, | |
| "grad_norm": 0.6868563818667783, | |
| "learning_rate": 8.353089064453949e-06, | |
| "loss": 0.3919, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.7955271565495208, | |
| "grad_norm": 0.729148092530284, | |
| "learning_rate": 8.316429586529616e-06, | |
| "loss": 0.3739, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.7987220447284344, | |
| "grad_norm": 0.7454589082195145, | |
| "learning_rate": 8.27979337971599e-06, | |
| "loss": 0.366, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.8019169329073481, | |
| "grad_norm": 0.7363609782622037, | |
| "learning_rate": 8.243180950416142e-06, | |
| "loss": 0.3746, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.805111821086262, | |
| "grad_norm": 0.7051907706488193, | |
| "learning_rate": 8.206592804704473e-06, | |
| "loss": 0.4004, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.8083067092651757, | |
| "grad_norm": 0.7077685360891058, | |
| "learning_rate": 8.170029448319732e-06, | |
| "loss": 0.3605, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.8115015974440896, | |
| "grad_norm": 0.630064546716315, | |
| "learning_rate": 8.133491386658016e-06, | |
| "loss": 0.3956, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.8146964856230032, | |
| "grad_norm": 0.7209999797137789, | |
| "learning_rate": 8.096979124765781e-06, | |
| "loss": 0.4296, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.817891373801917, | |
| "grad_norm": 0.7309587998391873, | |
| "learning_rate": 8.060493167332874e-06, | |
| "loss": 0.3941, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.8210862619808306, | |
| "grad_norm": 0.6946638921204976, | |
| "learning_rate": 8.024034018685546e-06, | |
| "loss": 0.3368, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.8242811501597445, | |
| "grad_norm": 0.6290744849288465, | |
| "learning_rate": 7.98760218277948e-06, | |
| "loss": 0.3967, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.8274760383386581, | |
| "grad_norm": 0.6756197738312434, | |
| "learning_rate": 7.95119816319284e-06, | |
| "loss": 0.4374, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.830670926517572, | |
| "grad_norm": 0.6195041582890459, | |
| "learning_rate": 7.914822463119285e-06, | |
| "loss": 0.3764, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.8338658146964857, | |
| "grad_norm": 0.6520016022036852, | |
| "learning_rate": 7.878475585361045e-06, | |
| "loss": 0.3697, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.8370607028753994, | |
| "grad_norm": 0.6673365035852971, | |
| "learning_rate": 7.84215803232194e-06, | |
| "loss": 0.3751, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.840255591054313, | |
| "grad_norm": 0.7467414573410552, | |
| "learning_rate": 7.805870306000453e-06, | |
| "loss": 0.4025, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.8434504792332267, | |
| "grad_norm": 0.7254888570735092, | |
| "learning_rate": 7.769612907982798e-06, | |
| "loss": 0.3572, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.8466453674121406, | |
| "grad_norm": 0.6517647297304884, | |
| "learning_rate": 7.733386339435965e-06, | |
| "loss": 0.3919, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.8498402555910545, | |
| "grad_norm": 0.5750405762377315, | |
| "learning_rate": 7.697191101100802e-06, | |
| "loss": 0.3996, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.8530351437699681, | |
| "grad_norm": 0.6982903562614436, | |
| "learning_rate": 7.661027693285113e-06, | |
| "loss": 0.3773, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.8562300319488818, | |
| "grad_norm": 0.6536016012178709, | |
| "learning_rate": 7.624896615856709e-06, | |
| "loss": 0.3519, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.8594249201277955, | |
| "grad_norm": 0.6044514365984781, | |
| "learning_rate": 7.588798368236517e-06, | |
| "loss": 0.3914, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.8626198083067091, | |
| "grad_norm": 0.6005793640293874, | |
| "learning_rate": 7.5527334493916894e-06, | |
| "loss": 0.3374, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.865814696485623, | |
| "grad_norm": 0.6837392480104575, | |
| "learning_rate": 7.516702357828672e-06, | |
| "loss": 0.3906, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.8690095846645367, | |
| "grad_norm": 0.6869443137528136, | |
| "learning_rate": 7.480705591586343e-06, | |
| "loss": 0.3648, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.8722044728434506, | |
| "grad_norm": 0.6640758656121128, | |
| "learning_rate": 7.444743648229129e-06, | |
| "loss": 0.3795, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.8753993610223643, | |
| "grad_norm": 0.711636064138404, | |
| "learning_rate": 7.408817024840103e-06, | |
| "loss": 0.3569, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.878594249201278, | |
| "grad_norm": 0.6951157527099808, | |
| "learning_rate": 7.372926218014131e-06, | |
| "loss": 0.3535, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.8817891373801916, | |
| "grad_norm": 0.6706065636049294, | |
| "learning_rate": 7.337071723851018e-06, | |
| "loss": 0.3598, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.8849840255591053, | |
| "grad_norm": 0.8241961745024526, | |
| "learning_rate": 7.301254037948624e-06, | |
| "loss": 0.4376, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.8881789137380192, | |
| "grad_norm": 0.7234855502758178, | |
| "learning_rate": 7.26547365539603e-06, | |
| "loss": 0.3529, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.891373801916933, | |
| "grad_norm": 0.6238866827009744, | |
| "learning_rate": 7.2297310707667036e-06, | |
| "loss": 0.4163, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.8945686900958467, | |
| "grad_norm": 0.5736271877912065, | |
| "learning_rate": 7.194026778111637e-06, | |
| "loss": 0.3587, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.8977635782747604, | |
| "grad_norm": 0.6572875943312029, | |
| "learning_rate": 7.1583612709525405e-06, | |
| "loss": 0.3783, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.900958466453674, | |
| "grad_norm": 0.6593416011341751, | |
| "learning_rate": 7.122735042275013e-06, | |
| "loss": 0.3675, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.9041533546325877, | |
| "grad_norm": 0.6249202723608295, | |
| "learning_rate": 7.0871485845217235e-06, | |
| "loss": 0.4192, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.9073482428115016, | |
| "grad_norm": 0.6174171894639658, | |
| "learning_rate": 7.05160238958561e-06, | |
| "loss": 0.3545, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.9105431309904153, | |
| "grad_norm": 0.5978066146036624, | |
| "learning_rate": 7.016096948803082e-06, | |
| "loss": 0.3882, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.9137380191693292, | |
| "grad_norm": 0.7508662460345255, | |
| "learning_rate": 6.980632752947221e-06, | |
| "loss": 0.4201, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.9169329073482428, | |
| "grad_norm": 0.6674809760382143, | |
| "learning_rate": 6.945210292221003e-06, | |
| "loss": 0.4007, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.9201277955271565, | |
| "grad_norm": 0.6845401841035883, | |
| "learning_rate": 6.909830056250527e-06, | |
| "loss": 0.4242, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.9233226837060702, | |
| "grad_norm": 0.6747837702166705, | |
| "learning_rate": 6.87449253407823e-06, | |
| "loss": 0.3484, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.926517571884984, | |
| "grad_norm": 0.6572349129459626, | |
| "learning_rate": 6.839198214156146e-06, | |
| "loss": 0.399, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.9297124600638977, | |
| "grad_norm": 0.667673745198207, | |
| "learning_rate": 6.803947584339148e-06, | |
| "loss": 0.3812, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.9329073482428116, | |
| "grad_norm": 0.7997277616197571, | |
| "learning_rate": 6.768741131878196e-06, | |
| "loss": 0.3719, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.9361022364217253, | |
| "grad_norm": 0.6607956272467509, | |
| "learning_rate": 6.733579343413621e-06, | |
| "loss": 0.4025, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.939297124600639, | |
| "grad_norm": 0.6424153208610056, | |
| "learning_rate": 6.69846270496838e-06, | |
| "loss": 0.4088, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.9424920127795526, | |
| "grad_norm": 0.6841836944354628, | |
| "learning_rate": 6.66339170194134e-06, | |
| "loss": 0.3638, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.9456869009584663, | |
| "grad_norm": 0.7327498988996838, | |
| "learning_rate": 6.628366819100586e-06, | |
| "loss": 0.3958, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.9488817891373802, | |
| "grad_norm": 0.6623970255054498, | |
| "learning_rate": 6.593388540576699e-06, | |
| "loss": 0.4044, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.952076677316294, | |
| "grad_norm": 0.6762762317320167, | |
| "learning_rate": 6.55845734985607e-06, | |
| "loss": 0.341, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.9552715654952078, | |
| "grad_norm": 0.7352041630492657, | |
| "learning_rate": 6.523573729774234e-06, | |
| "loss": 0.419, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.9584664536741214, | |
| "grad_norm": 0.6937804334119967, | |
| "learning_rate": 6.48873816250917e-06, | |
| "loss": 0.3757, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.961661341853035, | |
| "grad_norm": 0.7897600638634236, | |
| "learning_rate": 6.453951129574644e-06, | |
| "loss": 0.4072, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.9648562300319488, | |
| "grad_norm": 0.760236193668744, | |
| "learning_rate": 6.419213111813576e-06, | |
| "loss": 0.3435, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.9680511182108626, | |
| "grad_norm": 0.8111151020455108, | |
| "learning_rate": 6.384524589391358e-06, | |
| "loss": 0.3664, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.9712460063897763, | |
| "grad_norm": 0.6283684271490623, | |
| "learning_rate": 6.349886041789236e-06, | |
| "loss": 0.3853, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.9744408945686902, | |
| "grad_norm": 0.6193900473349803, | |
| "learning_rate": 6.315297947797691e-06, | |
| "loss": 0.3598, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.9776357827476039, | |
| "grad_norm": 0.8051890922129095, | |
| "learning_rate": 6.280760785509802e-06, | |
| "loss": 0.3793, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.9808306709265175, | |
| "grad_norm": 0.7239793820151423, | |
| "learning_rate": 6.24627503231464e-06, | |
| "loss": 0.3721, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.9840255591054312, | |
| "grad_norm": 0.7063077062824192, | |
| "learning_rate": 6.211841164890689e-06, | |
| "loss": 0.3797, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.9872204472843449, | |
| "grad_norm": 0.7372976490512757, | |
| "learning_rate": 6.177459659199237e-06, | |
| "loss": 0.3738, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.9904153354632588, | |
| "grad_norm": 0.6839285813717391, | |
| "learning_rate": 6.1431309904777915e-06, | |
| "loss": 0.3894, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.9936102236421727, | |
| "grad_norm": 0.6817876012906897, | |
| "learning_rate": 6.108855633233546e-06, | |
| "loss": 0.4159, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.9968051118210863, | |
| "grad_norm": 0.6386452889929469, | |
| "learning_rate": 6.074634061236777e-06, | |
| "loss": 0.3517, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.638099341863494, | |
| "learning_rate": 6.040466747514324e-06, | |
| "loss": 0.3387, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.0031948881789137, | |
| "grad_norm": 0.8271347304416454, | |
| "learning_rate": 6.006354164343047e-06, | |
| "loss": 0.2879, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.0063897763578273, | |
| "grad_norm": 0.646769849130275, | |
| "learning_rate": 5.972296783243281e-06, | |
| "loss": 0.2889, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.009584664536741, | |
| "grad_norm": 0.6621296057507008, | |
| "learning_rate": 5.93829507497235e-06, | |
| "loss": 0.3002, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.012779552715655, | |
| "grad_norm": 0.6556200090741601, | |
| "learning_rate": 5.904349509518024e-06, | |
| "loss": 0.2477, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.015974440894569, | |
| "grad_norm": 0.9911955110119478, | |
| "learning_rate": 5.87046055609205e-06, | |
| "loss": 0.3116, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.0191693290734825, | |
| "grad_norm": 0.80574892593569, | |
| "learning_rate": 5.836628683123659e-06, | |
| "loss": 0.2621, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.022364217252396, | |
| "grad_norm": 0.6910559059978796, | |
| "learning_rate": 5.8028543582530805e-06, | |
| "loss": 0.2855, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.02555910543131, | |
| "grad_norm": 0.5968749973323358, | |
| "learning_rate": 5.769138048325087e-06, | |
| "loss": 0.2414, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.0287539936102235, | |
| "grad_norm": 0.6536748958061217, | |
| "learning_rate": 5.735480219382554e-06, | |
| "loss": 0.2973, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.0319488817891376, | |
| "grad_norm": 0.6989686634311535, | |
| "learning_rate": 5.701881336659992e-06, | |
| "loss": 0.268, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.0351437699680512, | |
| "grad_norm": 0.6569035525910143, | |
| "learning_rate": 5.668341864577125e-06, | |
| "loss": 0.3044, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.038338658146965, | |
| "grad_norm": 0.602669314331145, | |
| "learning_rate": 5.634862266732492e-06, | |
| "loss": 0.281, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.0415335463258786, | |
| "grad_norm": 0.5728367364913933, | |
| "learning_rate": 5.601443005897012e-06, | |
| "loss": 0.332, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.0447284345047922, | |
| "grad_norm": 0.6368451330277513, | |
| "learning_rate": 5.5680845440075885e-06, | |
| "loss": 0.2893, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.047923322683706, | |
| "grad_norm": 0.5734258778625324, | |
| "learning_rate": 5.534787342160752e-06, | |
| "loss": 0.2975, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.0511182108626196, | |
| "grad_norm": 0.6266002126502631, | |
| "learning_rate": 5.501551860606251e-06, | |
| "loss": 0.2898, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.0543130990415337, | |
| "grad_norm": 0.5878513634227986, | |
| "learning_rate": 5.468378558740719e-06, | |
| "loss": 0.2666, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.0575079872204474, | |
| "grad_norm": 0.5702013097999747, | |
| "learning_rate": 5.435267895101303e-06, | |
| "loss": 0.2692, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.060702875399361, | |
| "grad_norm": 0.602338703074719, | |
| "learning_rate": 5.402220327359335e-06, | |
| "loss": 0.2721, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.0638977635782747, | |
| "grad_norm": 0.5811139388678999, | |
| "learning_rate": 5.369236312314017e-06, | |
| "loss": 0.2928, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.0670926517571884, | |
| "grad_norm": 0.5882325787521038, | |
| "learning_rate": 5.336316305886078e-06, | |
| "loss": 0.2966, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.070287539936102, | |
| "grad_norm": 0.6412192942110215, | |
| "learning_rate": 5.303460763111508e-06, | |
| "loss": 0.2946, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.073482428115016, | |
| "grad_norm": 0.6529865737566748, | |
| "learning_rate": 5.270670138135234e-06, | |
| "loss": 0.2574, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.07667731629393, | |
| "grad_norm": 0.5986874454585268, | |
| "learning_rate": 5.237944884204864e-06, | |
| "loss": 0.2461, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.0798722044728435, | |
| "grad_norm": 0.5492039524576837, | |
| "learning_rate": 5.205285453664424e-06, | |
| "loss": 0.3039, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.083067092651757, | |
| "grad_norm": 0.6487742195433347, | |
| "learning_rate": 5.1726922979480805e-06, | |
| "loss": 0.2899, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.086261980830671, | |
| "grad_norm": 0.6995349782327247, | |
| "learning_rate": 5.14016586757394e-06, | |
| "loss": 0.288, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.0894568690095845, | |
| "grad_norm": 0.5774907524747939, | |
| "learning_rate": 5.107706612137776e-06, | |
| "loss": 0.2571, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.0926517571884986, | |
| "grad_norm": 0.602454561129501, | |
| "learning_rate": 5.075314980306861e-06, | |
| "loss": 0.2588, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.0958466453674123, | |
| "grad_norm": 0.615167046308701, | |
| "learning_rate": 5.042991419813723e-06, | |
| "loss": 0.2361, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.099041533546326, | |
| "grad_norm": 0.5655289081350507, | |
| "learning_rate": 5.010736377449983e-06, | |
| "loss": 0.2763, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.1022364217252396, | |
| "grad_norm": 0.5867220298505782, | |
| "learning_rate": 4.978550299060179e-06, | |
| "loss": 0.256, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.1054313099041533, | |
| "grad_norm": 0.5617761964475673, | |
| "learning_rate": 4.946433629535585e-06, | |
| "loss": 0.3115, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.108626198083067, | |
| "grad_norm": 0.530087482930863, | |
| "learning_rate": 4.914386812808073e-06, | |
| "loss": 0.2481, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.1118210862619806, | |
| "grad_norm": 0.5821201498478065, | |
| "learning_rate": 4.882410291843989e-06, | |
| "loss": 0.2687, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.1150159744408947, | |
| "grad_norm": 0.6009245895614106, | |
| "learning_rate": 4.850504508638004e-06, | |
| "loss": 0.2672, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.1182108626198084, | |
| "grad_norm": 0.5812729760109654, | |
| "learning_rate": 4.818669904207021e-06, | |
| "loss": 0.2669, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.121405750798722, | |
| "grad_norm": 0.5562495650685109, | |
| "learning_rate": 4.786906918584083e-06, | |
| "loss": 0.2708, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.1246006389776357, | |
| "grad_norm": 0.565127621348711, | |
| "learning_rate": 4.755215990812277e-06, | |
| "loss": 0.2521, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.1277955271565494, | |
| "grad_norm": 0.5406508398417975, | |
| "learning_rate": 4.7235975589386715e-06, | |
| "loss": 0.2579, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.130990415335463, | |
| "grad_norm": 0.5579616048617105, | |
| "learning_rate": 4.692052060008271e-06, | |
| "loss": 0.3169, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.134185303514377, | |
| "grad_norm": 0.565592556878981, | |
| "learning_rate": 4.6605799300579546e-06, | |
| "loss": 0.2653, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.137380191693291, | |
| "grad_norm": 0.5683205240290914, | |
| "learning_rate": 4.629181604110464e-06, | |
| "loss": 0.2718, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.1405750798722045, | |
| "grad_norm": 0.6460113832000962, | |
| "learning_rate": 4.597857516168397e-06, | |
| "loss": 0.2682, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.143769968051118, | |
| "grad_norm": 0.5862339654865363, | |
| "learning_rate": 4.566608099208185e-06, | |
| "loss": 0.3328, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.146964856230032, | |
| "grad_norm": 0.5435510212641139, | |
| "learning_rate": 4.535433785174124e-06, | |
| "loss": 0.2685, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.1501597444089455, | |
| "grad_norm": 0.5399021750624424, | |
| "learning_rate": 4.50433500497241e-06, | |
| "loss": 0.2739, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.1533546325878596, | |
| "grad_norm": 0.5286896610464358, | |
| "learning_rate": 4.4733121884651665e-06, | |
| "loss": 0.3251, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.1565495207667733, | |
| "grad_norm": 0.5432882677437948, | |
| "learning_rate": 4.442365764464509e-06, | |
| "loss": 0.3201, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.159744408945687, | |
| "grad_norm": 0.5748779573239078, | |
| "learning_rate": 4.411496160726632e-06, | |
| "loss": 0.2604, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.1629392971246006, | |
| "grad_norm": 0.5033583313339888, | |
| "learning_rate": 4.38070380394587e-06, | |
| "loss": 0.2686, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.1661341853035143, | |
| "grad_norm": 0.5985918860143106, | |
| "learning_rate": 4.349989119748815e-06, | |
| "loss": 0.2199, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.169329073482428, | |
| "grad_norm": 0.5377363065642814, | |
| "learning_rate": 4.319352532688444e-06, | |
| "loss": 0.2533, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.1725239616613417, | |
| "grad_norm": 0.5208201227205966, | |
| "learning_rate": 4.288794466238224e-06, | |
| "loss": 0.2494, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.1757188498402558, | |
| "grad_norm": 0.546873580650872, | |
| "learning_rate": 4.2583153427862765e-06, | |
| "loss": 0.2587, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.1789137380191694, | |
| "grad_norm": 0.6235527130870171, | |
| "learning_rate": 4.227915583629543e-06, | |
| "loss": 0.2825, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.182108626198083, | |
| "grad_norm": 0.5268102602354202, | |
| "learning_rate": 4.197595608967941e-06, | |
| "loss": 0.275, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.1853035143769968, | |
| "grad_norm": 0.5401897921628086, | |
| "learning_rate": 4.167355837898585e-06, | |
| "loss": 0.3187, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.1884984025559104, | |
| "grad_norm": 0.5995690052883824, | |
| "learning_rate": 4.137196688409962e-06, | |
| "loss": 0.2842, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.191693290734824, | |
| "grad_norm": 0.530440581581717, | |
| "learning_rate": 4.107118577376172e-06, | |
| "loss": 0.2655, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.194888178913738, | |
| "grad_norm": 0.5468145774158746, | |
| "learning_rate": 4.0771219205511756e-06, | |
| "loss": 0.3003, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.198083067092652, | |
| "grad_norm": 0.5307256800656307, | |
| "learning_rate": 4.04720713256302e-06, | |
| "loss": 0.29, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.2012779552715656, | |
| "grad_norm": 0.5876558249492184, | |
| "learning_rate": 4.017374626908125e-06, | |
| "loss": 0.3012, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.2044728434504792, | |
| "grad_norm": 0.5288296732250444, | |
| "learning_rate": 3.987624815945575e-06, | |
| "loss": 0.3387, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.207667731629393, | |
| "grad_norm": 0.5334676159553654, | |
| "learning_rate": 3.957958110891395e-06, | |
| "loss": 0.3118, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.2108626198083066, | |
| "grad_norm": 0.5018942342913997, | |
| "learning_rate": 3.9283749218128885e-06, | |
| "loss": 0.2442, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.2140575079872207, | |
| "grad_norm": 0.5460501175933514, | |
| "learning_rate": 3.898875657622963e-06, | |
| "loss": 0.2751, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.2172523961661343, | |
| "grad_norm": 0.5238545161103514, | |
| "learning_rate": 3.8694607260744745e-06, | |
| "loss": 0.3171, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.220447284345048, | |
| "grad_norm": 0.5648156161744206, | |
| "learning_rate": 3.840130533754585e-06, | |
| "loss": 0.3184, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.2236421725239617, | |
| "grad_norm": 0.5818824089460198, | |
| "learning_rate": 3.8108854860791657e-06, | |
| "loss": 0.2745, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.2268370607028753, | |
| "grad_norm": 0.5940846743967164, | |
| "learning_rate": 3.781725987287166e-06, | |
| "loss": 0.2612, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.230031948881789, | |
| "grad_norm": 0.48818409466248414, | |
| "learning_rate": 3.7526524404350407e-06, | |
| "loss": 0.2459, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.2332268370607027, | |
| "grad_norm": 0.553611582334448, | |
| "learning_rate": 3.7236652473911817e-06, | |
| "loss": 0.2894, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.236421725239617, | |
| "grad_norm": 0.639389907063906, | |
| "learning_rate": 3.6947648088303523e-06, | |
| "loss": 0.2926, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.2396166134185305, | |
| "grad_norm": 0.5480348447624199, | |
| "learning_rate": 3.6659515242281496e-06, | |
| "loss": 0.2128, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.242811501597444, | |
| "grad_norm": 0.5565653754577661, | |
| "learning_rate": 3.6372257918555e-06, | |
| "loss": 0.2949, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.246006389776358, | |
| "grad_norm": 0.6313270010180382, | |
| "learning_rate": 3.6085880087731316e-06, | |
| "loss": 0.2719, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.2492012779552715, | |
| "grad_norm": 0.5254468501533311, | |
| "learning_rate": 3.580038570826093e-06, | |
| "loss": 0.2646, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.252396166134185, | |
| "grad_norm": 0.5449168135123015, | |
| "learning_rate": 3.5515778726382967e-06, | |
| "loss": 0.3125, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.255591054313099, | |
| "grad_norm": 0.577769744560431, | |
| "learning_rate": 3.5232063076070377e-06, | |
| "loss": 0.2877, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.258785942492013, | |
| "grad_norm": 0.5337396711928698, | |
| "learning_rate": 3.494924267897585e-06, | |
| "loss": 0.2254, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.2619808306709266, | |
| "grad_norm": 0.5557081825995588, | |
| "learning_rate": 3.4667321444377334e-06, | |
| "loss": 0.2455, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.2651757188498403, | |
| "grad_norm": 0.5763386812267897, | |
| "learning_rate": 3.4386303269124142e-06, | |
| "loss": 0.2458, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.268370607028754, | |
| "grad_norm": 0.5750561585588471, | |
| "learning_rate": 3.4106192037583176e-06, | |
| "loss": 0.2817, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.2715654952076676, | |
| "grad_norm": 0.5403613324177238, | |
| "learning_rate": 3.382699162158498e-06, | |
| "loss": 0.2796, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.2747603833865817, | |
| "grad_norm": 0.49899972805059195, | |
| "learning_rate": 3.3548705880370538e-06, | |
| "loss": 0.2606, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.2779552715654954, | |
| "grad_norm": 0.5211130243599169, | |
| "learning_rate": 3.327133866053758e-06, | |
| "loss": 0.2393, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.281150159744409, | |
| "grad_norm": 0.5841971667091718, | |
| "learning_rate": 3.299489379598777e-06, | |
| "loss": 0.2831, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.2843450479233227, | |
| "grad_norm": 0.5719620976677046, | |
| "learning_rate": 3.271937510787343e-06, | |
| "loss": 0.2411, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.2875399361022364, | |
| "grad_norm": 0.540191979436821, | |
| "learning_rate": 3.2444786404544828e-06, | |
| "loss": 0.2698, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.29073482428115, | |
| "grad_norm": 0.6112105115397988, | |
| "learning_rate": 3.217113148149765e-06, | |
| "loss": 0.3048, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.2939297124600637, | |
| "grad_norm": 0.5771913514730167, | |
| "learning_rate": 3.1898414121320277e-06, | |
| "loss": 0.2752, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.297124600638978, | |
| "grad_norm": 0.5501110706666438, | |
| "learning_rate": 3.162663809364178e-06, | |
| "loss": 0.1977, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.3003194888178915, | |
| "grad_norm": 0.46927847463459355, | |
| "learning_rate": 3.135580715507961e-06, | |
| "loss": 0.2813, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.303514376996805, | |
| "grad_norm": 0.5043271742582294, | |
| "learning_rate": 3.1085925049187738e-06, | |
| "loss": 0.2815, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.306709265175719, | |
| "grad_norm": 0.47860976047812787, | |
| "learning_rate": 3.0816995506405e-06, | |
| "loss": 0.2394, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.3099041533546325, | |
| "grad_norm": 0.5772481927495404, | |
| "learning_rate": 3.0549022244003368e-06, | |
| "loss": 0.2441, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.313099041533546, | |
| "grad_norm": 0.5196025768297203, | |
| "learning_rate": 3.0282008966036647e-06, | |
| "loss": 0.2814, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.31629392971246, | |
| "grad_norm": 0.600980799509102, | |
| "learning_rate": 3.001595936328939e-06, | |
| "loss": 0.3186, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.319488817891374, | |
| "grad_norm": 0.5384170460565109, | |
| "learning_rate": 2.975087711322567e-06, | |
| "loss": 0.281, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.3226837060702876, | |
| "grad_norm": 0.5532736843981153, | |
| "learning_rate": 2.948676587993834e-06, | |
| "loss": 0.2801, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.3258785942492013, | |
| "grad_norm": 0.5448894944106978, | |
| "learning_rate": 2.9223629314098514e-06, | |
| "loss": 0.2722, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.329073482428115, | |
| "grad_norm": 0.5493854347649725, | |
| "learning_rate": 2.8961471052904855e-06, | |
| "loss": 0.2564, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.3322683706070286, | |
| "grad_norm": 0.5697309480878635, | |
| "learning_rate": 2.8700294720033486e-06, | |
| "loss": 0.2384, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.3354632587859427, | |
| "grad_norm": 0.5481372632300319, | |
| "learning_rate": 2.8440103925587904e-06, | |
| "loss": 0.3108, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.3386581469648564, | |
| "grad_norm": 0.5951546959209056, | |
| "learning_rate": 2.8180902266048947e-06, | |
| "loss": 0.288, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.34185303514377, | |
| "grad_norm": 0.5593159640593378, | |
| "learning_rate": 2.792269332422517e-06, | |
| "loss": 0.2797, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.3450479233226837, | |
| "grad_norm": 0.548130461510605, | |
| "learning_rate": 2.7665480669203383e-06, | |
| "loss": 0.291, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.3482428115015974, | |
| "grad_norm": 0.50647579879316, | |
| "learning_rate": 2.740926785629915e-06, | |
| "loss": 0.2884, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.351437699680511, | |
| "grad_norm": 0.5098520788004228, | |
| "learning_rate": 2.7154058427007822e-06, | |
| "loss": 0.2657, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.3546325878594248, | |
| "grad_norm": 0.5226212845956969, | |
| "learning_rate": 2.6899855908955464e-06, | |
| "loss": 0.3284, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.357827476038339, | |
| "grad_norm": 0.5311184509918885, | |
| "learning_rate": 2.6646663815850092e-06, | |
| "loss": 0.2598, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.3610223642172525, | |
| "grad_norm": 0.5427537786360982, | |
| "learning_rate": 2.639448564743328e-06, | |
| "loss": 0.253, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 2.364217252396166, | |
| "grad_norm": 0.5185119895770091, | |
| "learning_rate": 2.614332488943152e-06, | |
| "loss": 0.2433, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 2.36741214057508, | |
| "grad_norm": 0.5551939240347452, | |
| "learning_rate": 2.5893185013508195e-06, | |
| "loss": 0.2509, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 2.3706070287539935, | |
| "grad_norm": 0.5190495921348532, | |
| "learning_rate": 2.564406947721566e-06, | |
| "loss": 0.2895, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 2.373801916932907, | |
| "grad_norm": 0.5382445162395503, | |
| "learning_rate": 2.539598172394727e-06, | |
| "loss": 0.2921, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 2.376996805111821, | |
| "grad_norm": 0.5782994469073974, | |
| "learning_rate": 2.514892518288988e-06, | |
| "loss": 0.2628, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 2.380191693290735, | |
| "grad_norm": 0.5004402464282985, | |
| "learning_rate": 2.490290326897653e-06, | |
| "loss": 0.2707, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 2.3833865814696487, | |
| "grad_norm": 0.6037080436677754, | |
| "learning_rate": 2.4657919382839034e-06, | |
| "loss": 0.2648, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 2.3865814696485623, | |
| "grad_norm": 0.538405984748164, | |
| "learning_rate": 2.4413976910761117e-06, | |
| "loss": 0.3182, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 2.389776357827476, | |
| "grad_norm": 0.5439877759038688, | |
| "learning_rate": 2.417107922463169e-06, | |
| "loss": 0.3184, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 2.3929712460063897, | |
| "grad_norm": 0.583035854090823, | |
| "learning_rate": 2.3929229681898005e-06, | |
| "loss": 0.2202, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 2.3961661341853033, | |
| "grad_norm": 0.5884416492925523, | |
| "learning_rate": 2.3688431625519417e-06, | |
| "loss": 0.283, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 2.3993610223642174, | |
| "grad_norm": 0.5355399468230221, | |
| "learning_rate": 2.3448688383921183e-06, | |
| "loss": 0.2694, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 2.402555910543131, | |
| "grad_norm": 0.47662118367586076, | |
| "learning_rate": 2.3210003270948367e-06, | |
| "loss": 0.2694, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 2.405750798722045, | |
| "grad_norm": 0.4805391075501156, | |
| "learning_rate": 2.297237958582005e-06, | |
| "loss": 0.2775, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.4089456869009584, | |
| "grad_norm": 0.5202726458297047, | |
| "learning_rate": 2.2735820613083837e-06, | |
| "loss": 0.2687, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.412140575079872, | |
| "grad_norm": 0.5392847644844359, | |
| "learning_rate": 2.2500329622570296e-06, | |
| "loss": 0.2961, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 2.415335463258786, | |
| "grad_norm": 0.5240783463371834, | |
| "learning_rate": 2.2265909869347823e-06, | |
| "loss": 0.3119, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 2.4185303514377, | |
| "grad_norm": 0.5569505587936721, | |
| "learning_rate": 2.2032564593677773e-06, | |
| "loss": 0.261, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 2.4217252396166136, | |
| "grad_norm": 0.515386283950919, | |
| "learning_rate": 2.180029702096946e-06, | |
| "loss": 0.2909, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 2.4249201277955272, | |
| "grad_norm": 0.5645060312581792, | |
| "learning_rate": 2.156911036173568e-06, | |
| "loss": 0.2765, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 2.428115015974441, | |
| "grad_norm": 0.5536403998357785, | |
| "learning_rate": 2.1339007811548395e-06, | |
| "loss": 0.2152, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.4313099041533546, | |
| "grad_norm": 0.5157472478536549, | |
| "learning_rate": 2.110999255099444e-06, | |
| "loss": 0.2869, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 2.4345047923322682, | |
| "grad_norm": 0.5421226829224044, | |
| "learning_rate": 2.088206774563161e-06, | |
| "loss": 0.2442, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 2.437699680511182, | |
| "grad_norm": 0.5221858315858158, | |
| "learning_rate": 2.065523654594497e-06, | |
| "loss": 0.2905, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 2.440894568690096, | |
| "grad_norm": 0.5401675654904415, | |
| "learning_rate": 2.0429502087303164e-06, | |
| "loss": 0.2916, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 2.4440894568690097, | |
| "grad_norm": 0.5219535133275498, | |
| "learning_rate": 2.020486748991526e-06, | |
| "loss": 0.268, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 2.4472843450479234, | |
| "grad_norm": 0.5709710992260904, | |
| "learning_rate": 1.998133585878743e-06, | |
| "loss": 0.2779, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 2.450479233226837, | |
| "grad_norm": 0.4824676976132383, | |
| "learning_rate": 1.9758910283680134e-06, | |
| "loss": 0.2553, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 2.4536741214057507, | |
| "grad_norm": 0.5109201333375051, | |
| "learning_rate": 1.9537593839065484e-06, | |
| "loss": 0.3015, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 2.4568690095846644, | |
| "grad_norm": 0.5863480432635425, | |
| "learning_rate": 1.931738958408457e-06, | |
| "loss": 0.2495, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 2.460063897763578, | |
| "grad_norm": 0.5220281797772327, | |
| "learning_rate": 1.9098300562505266e-06, | |
| "loss": 0.2843, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.463258785942492, | |
| "grad_norm": 0.5508369426630649, | |
| "learning_rate": 1.8880329802680253e-06, | |
| "loss": 0.2824, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 2.466453674121406, | |
| "grad_norm": 0.5514083887348668, | |
| "learning_rate": 1.866348031750499e-06, | |
| "loss": 0.2532, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 2.4696485623003195, | |
| "grad_norm": 0.5007229218626282, | |
| "learning_rate": 1.844775510437613e-06, | |
| "loss": 0.2775, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 2.472843450479233, | |
| "grad_norm": 0.5654037998263154, | |
| "learning_rate": 1.8233157145150183e-06, | |
| "loss": 0.2169, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 2.476038338658147, | |
| "grad_norm": 0.5079251173769247, | |
| "learning_rate": 1.8019689406102125e-06, | |
| "loss": 0.275, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 2.479233226837061, | |
| "grad_norm": 0.7598992351846287, | |
| "learning_rate": 1.7807354837884583e-06, | |
| "loss": 0.2886, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 2.4824281150159746, | |
| "grad_norm": 0.5402500169735379, | |
| "learning_rate": 1.759615637548686e-06, | |
| "loss": 0.2356, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 2.4856230031948883, | |
| "grad_norm": 0.5153724379287912, | |
| "learning_rate": 1.7386096938194585e-06, | |
| "loss": 0.3273, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 2.488817891373802, | |
| "grad_norm": 0.5656481880772636, | |
| "learning_rate": 1.717717942954914e-06, | |
| "loss": 0.2978, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 2.4920127795527156, | |
| "grad_norm": 0.5600985228279312, | |
| "learning_rate": 1.6969406737307625e-06, | |
| "loss": 0.2914, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.4952076677316293, | |
| "grad_norm": 0.5271143479597251, | |
| "learning_rate": 1.6762781733403034e-06, | |
| "loss": 0.256, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 2.498402555910543, | |
| "grad_norm": 0.5290034655652396, | |
| "learning_rate": 1.6557307273904355e-06, | |
| "loss": 0.3164, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 2.501597444089457, | |
| "grad_norm": 0.5282890946530933, | |
| "learning_rate": 1.6352986198977327e-06, | |
| "loss": 0.2548, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 2.5047923322683707, | |
| "grad_norm": 0.5103345129350556, | |
| "learning_rate": 1.614982133284495e-06, | |
| "loss": 0.2616, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 2.5079872204472844, | |
| "grad_norm": 0.49079582574254493, | |
| "learning_rate": 1.5947815483748573e-06, | |
| "loss": 0.2876, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 2.511182108626198, | |
| "grad_norm": 0.5492629339702724, | |
| "learning_rate": 1.5746971443909143e-06, | |
| "loss": 0.2269, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 2.5143769968051117, | |
| "grad_norm": 0.5084290858342608, | |
| "learning_rate": 1.5547291989488444e-06, | |
| "loss": 0.2656, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 2.5175718849840254, | |
| "grad_norm": 0.4983976795381335, | |
| "learning_rate": 1.5348779880550812e-06, | |
| "loss": 0.2894, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 2.520766773162939, | |
| "grad_norm": 0.5238056353329883, | |
| "learning_rate": 1.5151437861025032e-06, | |
| "loss": 0.2362, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 2.523961661341853, | |
| "grad_norm": 0.518108096230708, | |
| "learning_rate": 1.49552686586663e-06, | |
| "loss": 0.2854, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.527156549520767, | |
| "grad_norm": 0.5381430513007909, | |
| "learning_rate": 1.4760274985018619e-06, | |
| "loss": 0.2353, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 2.5303514376996805, | |
| "grad_norm": 0.4929746328002691, | |
| "learning_rate": 1.4566459535377254e-06, | |
| "loss": 0.2667, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 2.533546325878594, | |
| "grad_norm": 0.5046421029775986, | |
| "learning_rate": 1.4373824988751473e-06, | |
| "loss": 0.3179, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 2.536741214057508, | |
| "grad_norm": 0.5157889946614715, | |
| "learning_rate": 1.4182374007827605e-06, | |
| "loss": 0.2662, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 2.539936102236422, | |
| "grad_norm": 0.523857835711976, | |
| "learning_rate": 1.3992109238932106e-06, | |
| "loss": 0.2656, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 2.543130990415335, | |
| "grad_norm": 0.5628959482047667, | |
| "learning_rate": 1.3803033311995072e-06, | |
| "loss": 0.3533, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 2.5463258785942493, | |
| "grad_norm": 0.5176298006219728, | |
| "learning_rate": 1.361514884051388e-06, | |
| "loss": 0.2392, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 2.549520766773163, | |
| "grad_norm": 0.5305179409745193, | |
| "learning_rate": 1.3428458421517031e-06, | |
| "loss": 0.2874, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 2.5527156549520766, | |
| "grad_norm": 0.5133886968214696, | |
| "learning_rate": 1.324296463552821e-06, | |
| "loss": 0.2852, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 2.5559105431309903, | |
| "grad_norm": 0.5299916695554814, | |
| "learning_rate": 1.3058670046530775e-06, | |
| "loss": 0.2755, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.559105431309904, | |
| "grad_norm": 0.48847954753189066, | |
| "learning_rate": 1.2875577201932132e-06, | |
| "loss": 0.2565, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 2.562300319488818, | |
| "grad_norm": 0.5247751090570331, | |
| "learning_rate": 1.2693688632528623e-06, | |
| "loss": 0.286, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 2.5654952076677318, | |
| "grad_norm": 0.5452507249325443, | |
| "learning_rate": 1.2513006852470554e-06, | |
| "loss": 0.2564, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 2.5686900958466454, | |
| "grad_norm": 0.5355228334038434, | |
| "learning_rate": 1.2333534359227383e-06, | |
| "loss": 0.248, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 2.571884984025559, | |
| "grad_norm": 0.5077476188872533, | |
| "learning_rate": 1.2155273633553222e-06, | |
| "loss": 0.2174, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 2.5750798722044728, | |
| "grad_norm": 0.5408303047151787, | |
| "learning_rate": 1.1978227139452624e-06, | |
| "loss": 0.2371, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 2.5782747603833864, | |
| "grad_norm": 0.5799598867804451, | |
| "learning_rate": 1.1802397324146375e-06, | |
| "loss": 0.2508, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 2.5814696485623, | |
| "grad_norm": 0.5223479303361819, | |
| "learning_rate": 1.1627786618037761e-06, | |
| "loss": 0.2861, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 2.584664536741214, | |
| "grad_norm": 0.539996486264599, | |
| "learning_rate": 1.1454397434679022e-06, | |
| "loss": 0.2267, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 2.587859424920128, | |
| "grad_norm": 0.5811749141190907, | |
| "learning_rate": 1.1282232170737862e-06, | |
| "loss": 0.2569, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.5910543130990416, | |
| "grad_norm": 0.5179064965950383, | |
| "learning_rate": 1.1111293205964413e-06, | |
| "loss": 0.3336, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.594249201277955, | |
| "grad_norm": 0.5171565494729409, | |
| "learning_rate": 1.0941582903158344e-06, | |
| "loss": 0.2548, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.597444089456869, | |
| "grad_norm": 0.5396558702613092, | |
| "learning_rate": 1.0773103608136126e-06, | |
| "loss": 0.2928, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.600638977635783, | |
| "grad_norm": 0.48371322074600365, | |
| "learning_rate": 1.060585764969867e-06, | |
| "loss": 0.2658, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.6038338658146962, | |
| "grad_norm": 0.5173966192381088, | |
| "learning_rate": 1.0439847339599173e-06, | |
| "loss": 0.2858, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.6070287539936103, | |
| "grad_norm": 0.6191072192150288, | |
| "learning_rate": 1.0275074972511034e-06, | |
| "loss": 0.3068, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.610223642172524, | |
| "grad_norm": 0.49890486021543723, | |
| "learning_rate": 1.0111542825996245e-06, | |
| "loss": 0.2881, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.6134185303514377, | |
| "grad_norm": 0.4826406747558694, | |
| "learning_rate": 9.949253160473914e-07, | |
| "loss": 0.3192, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.6166134185303513, | |
| "grad_norm": 0.49492978407234944, | |
| "learning_rate": 9.788208219188932e-07, | |
| "loss": 0.2752, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.619808306709265, | |
| "grad_norm": 0.5247273176934111, | |
| "learning_rate": 9.628410228181085e-07, | |
| "loss": 0.2852, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.623003194888179, | |
| "grad_norm": 0.5305670666351503, | |
| "learning_rate": 9.469861396254154e-07, | |
| "loss": 0.2736, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.626198083067093, | |
| "grad_norm": 0.5187931213108388, | |
| "learning_rate": 9.312563914945461e-07, | |
| "loss": 0.2896, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.6293929712460065, | |
| "grad_norm": 0.49768159911322457, | |
| "learning_rate": 9.156519958495602e-07, | |
| "loss": 0.2846, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.63258785942492, | |
| "grad_norm": 0.5290288470627704, | |
| "learning_rate": 9.001731683818338e-07, | |
| "loss": 0.277, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.635782747603834, | |
| "grad_norm": 0.4902749919843502, | |
| "learning_rate": 8.848201230470777e-07, | |
| "loss": 0.3221, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.6389776357827475, | |
| "grad_norm": 0.5040777631439952, | |
| "learning_rate": 8.695930720623857e-07, | |
| "loss": 0.3183, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.642172523961661, | |
| "grad_norm": 0.5523788380454391, | |
| "learning_rate": 8.544922259032951e-07, | |
| "loss": 0.2696, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.6453674121405752, | |
| "grad_norm": 0.5077339512969868, | |
| "learning_rate": 8.395177933008802e-07, | |
| "loss": 0.2786, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.648562300319489, | |
| "grad_norm": 0.5148137411171129, | |
| "learning_rate": 8.246699812388714e-07, | |
| "loss": 0.2808, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.6517571884984026, | |
| "grad_norm": 0.5326084811298537, | |
| "learning_rate": 8.099489949507843e-07, | |
| "loss": 0.3013, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.6549520766773163, | |
| "grad_norm": 0.499399523060783, | |
| "learning_rate": 7.953550379170893e-07, | |
| "loss": 0.2509, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.65814696485623, | |
| "grad_norm": 0.4895467308504159, | |
| "learning_rate": 7.808883118624012e-07, | |
| "loss": 0.2573, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.661341853035144, | |
| "grad_norm": 0.46971413535806866, | |
| "learning_rate": 7.665490167526857e-07, | |
| "loss": 0.2546, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.6645367412140573, | |
| "grad_norm": 0.5062579399060294, | |
| "learning_rate": 7.523373507924947e-07, | |
| "loss": 0.2895, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.6677316293929714, | |
| "grad_norm": 0.49408089475640665, | |
| "learning_rate": 7.382535104222366e-07, | |
| "loss": 0.3381, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.670926517571885, | |
| "grad_norm": 0.5079489630283403, | |
| "learning_rate": 7.242976903154442e-07, | |
| "loss": 0.2792, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.6741214057507987, | |
| "grad_norm": 0.5367172556542773, | |
| "learning_rate": 7.104700833761014e-07, | |
| "loss": 0.2786, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.6773162939297124, | |
| "grad_norm": 0.5173230966750194, | |
| "learning_rate": 6.967708807359664e-07, | |
| "loss": 0.2594, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.680511182108626, | |
| "grad_norm": 0.5793340060415133, | |
| "learning_rate": 6.83200271751927e-07, | |
| "loss": 0.2284, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.68370607028754, | |
| "grad_norm": 0.5594468901566473, | |
| "learning_rate": 6.697584440033989e-07, | |
| "loss": 0.2981, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.686900958466454, | |
| "grad_norm": 0.49965854247621927, | |
| "learning_rate": 6.564455832897099e-07, | |
| "loss": 0.3002, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.6900958466453675, | |
| "grad_norm": 0.626455409492764, | |
| "learning_rate": 6.432618736275553e-07, | |
| "loss": 0.2974, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.693290734824281, | |
| "grad_norm": 0.5263238830296258, | |
| "learning_rate": 6.302074972484362e-07, | |
| "loss": 0.2628, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.696485623003195, | |
| "grad_norm": 0.5454158557360471, | |
| "learning_rate": 6.17282634596148e-07, | |
| "loss": 0.2253, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.6996805111821085, | |
| "grad_norm": 0.5073777391128576, | |
| "learning_rate": 6.044874643242904e-07, | |
| "loss": 0.2957, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 2.702875399361022, | |
| "grad_norm": 0.4905589432687944, | |
| "learning_rate": 5.91822163293787e-07, | |
| "loss": 0.2735, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 2.7060702875399363, | |
| "grad_norm": 0.5447677478507619, | |
| "learning_rate": 5.792869065704553e-07, | |
| "loss": 0.2489, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.70926517571885, | |
| "grad_norm": 0.5367437184679952, | |
| "learning_rate": 5.668818674225684e-07, | |
| "loss": 0.2868, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.7124600638977636, | |
| "grad_norm": 0.4634315253223173, | |
| "learning_rate": 5.546072173184791e-07, | |
| "loss": 0.2666, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 2.7156549520766773, | |
| "grad_norm": 0.48495758547770107, | |
| "learning_rate": 5.424631259242352e-07, | |
| "loss": 0.2408, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.718849840255591, | |
| "grad_norm": 0.5365013143998166, | |
| "learning_rate": 5.304497611012415e-07, | |
| "loss": 0.2742, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 2.722044728434505, | |
| "grad_norm": 0.5619740516471143, | |
| "learning_rate": 5.185672889039395e-07, | |
| "loss": 0.2492, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 2.7252396166134183, | |
| "grad_norm": 0.4959551051863538, | |
| "learning_rate": 5.068158735775098e-07, | |
| "loss": 0.2601, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 2.7284345047923324, | |
| "grad_norm": 0.5614699103586969, | |
| "learning_rate": 4.951956775556e-07, | |
| "loss": 0.2893, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 2.731629392971246, | |
| "grad_norm": 0.5478783753425022, | |
| "learning_rate": 4.837068614580875e-07, | |
| "loss": 0.2832, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 2.7348242811501597, | |
| "grad_norm": 0.5084538253699109, | |
| "learning_rate": 4.7234958408884925e-07, | |
| "loss": 0.2785, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 2.7380191693290734, | |
| "grad_norm": 0.4833191042095689, | |
| "learning_rate": 4.611240024335706e-07, | |
| "loss": 0.2525, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 2.741214057507987, | |
| "grad_norm": 0.5557156275435591, | |
| "learning_rate": 4.5003027165758216e-07, | |
| "loss": 0.2763, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 2.744408945686901, | |
| "grad_norm": 0.49396712066666637, | |
| "learning_rate": 4.3906854510370245e-07, | |
| "loss": 0.2961, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 2.747603833865815, | |
| "grad_norm": 0.5218855996258477, | |
| "learning_rate": 4.282389742901283e-07, | |
| "loss": 0.3145, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.7507987220447285, | |
| "grad_norm": 0.4909336132792226, | |
| "learning_rate": 4.1754170890833777e-07, | |
| "loss": 0.2901, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 2.753993610223642, | |
| "grad_norm": 0.532978016110172, | |
| "learning_rate": 4.069768968210186e-07, | |
| "loss": 0.2767, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 2.757188498402556, | |
| "grad_norm": 0.5284804740851335, | |
| "learning_rate": 3.96544684060024e-07, | |
| "loss": 0.3042, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 2.7603833865814695, | |
| "grad_norm": 0.5625186952805096, | |
| "learning_rate": 3.862452148243623e-07, | |
| "loss": 0.3032, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 2.763578274760383, | |
| "grad_norm": 0.5526705896948308, | |
| "learning_rate": 3.760786314781917e-07, | |
| "loss": 0.2162, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 2.7667731629392973, | |
| "grad_norm": 0.49005060610573936, | |
| "learning_rate": 3.6604507454886083e-07, | |
| "loss": 0.3009, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 2.769968051118211, | |
| "grad_norm": 0.5006073443686324, | |
| "learning_rate": 3.561446827249659e-07, | |
| "loss": 0.2863, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 2.7731629392971247, | |
| "grad_norm": 0.5313378923234043, | |
| "learning_rate": 3.463775928544288e-07, | |
| "loss": 0.2681, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 2.7763578274760383, | |
| "grad_norm": 0.45657247655517647, | |
| "learning_rate": 3.367439399426087e-07, | |
| "loss": 0.2833, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 2.779552715654952, | |
| "grad_norm": 0.5438646304278568, | |
| "learning_rate": 3.2724385715043885e-07, | |
| "loss": 0.2821, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.7827476038338657, | |
| "grad_norm": 0.4861986736846954, | |
| "learning_rate": 3.1787747579257623e-07, | |
| "loss": 0.302, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 2.7859424920127793, | |
| "grad_norm": 0.5013004618464064, | |
| "learning_rate": 3.0864492533560167e-07, | |
| "loss": 0.3073, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 2.7891373801916934, | |
| "grad_norm": 0.5139512438606135, | |
| "learning_rate": 2.9954633339621564e-07, | |
| "loss": 0.3061, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 2.792332268370607, | |
| "grad_norm": 0.4967084848014566, | |
| "learning_rate": 2.905818257394799e-07, | |
| "loss": 0.2385, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 2.7955271565495208, | |
| "grad_norm": 0.5583189331928233, | |
| "learning_rate": 2.8175152627708425e-07, | |
| "loss": 0.2576, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 2.7987220447284344, | |
| "grad_norm": 0.5220482463238036, | |
| "learning_rate": 2.730555570656246e-07, | |
| "loss": 0.2427, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 2.801916932907348, | |
| "grad_norm": 0.47836855934512534, | |
| "learning_rate": 2.6449403830492105e-07, | |
| "loss": 0.2559, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 2.8051118210862622, | |
| "grad_norm": 0.49773019706098176, | |
| "learning_rate": 2.560670883363592e-07, | |
| "loss": 0.2364, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 2.8083067092651754, | |
| "grad_norm": 0.49288457060529334, | |
| "learning_rate": 2.4777482364124695e-07, | |
| "loss": 0.2964, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 2.8115015974440896, | |
| "grad_norm": 0.5198746225532588, | |
| "learning_rate": 2.39617358839207e-07, | |
| "loss": 0.2658, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.8146964856230032, | |
| "grad_norm": 0.4799151728900049, | |
| "learning_rate": 2.315948066866003e-07, | |
| "loss": 0.2402, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 2.817891373801917, | |
| "grad_norm": 0.465390503048871, | |
| "learning_rate": 2.2370727807495496e-07, | |
| "loss": 0.3056, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 2.8210862619808306, | |
| "grad_norm": 0.5382332293961877, | |
| "learning_rate": 2.1595488202944104e-07, | |
| "loss": 0.2653, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 2.8242811501597442, | |
| "grad_norm": 0.5068975952175465, | |
| "learning_rate": 2.0833772570736376e-07, | |
| "loss": 0.3053, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 2.8274760383386583, | |
| "grad_norm": 0.4757131988181981, | |
| "learning_rate": 2.0085591439667928e-07, | |
| "loss": 0.346, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 2.830670926517572, | |
| "grad_norm": 0.5434560209318039, | |
| "learning_rate": 1.935095515145391e-07, | |
| "loss": 0.2642, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 2.8338658146964857, | |
| "grad_norm": 0.5099744081674852, | |
| "learning_rate": 1.8629873860586567e-07, | |
| "loss": 0.2416, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 2.8370607028753994, | |
| "grad_norm": 0.46460652007791287, | |
| "learning_rate": 1.7922357534194356e-07, | |
| "loss": 0.2865, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 2.840255591054313, | |
| "grad_norm": 0.6038585562899795, | |
| "learning_rate": 1.7228415951904165e-07, | |
| "loss": 0.317, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 2.8434504792332267, | |
| "grad_norm": 0.4970433072203688, | |
| "learning_rate": 1.6548058705706528e-07, | |
| "loss": 0.2634, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 2.8466453674121404, | |
| "grad_norm": 0.45087779573435216, | |
| "learning_rate": 1.5881295199822953e-07, | |
| "loss": 0.2869, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 2.8498402555910545, | |
| "grad_norm": 0.4712297048952234, | |
| "learning_rate": 1.5228134650575265e-07, | |
| "loss": 0.3082, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 2.853035143769968, | |
| "grad_norm": 0.5542853030279212, | |
| "learning_rate": 1.458858608625957e-07, | |
| "loss": 0.2609, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 2.856230031948882, | |
| "grad_norm": 0.5299351884184478, | |
| "learning_rate": 1.3962658347019819e-07, | |
| "loss": 0.2796, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 2.8594249201277955, | |
| "grad_norm": 1.7034789905383376, | |
| "learning_rate": 1.335036008472701e-07, | |
| "loss": 0.3549, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 2.862619808306709, | |
| "grad_norm": 0.4941137486267854, | |
| "learning_rate": 1.2751699762858837e-07, | |
| "loss": 0.2473, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 2.8658146964856233, | |
| "grad_norm": 0.4652931456111103, | |
| "learning_rate": 1.2166685656382905e-07, | |
| "loss": 0.3353, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 2.8690095846645365, | |
| "grad_norm": 0.4942389320669494, | |
| "learning_rate": 1.1595325851642137e-07, | |
| "loss": 0.2944, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 2.8722044728434506, | |
| "grad_norm": 0.48547745486914246, | |
| "learning_rate": 1.103762824624377e-07, | |
| "loss": 0.2688, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.8753993610223643, | |
| "grad_norm": 0.5753452793142592, | |
| "learning_rate": 1.0493600548948879e-07, | |
| "loss": 0.2566, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.878594249201278, | |
| "grad_norm": 0.4972571947816792, | |
| "learning_rate": 9.963250279567239e-08, | |
| "loss": 0.2943, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.8817891373801916, | |
| "grad_norm": 0.5467418327312579, | |
| "learning_rate": 9.446584768852407e-08, | |
| "loss": 0.3408, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.8849840255591053, | |
| "grad_norm": 0.540785739575046, | |
| "learning_rate": 8.943611158400479e-08, | |
| "loss": 0.2589, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.8881789137380194, | |
| "grad_norm": 0.4998267472039897, | |
| "learning_rate": 8.454336400552154e-08, | |
| "loss": 0.302, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.891373801916933, | |
| "grad_norm": 0.5000407109550236, | |
| "learning_rate": 7.978767258295494e-08, | |
| "loss": 0.2799, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.8945686900958467, | |
| "grad_norm": 0.4826382651974826, | |
| "learning_rate": 7.51691030517343e-08, | |
| "loss": 0.3049, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.8977635782747604, | |
| "grad_norm": 0.5533789623454506, | |
| "learning_rate": 7.068771925192286e-08, | |
| "loss": 0.2886, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.900958466453674, | |
| "grad_norm": 0.4856062356070542, | |
| "learning_rate": 6.634358312733957e-08, | |
| "loss": 0.3344, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.9041533546325877, | |
| "grad_norm": 0.5234411083127519, | |
| "learning_rate": 6.21367547246976e-08, | |
| "loss": 0.2975, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.9073482428115014, | |
| "grad_norm": 0.5042769247684148, | |
| "learning_rate": 5.806729219278051e-08, | |
| "loss": 0.3385, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.9105431309904155, | |
| "grad_norm": 0.4910677124873368, | |
| "learning_rate": 5.413525178163292e-08, | |
| "loss": 0.3113, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.913738019169329, | |
| "grad_norm": 0.5181476682018931, | |
| "learning_rate": 5.034068784178892e-08, | |
| "loss": 0.2742, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.916932907348243, | |
| "grad_norm": 0.5220994506844163, | |
| "learning_rate": 4.6683652823513725e-08, | |
| "loss": 0.2551, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.9201277955271565, | |
| "grad_norm": 0.5176699460691047, | |
| "learning_rate": 4.316419727608434e-08, | |
| "loss": 0.2465, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.92332268370607, | |
| "grad_norm": 0.5253749788385152, | |
| "learning_rate": 3.9782369847088944e-08, | |
| "loss": 0.2342, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.9265175718849843, | |
| "grad_norm": 0.49606392975017294, | |
| "learning_rate": 3.653821728175522e-08, | |
| "loss": 0.2832, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.9297124600638975, | |
| "grad_norm": 0.48632265213078496, | |
| "learning_rate": 3.3431784422300884e-08, | |
| "loss": 0.2869, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.9329073482428116, | |
| "grad_norm": 0.5506996313847263, | |
| "learning_rate": 3.046311420731751e-08, | |
| "loss": 0.2121, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.9361022364217253, | |
| "grad_norm": 0.49721273548429923, | |
| "learning_rate": 2.763224767117767e-08, | |
| "loss": 0.3057, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.939297124600639, | |
| "grad_norm": 0.5242997933141604, | |
| "learning_rate": 2.4939223943463153e-08, | |
| "loss": 0.2932, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.9424920127795526, | |
| "grad_norm": 0.48394661634360336, | |
| "learning_rate": 2.2384080248429863e-08, | |
| "loss": 0.3203, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.9456869009584663, | |
| "grad_norm": 0.5321486189218536, | |
| "learning_rate": 1.9966851904487104e-08, | |
| "loss": 0.2875, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.9488817891373804, | |
| "grad_norm": 0.54904053688612, | |
| "learning_rate": 1.768757232371576e-08, | |
| "loss": 0.2645, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.952076677316294, | |
| "grad_norm": 0.48065835080140534, | |
| "learning_rate": 1.554627301140199e-08, | |
| "loss": 0.2675, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.9552715654952078, | |
| "grad_norm": 0.5041424505746223, | |
| "learning_rate": 1.3542983565600909e-08, | |
| "loss": 0.2912, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.9584664536741214, | |
| "grad_norm": 0.5222550703481469, | |
| "learning_rate": 1.1677731676733584e-08, | |
| "loss": 0.2605, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.961661341853035, | |
| "grad_norm": 0.5298221402096054, | |
| "learning_rate": 9.950543127198454e-09, | |
| "loss": 0.2484, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.9648562300319488, | |
| "grad_norm": 0.5060955481899465, | |
| "learning_rate": 8.361441791016056e-09, | |
| "loss": 0.2615, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.9680511182108624, | |
| "grad_norm": 0.5013811699156504, | |
| "learning_rate": 6.910449633501515e-09, | |
| "loss": 0.2774, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.9712460063897765, | |
| "grad_norm": 0.49244223044762647, | |
| "learning_rate": 5.597586710957004e-09, | |
| "loss": 0.323, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.97444089456869, | |
| "grad_norm": 0.46269666655252023, | |
| "learning_rate": 4.422871170398635e-09, | |
| "loss": 0.2762, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.977635782747604, | |
| "grad_norm": 0.4975777250067196, | |
| "learning_rate": 3.386319249303327e-09, | |
| "loss": 0.2915, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.9808306709265175, | |
| "grad_norm": 0.4774284368895711, | |
| "learning_rate": 2.48794527538454e-09, | |
| "loss": 0.3059, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.984025559105431, | |
| "grad_norm": 0.5051541199990559, | |
| "learning_rate": 1.7277616663946562e-09, | |
| "loss": 0.2846, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.987220447284345, | |
| "grad_norm": 0.5314827456627474, | |
| "learning_rate": 1.1057789299517841e-09, | |
| "loss": 0.2911, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.9904153354632586, | |
| "grad_norm": 0.5364041253421318, | |
| "learning_rate": 6.220056633987615e-10, | |
| "loss": 0.2783, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.9936102236421727, | |
| "grad_norm": 0.5356243310916116, | |
| "learning_rate": 2.764485536776995e-10, | |
| "loss": 0.3151, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.9968051118210863, | |
| "grad_norm": 0.49531175646741604, | |
| "learning_rate": 6.911237724560593e-11, | |
| "loss": 0.2669, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.47710393066482504, | |
| "learning_rate": 0.0, | |
| "loss": 0.262, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 939, | |
| "total_flos": 406391461183488.0, | |
| "train_loss": 0.4046504932661026, | |
| "train_runtime": 10117.0351, | |
| "train_samples_per_second": 2.964, | |
| "train_steps_per_second": 0.093 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 939, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 406391461183488.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
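
The record above matches the `trainer_state.json` format written by the Hugging Face `Trainer`: a `log_history` list of per-step entries (epoch, grad_norm, learning_rate, loss, step), a final summary entry (train_loss, train_runtime, throughput), and the trainer configuration. Below is a minimal sketch — not part of the trainer output — of how this data structure could be summarised into a per-epoch mean loss; it assumes the JSON has been saved to a file named `trainer_state.json`, which is an assumed path, not one given in the record.

```python
# Minimal sketch (assumption: the JSON above is stored as "trainer_state.json").
# Averages the logged per-step loss over each completed epoch and prints the
# overall train_loss reported in the final summary entry of log_history.
import json
import math
from collections import defaultdict

with open("trainer_state.json") as f:
    state = json.load(f)

losses_by_epoch = defaultdict(list)
for entry in state["log_history"]:
    if "loss" in entry:  # the final summary entry has no per-step "loss" key
        losses_by_epoch[math.ceil(entry["epoch"])].append(entry["loss"])

for epoch in sorted(losses_by_epoch):
    losses = losses_by_epoch[epoch]
    print(f"epoch {epoch}: mean loss {sum(losses) / len(losses):.4f} "
          f"over {len(losses)} steps")

# The averaged training loss is also reported directly in the last log entry.
print("reported train_loss:", state["log_history"][-1].get("train_loss"))
```

On this record the sketch would print three per-epoch means (939 steps over 3 epochs) followed by the reported train_loss of roughly 0.4047 from the summary entry.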