{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.018306636155606407,
  "eval_steps": 50,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.8306636155606407e-05,
      "grad_norm": 6.084787368774414,
      "learning_rate": 0.0,
      "loss": 0.6392,
      "step": 1
    },
    {
      "epoch": 3.6613272311212814e-05,
      "grad_norm": 8.000160217285156,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.8375,
      "step": 2
    },
    {
      "epoch": 5.491990846681922e-05,
      "grad_norm": 9.977635383605957,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.4734,
      "step": 3
    },
    {
      "epoch": 7.322654462242563e-05,
      "grad_norm": 6.60877799987793,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.7562,
      "step": 4
    },
    {
      "epoch": 9.153318077803204e-05,
      "grad_norm": 10.706132888793945,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.0019,
      "step": 5
    },
    {
      "epoch": 0.00010983981693363844,
      "grad_norm": 10.783683776855469,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.9186,
      "step": 6
    },
    {
      "epoch": 0.00012814645308924485,
      "grad_norm": 9.012995719909668,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.029,
      "step": 7
    },
    {
      "epoch": 0.00014645308924485126,
      "grad_norm": 2.700565814971924,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.4689,
      "step": 8
    },
    {
      "epoch": 0.00016475972540045766,
      "grad_norm": 7.6247029304504395,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.6074,
      "step": 9
    },
    {
      "epoch": 0.00018306636155606407,
      "grad_norm": 6.389184951782227,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.8861,
      "step": 10
    },
    {
      "epoch": 0.00020137299771167048,
      "grad_norm": 6.685202121734619,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.5527,
      "step": 11
    },
    {
      "epoch": 0.00021967963386727689,
      "grad_norm": 6.647484302520752,
      "learning_rate": 2.2e-06,
      "loss": 0.5459,
      "step": 12
    },
    {
      "epoch": 0.0002379862700228833,
      "grad_norm": 7.842504978179932,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.7269,
      "step": 13
    },
    {
      "epoch": 0.0002562929061784897,
      "grad_norm": 9.710944175720215,
      "learning_rate": 2.6e-06,
      "loss": 0.7054,
      "step": 14
    },
    {
      "epoch": 0.00027459954233409613,
      "grad_norm": 10.485865592956543,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.1884,
      "step": 15
    },
    {
      "epoch": 0.0002929061784897025,
      "grad_norm": 3.510791301727295,
      "learning_rate": 3e-06,
      "loss": 0.5241,
      "step": 16
    },
    {
      "epoch": 0.00031121281464530895,
      "grad_norm": 11.97901439666748,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.0192,
      "step": 17
    },
    {
      "epoch": 0.00032951945080091533,
      "grad_norm": 8.022102355957031,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.5731,
      "step": 18
    },
    {
      "epoch": 0.00034782608695652176,
      "grad_norm": 7.800467014312744,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.5588,
      "step": 19
    },
    {
      "epoch": 0.00036613272311212814,
      "grad_norm": 5.90647029876709,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.4773,
      "step": 20
    },
    {
      "epoch": 0.0003844393592677346,
      "grad_norm": 10.05663776397705,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.8649,
      "step": 21
    },
    {
      "epoch": 0.00040274599542334096,
      "grad_norm": 3.0191540718078613,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.2516,
      "step": 22
    },
    {
      "epoch": 0.0004210526315789474,
      "grad_norm": 8.017966270446777,
      "learning_rate": 4.4e-06,
      "loss": 0.7442,
      "step": 23
    },
    {
      "epoch": 0.00043935926773455377,
      "grad_norm": 6.663602352142334,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.6345,
      "step": 24
    },
    {
      "epoch": 0.0004576659038901602,
      "grad_norm": 8.327048301696777,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.6079,
      "step": 25
    },
    {
      "epoch": 0.0004759725400457666,
      "grad_norm": 15.23943042755127,
      "learning_rate": 5e-06,
      "loss": 0.9292,
      "step": 26
    },
    {
      "epoch": 0.000494279176201373,
      "grad_norm": 11.373315811157227,
      "learning_rate": 5.2e-06,
      "loss": 0.589,
      "step": 27
    },
    {
      "epoch": 0.0005125858123569794,
      "grad_norm": 9.903393745422363,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.4872,
      "step": 28
    },
    {
      "epoch": 0.0005308924485125858,
      "grad_norm": 17.918140411376953,
      "learning_rate": 5.600000000000001e-06,
      "loss": 1.1316,
      "step": 29
    },
    {
      "epoch": 0.0005491990846681923,
      "grad_norm": 7.308037281036377,
      "learning_rate": 5.8e-06,
      "loss": 0.389,
      "step": 30
    },
    {
      "epoch": 0.0005675057208237986,
      "grad_norm": 8.060295104980469,
      "learning_rate": 6e-06,
      "loss": 0.582,
      "step": 31
    },
    {
      "epoch": 0.000585812356979405,
      "grad_norm": 8.332834243774414,
      "learning_rate": 6.200000000000001e-06,
      "loss": 1.1902,
      "step": 32
    },
    {
      "epoch": 0.0006041189931350115,
      "grad_norm": 8.524596214294434,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.3101,
      "step": 33
    },
    {
      "epoch": 0.0006224256292906179,
      "grad_norm": 10.255339622497559,
      "learning_rate": 6.600000000000001e-06,
      "loss": 0.5759,
      "step": 34
    },
    {
      "epoch": 0.0006407322654462242,
      "grad_norm": 12.656609535217285,
      "learning_rate": 6.800000000000001e-06,
      "loss": 1.0338,
      "step": 35
    },
    {
      "epoch": 0.0006590389016018307,
      "grad_norm": 6.707568645477295,
      "learning_rate": 7e-06,
      "loss": 0.3607,
      "step": 36
    },
    {
      "epoch": 0.0006773455377574371,
      "grad_norm": 6.263491153717041,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.4405,
      "step": 37
    },
    {
      "epoch": 0.0006956521739130435,
      "grad_norm": 5.70526647567749,
      "learning_rate": 7.4e-06,
      "loss": 0.5682,
      "step": 38
    },
    {
      "epoch": 0.0007139588100686499,
      "grad_norm": 4.024957180023193,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.5816,
      "step": 39
    },
    {
      "epoch": 0.0007322654462242563,
      "grad_norm": 8.361418724060059,
      "learning_rate": 7.800000000000002e-06,
      "loss": 0.8628,
      "step": 40
    },
    {
      "epoch": 0.0007505720823798627,
      "grad_norm": 4.543780326843262,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.3442,
      "step": 41
    },
    {
      "epoch": 0.0007688787185354692,
      "grad_norm": 8.164483070373535,
      "learning_rate": 8.2e-06,
      "loss": 0.5429,
      "step": 42
    },
    {
      "epoch": 0.0007871853546910755,
      "grad_norm": 7.965429306030273,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.7367,
      "step": 43
    },
    {
      "epoch": 0.0008054919908466819,
      "grad_norm": 16.735258102416992,
      "learning_rate": 8.6e-06,
      "loss": 0.9958,
      "step": 44
    },
    {
      "epoch": 0.0008237986270022883,
      "grad_norm": 8.915084838867188,
      "learning_rate": 8.8e-06,
      "loss": 0.4859,
      "step": 45
    },
    {
      "epoch": 0.0008421052631578948,
      "grad_norm": 9.113476753234863,
      "learning_rate": 9e-06,
      "loss": 1.1163,
      "step": 46
    },
    {
      "epoch": 0.0008604118993135011,
      "grad_norm": 11.264951705932617,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.7696,
      "step": 47
    },
    {
      "epoch": 0.0008787185354691075,
      "grad_norm": 11.313631057739258,
      "learning_rate": 9.4e-06,
      "loss": 1.0535,
      "step": 48
    },
    {
      "epoch": 0.000897025171624714,
      "grad_norm": 2.9443583488464355,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.2185,
      "step": 49
    },
    {
      "epoch": 0.0009153318077803204,
      "grad_norm": 5.448572635650635,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.3536,
      "step": 50
    },
    {
      "epoch": 0.0009336384439359267,
      "grad_norm": 7.4222731590271,
      "learning_rate": 1e-05,
      "loss": 0.5806,
      "step": 51
    },
    {
      "epoch": 0.0009519450800915332,
      "grad_norm": 6.155364990234375,
      "learning_rate": 1.02e-05,
      "loss": 0.8657,
      "step": 52
    },
    {
      "epoch": 0.0009702517162471396,
      "grad_norm": 7.7926249504089355,
      "learning_rate": 1.04e-05,
      "loss": 0.8655,
      "step": 53
    },
    {
      "epoch": 0.000988558352402746,
      "grad_norm": 7.443905830383301,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 0.701,
      "step": 54
    },
    {
      "epoch": 0.0010068649885583525,
      "grad_norm": 5.242746353149414,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.3073,
      "step": 55
    },
    {
      "epoch": 0.0010251716247139588,
      "grad_norm": 3.663163661956787,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.4435,
      "step": 56
    },
    {
      "epoch": 0.0010434782608695651,
      "grad_norm": 6.215541839599609,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.4789,
      "step": 57
    },
    {
      "epoch": 0.0010617848970251717,
      "grad_norm": 8.457674026489258,
      "learning_rate": 1.14e-05,
      "loss": 0.9246,
      "step": 58
    },
    {
      "epoch": 0.001080091533180778,
      "grad_norm": 5.378246307373047,
      "learning_rate": 1.16e-05,
      "loss": 0.3759,
      "step": 59
    },
    {
      "epoch": 0.0010983981693363845,
      "grad_norm": 10.745940208435059,
      "learning_rate": 1.18e-05,
      "loss": 1.2856,
      "step": 60
    },
    {
      "epoch": 0.0011167048054919909,
      "grad_norm": 2.361469030380249,
      "learning_rate": 1.2e-05,
      "loss": 0.2053,
      "step": 61
    },
    {
      "epoch": 0.0011350114416475972,
      "grad_norm": 7.341982841491699,
      "learning_rate": 1.22e-05,
      "loss": 0.5777,
      "step": 62
    },
    {
      "epoch": 0.0011533180778032037,
      "grad_norm": 1.9843751192092896,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.1818,
      "step": 63
    },
    {
      "epoch": 0.00117162471395881,
      "grad_norm": 7.640516757965088,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 1.0094,
      "step": 64
    },
    {
      "epoch": 0.0011899313501144164,
      "grad_norm": 15.899582862854004,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 1.5402,
      "step": 65
    },
    {
      "epoch": 0.001208237986270023,
      "grad_norm": 6.037946701049805,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.5179,
      "step": 66
    },
    {
      "epoch": 0.0012265446224256293,
      "grad_norm": 1.6879180669784546,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.2435,
      "step": 67
    },
    {
      "epoch": 0.0012448512585812358,
      "grad_norm": 3.539372682571411,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 0.2219,
      "step": 68
    },
    {
      "epoch": 0.0012631578947368421,
      "grad_norm": 9.577892303466797,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 1.0921,
      "step": 69
    },
    {
      "epoch": 0.0012814645308924484,
      "grad_norm": 7.512180328369141,
      "learning_rate": 1.38e-05,
      "loss": 1.1851,
      "step": 70
    },
    {
      "epoch": 0.001299771167048055,
      "grad_norm": 6.8162841796875,
      "learning_rate": 1.4e-05,
      "loss": 0.5342,
      "step": 71
    },
    {
      "epoch": 0.0013180778032036613,
      "grad_norm": 13.964672088623047,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 0.8404,
      "step": 72
    },
    {
      "epoch": 0.0013363844393592676,
      "grad_norm": 10.541035652160645,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.9331,
      "step": 73
    },
    {
      "epoch": 0.0013546910755148742,
      "grad_norm": 5.94256067276001,
      "learning_rate": 1.46e-05,
      "loss": 0.7509,
      "step": 74
    },
    {
      "epoch": 0.0013729977116704805,
      "grad_norm": 6.224687099456787,
      "learning_rate": 1.48e-05,
      "loss": 0.5732,
      "step": 75
    },
    {
      "epoch": 0.001391304347826087,
      "grad_norm": 6.311083793640137,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.6101,
      "step": 76
    },
    {
      "epoch": 0.0014096109839816934,
      "grad_norm": 5.685784339904785,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.6369,
      "step": 77
    },
    {
      "epoch": 0.0014279176201372997,
      "grad_norm": 4.5122504234313965,
      "learning_rate": 1.54e-05,
      "loss": 0.5075,
      "step": 78
    },
    {
      "epoch": 0.0014462242562929062,
      "grad_norm": 4.641362190246582,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.3565,
      "step": 79
    },
    {
      "epoch": 0.0014645308924485126,
      "grad_norm": 6.5127177238464355,
      "learning_rate": 1.58e-05,
      "loss": 0.9192,
      "step": 80
    },
    {
      "epoch": 0.001482837528604119,
      "grad_norm": 4.107303619384766,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.458,
      "step": 81
    },
    {
      "epoch": 0.0015011441647597254,
      "grad_norm": 5.691149711608887,
      "learning_rate": 1.62e-05,
      "loss": 0.4345,
      "step": 82
    },
    {
      "epoch": 0.0015194508009153318,
      "grad_norm": 4.126266002655029,
      "learning_rate": 1.64e-05,
      "loss": 0.3686,
      "step": 83
    },
    {
      "epoch": 0.0015377574370709383,
      "grad_norm": 5.336889266967773,
      "learning_rate": 1.66e-05,
      "loss": 0.3863,
      "step": 84
    },
    {
      "epoch": 0.0015560640732265446,
      "grad_norm": 9.846256256103516,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.8348,
      "step": 85
    },
    {
      "epoch": 0.001574370709382151,
      "grad_norm": 3.1803081035614014,
      "learning_rate": 1.7e-05,
      "loss": 0.5672,
      "step": 86
    },
    {
      "epoch": 0.0015926773455377575,
      "grad_norm": 4.788447380065918,
      "learning_rate": 1.72e-05,
      "loss": 0.3226,
      "step": 87
    },
    {
      "epoch": 0.0016109839816933638,
      "grad_norm": 4.775529861450195,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 0.7328,
      "step": 88
    },
    {
      "epoch": 0.0016292906178489702,
      "grad_norm": 4.884747505187988,
      "learning_rate": 1.76e-05,
      "loss": 0.5481,
      "step": 89
    },
    {
      "epoch": 0.0016475972540045767,
      "grad_norm": 11.710714340209961,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 0.7645,
      "step": 90
    },
    {
      "epoch": 0.001665903890160183,
      "grad_norm": 7.733964443206787,
      "learning_rate": 1.8e-05,
      "loss": 0.6914,
      "step": 91
    },
    {
      "epoch": 0.0016842105263157896,
      "grad_norm": 3.1426684856414795,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 0.214,
      "step": 92
    },
    {
      "epoch": 0.001702517162471396,
      "grad_norm": 8.66888427734375,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.7952,
      "step": 93
    },
    {
      "epoch": 0.0017208237986270022,
      "grad_norm": 4.978145599365234,
      "learning_rate": 1.86e-05,
      "loss": 0.7386,
      "step": 94
    },
    {
      "epoch": 0.0017391304347826088,
      "grad_norm": 9.449872970581055,
      "learning_rate": 1.88e-05,
      "loss": 0.7716,
      "step": 95
    },
    {
      "epoch": 0.001757437070938215,
      "grad_norm": 5.055934429168701,
      "learning_rate": 1.9e-05,
      "loss": 0.6527,
      "step": 96
    },
    {
      "epoch": 0.0017757437070938214,
      "grad_norm": 3.6194591522216797,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.424,
      "step": 97
    },
    {
      "epoch": 0.001794050343249428,
      "grad_norm": 5.86330509185791,
      "learning_rate": 1.94e-05,
      "loss": 0.4895,
      "step": 98
    },
    {
      "epoch": 0.0018123569794050343,
      "grad_norm": 5.012547492980957,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.3257,
      "step": 99
    },
    {
      "epoch": 0.0018306636155606408,
      "grad_norm": 6.429784297943115,
      "learning_rate": 1.98e-05,
      "loss": 0.4644,
      "step": 100
    },
    {
      "epoch": 0.0018489702517162471,
      "grad_norm": 7.005829811096191,
      "learning_rate": 2e-05,
      "loss": 0.3813,
      "step": 101
    },
    {
      "epoch": 0.0018672768878718535,
      "grad_norm": 5.1893415451049805,
      "learning_rate": 1.9999939076577906e-05,
      "loss": 0.484,
      "step": 102
    },
    {
      "epoch": 0.00188558352402746,
      "grad_norm": 5.120594501495361,
      "learning_rate": 1.9999756307053947e-05,
      "loss": 0.4228,
      "step": 103
    },
    {
      "epoch": 0.0019038901601830663,
      "grad_norm": 4.1509904861450195,
      "learning_rate": 1.9999451693655125e-05,
      "loss": 0.3841,
      "step": 104
    },
    {
      "epoch": 0.0019221967963386727,
      "grad_norm": 6.078530788421631,
      "learning_rate": 1.9999025240093045e-05,
      "loss": 0.509,
      "step": 105
    },
    {
      "epoch": 0.0019405034324942792,
      "grad_norm": 4.366968631744385,
      "learning_rate": 1.9998476951563914e-05,
      "loss": 0.4813,
      "step": 106
    },
    {
      "epoch": 0.0019588100686498858,
      "grad_norm": 3.9347426891326904,
      "learning_rate": 1.9997806834748455e-05,
      "loss": 0.3545,
      "step": 107
    },
    {
      "epoch": 0.001977116704805492,
      "grad_norm": 8.536993026733398,
      "learning_rate": 1.9997014897811834e-05,
      "loss": 0.7704,
      "step": 108
    },
    {
      "epoch": 0.0019954233409610984,
      "grad_norm": 4.600913047790527,
      "learning_rate": 1.9996101150403543e-05,
      "loss": 0.4502,
      "step": 109
    },
    {
      "epoch": 0.002013729977116705,
      "grad_norm": 10.658208847045898,
      "learning_rate": 1.9995065603657317e-05,
      "loss": 1.076,
      "step": 110
    },
    {
      "epoch": 0.002032036613272311,
      "grad_norm": 5.670320510864258,
      "learning_rate": 1.999390827019096e-05,
      "loss": 0.7519,
      "step": 111
    },
    {
      "epoch": 0.0020503432494279176,
      "grad_norm": 7.06428337097168,
      "learning_rate": 1.999262916410621e-05,
      "loss": 0.4423,
      "step": 112
    },
    {
      "epoch": 0.002068649885583524,
      "grad_norm": 5.1104254722595215,
      "learning_rate": 1.9991228300988586e-05,
      "loss": 0.4446,
      "step": 113
    },
    {
      "epoch": 0.0020869565217391303,
      "grad_norm": 3.8655929565429688,
      "learning_rate": 1.998970569790715e-05,
      "loss": 0.3284,
      "step": 114
    },
    {
      "epoch": 0.002105263157894737,
      "grad_norm": 2.9013712406158447,
      "learning_rate": 1.9988061373414342e-05,
      "loss": 0.3442,
      "step": 115
    },
    {
      "epoch": 0.0021235697940503433,
      "grad_norm": 13.76830768585205,
      "learning_rate": 1.9986295347545738e-05,
      "loss": 1.2942,
      "step": 116
    },
    {
      "epoch": 0.0021418764302059494,
      "grad_norm": 2.8040542602539062,
      "learning_rate": 1.9984407641819812e-05,
      "loss": 0.1902,
      "step": 117
    },
    {
      "epoch": 0.002160183066361556,
      "grad_norm": 5.416018009185791,
      "learning_rate": 1.9982398279237657e-05,
      "loss": 0.5331,
      "step": 118
    },
    {
      "epoch": 0.0021784897025171625,
      "grad_norm": 4.10576057434082,
      "learning_rate": 1.9980267284282718e-05,
      "loss": 0.352,
      "step": 119
    },
    {
      "epoch": 0.002196796338672769,
      "grad_norm": 3.7827014923095703,
      "learning_rate": 1.9978014682920503e-05,
      "loss": 0.3332,
      "step": 120
    },
    {
      "epoch": 0.002215102974828375,
      "grad_norm": 5.295076847076416,
      "learning_rate": 1.9975640502598243e-05,
      "loss": 0.2993,
      "step": 121
    },
    {
      "epoch": 0.0022334096109839817,
      "grad_norm": 5.779493808746338,
      "learning_rate": 1.997314477224458e-05,
      "loss": 0.8626,
      "step": 122
    },
    {
      "epoch": 0.0022517162471395883,
      "grad_norm": 6.701866626739502,
      "learning_rate": 1.9970527522269204e-05,
      "loss": 0.4652,
      "step": 123
    },
    {
      "epoch": 0.0022700228832951944,
      "grad_norm": 11.795001983642578,
      "learning_rate": 1.9967788784562474e-05,
      "loss": 0.4393,
      "step": 124
    },
    {
      "epoch": 0.002288329519450801,
      "grad_norm": 3.3595681190490723,
      "learning_rate": 1.9964928592495046e-05,
      "loss": 0.2178,
      "step": 125
    },
    {
      "epoch": 0.0023066361556064075,
      "grad_norm": 5.417965412139893,
      "learning_rate": 1.9961946980917457e-05,
      "loss": 0.6563,
      "step": 126
    },
    {
      "epoch": 0.0023249427917620136,
      "grad_norm": 9.861664772033691,
      "learning_rate": 1.9958843986159705e-05,
      "loss": 0.9203,
      "step": 127
    },
    {
      "epoch": 0.00234324942791762,
      "grad_norm": 6.221032619476318,
      "learning_rate": 1.99556196460308e-05,
      "loss": 0.7037,
      "step": 128
    },
    {
      "epoch": 0.0023615560640732267,
      "grad_norm": 2.6676557064056396,
      "learning_rate": 1.9952273999818312e-05,
      "loss": 0.1846,
      "step": 129
    },
    {
      "epoch": 0.0023798627002288328,
      "grad_norm": 4.812030792236328,
      "learning_rate": 1.9948807088287884e-05,
      "loss": 0.3774,
      "step": 130
    },
    {
      "epoch": 0.0023981693363844393,
      "grad_norm": 9.71884822845459,
      "learning_rate": 1.9945218953682736e-05,
      "loss": 0.4416,
      "step": 131
    },
    {
      "epoch": 0.002416475972540046,
      "grad_norm": 5.498316287994385,
      "learning_rate": 1.9941509639723155e-05,
      "loss": 0.7989,
      "step": 132
    },
    {
      "epoch": 0.002434782608695652,
      "grad_norm": 2.3858511447906494,
      "learning_rate": 1.9937679191605964e-05,
      "loss": 0.1593,
      "step": 133
    },
    {
      "epoch": 0.0024530892448512585,
      "grad_norm": 11.50888442993164,
      "learning_rate": 1.9933727656003964e-05,
      "loss": 0.5981,
      "step": 134
    },
    {
      "epoch": 0.002471395881006865,
      "grad_norm": 6.000802516937256,
      "learning_rate": 1.992965508106537e-05,
      "loss": 0.5936,
      "step": 135
    },
    {
      "epoch": 0.0024897025171624716,
      "grad_norm": 5.783041477203369,
      "learning_rate": 1.9925461516413224e-05,
      "loss": 0.5644,
      "step": 136
    },
    {
      "epoch": 0.0025080091533180777,
      "grad_norm": 4.8185954093933105,
      "learning_rate": 1.9921147013144782e-05,
      "loss": 0.5223,
      "step": 137
    },
    {
      "epoch": 0.0025263157894736842,
      "grad_norm": 5.774040222167969,
      "learning_rate": 1.9916711623830904e-05,
      "loss": 0.4565,
      "step": 138
    },
    {
      "epoch": 0.0025446224256292908,
      "grad_norm": 7.319972515106201,
      "learning_rate": 1.991215540251542e-05,
      "loss": 0.6701,
      "step": 139
    },
    {
      "epoch": 0.002562929061784897,
      "grad_norm": 7.840810775756836,
      "learning_rate": 1.9907478404714438e-05,
      "loss": 0.7881,
      "step": 140
    },
    {
      "epoch": 0.0025812356979405034,
      "grad_norm": 3.3393847942352295,
      "learning_rate": 1.9902680687415704e-05,
      "loss": 0.3798,
      "step": 141
    },
    {
      "epoch": 0.00259954233409611,
      "grad_norm": 7.432023048400879,
      "learning_rate": 1.989776230907789e-05,
      "loss": 0.4875,
      "step": 142
    },
    {
      "epoch": 0.002617848970251716,
      "grad_norm": 4.963670253753662,
      "learning_rate": 1.9892723329629885e-05,
      "loss": 0.439,
      "step": 143
    },
    {
      "epoch": 0.0026361556064073226,
      "grad_norm": 5.046631813049316,
      "learning_rate": 1.988756381047006e-05,
      "loss": 0.5672,
      "step": 144
    },
    {
      "epoch": 0.002654462242562929,
      "grad_norm": 7.650580883026123,
      "learning_rate": 1.988228381446553e-05,
      "loss": 0.7363,
      "step": 145
    },
    {
      "epoch": 0.0026727688787185353,
      "grad_norm": 7.517553806304932,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.707,
      "step": 146
    },
    {
      "epoch": 0.002691075514874142,
      "grad_norm": 2.3692057132720947,
      "learning_rate": 1.987136265072988e-05,
      "loss": 0.2329,
      "step": 147
    },
    {
      "epoch": 0.0027093821510297484,
      "grad_norm": 3.4414758682250977,
      "learning_rate": 1.9865721616069695e-05,
      "loss": 0.3901,
      "step": 148
    },
    {
      "epoch": 0.0027276887871853545,
      "grad_norm": 4.910306930541992,
      "learning_rate": 1.985996037070505e-05,
      "loss": 0.5497,
      "step": 149
    },
    {
      "epoch": 0.002745995423340961,
      "grad_norm": 6.384312629699707,
      "learning_rate": 1.9854078984834904e-05,
      "loss": 0.481,
      "step": 150
    },
    {
      "epoch": 0.0027643020594965676,
      "grad_norm": 3.9407761096954346,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.2972,
      "step": 151
    },
    {
      "epoch": 0.002782608695652174,
      "grad_norm": 5.0933122634887695,
      "learning_rate": 1.984195607969242e-05,
      "loss": 0.293,
      "step": 152
    },
    {
      "epoch": 0.00280091533180778,
      "grad_norm": 12.269235610961914,
      "learning_rate": 1.983571470813386e-05,
      "loss": 0.7898,
      "step": 153
    },
    {
      "epoch": 0.0028192219679633868,
      "grad_norm": 7.075507640838623,
      "learning_rate": 1.9829353491495545e-05,
      "loss": 0.7744,
      "step": 154
    },
    {
      "epoch": 0.0028375286041189933,
      "grad_norm": 6.747109413146973,
      "learning_rate": 1.982287250728689e-05,
      "loss": 0.8134,
      "step": 155
    },
    {
      "epoch": 0.0028558352402745994,
      "grad_norm": 6.466359615325928,
      "learning_rate": 1.9816271834476642e-05,
      "loss": 0.379,
      "step": 156
    },
    {
      "epoch": 0.002874141876430206,
      "grad_norm": 4.720534801483154,
      "learning_rate": 1.9809551553491918e-05,
      "loss": 0.3114,
      "step": 157
    },
    {
      "epoch": 0.0028924485125858125,
      "grad_norm": 6.6202921867370605,
      "learning_rate": 1.9802711746217222e-05,
      "loss": 0.4371,
      "step": 158
    },
    {
      "epoch": 0.0029107551487414186,
      "grad_norm": 1.6486483812332153,
      "learning_rate": 1.979575249599344e-05,
      "loss": 0.0489,
      "step": 159
    },
    {
      "epoch": 0.002929061784897025,
      "grad_norm": 7.626447677612305,
      "learning_rate": 1.9788673887616852e-05,
      "loss": 0.4217,
      "step": 160
    },
    {
      "epoch": 0.0029473684210526317,
      "grad_norm": 4.698169231414795,
      "learning_rate": 1.9781476007338058e-05,
      "loss": 0.264,
      "step": 161
    },
    {
      "epoch": 0.002965675057208238,
      "grad_norm": 8.18157958984375,
      "learning_rate": 1.9774158942860962e-05,
      "loss": 0.6062,
      "step": 162
    },
    {
      "epoch": 0.0029839816933638443,
      "grad_norm": 1.4313340187072754,
      "learning_rate": 1.9766722783341682e-05,
      "loss": 0.1536,
      "step": 163
    },
    {
      "epoch": 0.003002288329519451,
      "grad_norm": 5.4740705490112305,
      "learning_rate": 1.9759167619387474e-05,
      "loss": 0.4488,
      "step": 164
    },
    {
      "epoch": 0.0030205949656750574,
      "grad_norm": 8.551219940185547,
      "learning_rate": 1.9751493543055634e-05,
      "loss": 0.4957,
      "step": 165
    },
    {
      "epoch": 0.0030389016018306635,
      "grad_norm": 6.109156131744385,
      "learning_rate": 1.9743700647852356e-05,
      "loss": 0.5566,
      "step": 166
    },
    {
      "epoch": 0.00305720823798627,
      "grad_norm": 8.344240188598633,
      "learning_rate": 1.9735789028731603e-05,
      "loss": 0.8809,
      "step": 167
    },
    {
      "epoch": 0.0030755148741418766,
      "grad_norm": 7.053730010986328,
      "learning_rate": 1.972775878209397e-05,
      "loss": 0.6096,
      "step": 168
    },
    {
      "epoch": 0.0030938215102974827,
      "grad_norm": 8.59719181060791,
      "learning_rate": 1.9719610005785466e-05,
      "loss": 0.4776,
      "step": 169
    },
    {
      "epoch": 0.0031121281464530893,
      "grad_norm": 7.1130571365356445,
      "learning_rate": 1.971134279909636e-05,
      "loss": 0.8,
      "step": 170
    },
    {
      "epoch": 0.003130434782608696,
      "grad_norm": 5.579831123352051,
      "learning_rate": 1.9702957262759964e-05,
      "loss": 0.4925,
      "step": 171
    },
    {
      "epoch": 0.003148741418764302,
      "grad_norm": 3.0703647136688232,
      "learning_rate": 1.9694453498951392e-05,
      "loss": 0.2191,
      "step": 172
    },
    {
      "epoch": 0.0031670480549199085,
      "grad_norm": 2.900186538696289,
      "learning_rate": 1.9685831611286312e-05,
      "loss": 0.3114,
      "step": 173
    },
    {
      "epoch": 0.003185354691075515,
      "grad_norm": 1.6251899003982544,
      "learning_rate": 1.9677091704819714e-05,
      "loss": 0.2184,
      "step": 174
    },
    {
      "epoch": 0.003203661327231121,
      "grad_norm": 8.224297523498535,
      "learning_rate": 1.9668233886044597e-05,
      "loss": 0.6323,
      "step": 175
    },
    {
      "epoch": 0.0032219679633867277,
      "grad_norm": 7.209324836730957,
      "learning_rate": 1.9659258262890683e-05,
      "loss": 0.6906,
      "step": 176
    },
    {
      "epoch": 0.003240274599542334,
      "grad_norm": 4.86000919342041,
      "learning_rate": 1.9650164944723116e-05,
      "loss": 0.3577,
      "step": 177
    },
    {
      "epoch": 0.0032585812356979403,
      "grad_norm": 2.0504913330078125,
      "learning_rate": 1.96409540423411e-05,
      "loss": 0.3458,
      "step": 178
    },
    {
      "epoch": 0.003276887871853547,
      "grad_norm": 3.175110101699829,
      "learning_rate": 1.9631625667976584e-05,
      "loss": 0.244,
      "step": 179
    },
    {
      "epoch": 0.0032951945080091534,
      "grad_norm": 4.946536540985107,
      "learning_rate": 1.9622179935292855e-05,
      "loss": 0.4006,
      "step": 180
    },
    {
      "epoch": 0.00331350114416476,
      "grad_norm": 5.009103298187256,
      "learning_rate": 1.961261695938319e-05,
      "loss": 0.2098,
      "step": 181
    },
    {
      "epoch": 0.003331807780320366,
      "grad_norm": 4.852262020111084,
      "learning_rate": 1.9602936856769432e-05,
      "loss": 0.3542,
      "step": 182
    },
    {
      "epoch": 0.0033501144164759726,
      "grad_norm": 3.4230353832244873,
      "learning_rate": 1.9593139745400575e-05,
      "loss": 0.3436,
      "step": 183
    },
    {
      "epoch": 0.003368421052631579,
      "grad_norm": 4.419967174530029,
      "learning_rate": 1.9583225744651334e-05,
      "loss": 0.2836,
      "step": 184
    },
    {
      "epoch": 0.0033867276887871852,
      "grad_norm": 8.700972557067871,
      "learning_rate": 1.9573194975320672e-05,
      "loss": 0.6588,
      "step": 185
    },
    {
      "epoch": 0.003405034324942792,
      "grad_norm": 8.335065841674805,
      "learning_rate": 1.9563047559630356e-05,
      "loss": 0.6679,
      "step": 186
    },
    {
      "epoch": 0.0034233409610983983,
      "grad_norm": 10.574755668640137,
      "learning_rate": 1.9552783621223437e-05,
      "loss": 0.6755,
      "step": 187
    },
    {
      "epoch": 0.0034416475972540044,
      "grad_norm": 4.3693671226501465,
      "learning_rate": 1.954240328516277e-05,
      "loss": 0.2874,
      "step": 188
    },
    {
      "epoch": 0.003459954233409611,
      "grad_norm": 3.7697315216064453,
      "learning_rate": 1.9531906677929472e-05,
      "loss": 0.2167,
      "step": 189
    },
    {
      "epoch": 0.0034782608695652175,
      "grad_norm": 9.218770027160645,
      "learning_rate": 1.9521293927421388e-05,
      "loss": 0.3588,
      "step": 190
    },
    {
      "epoch": 0.0034965675057208236,
      "grad_norm": 8.993297576904297,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.7196,
      "step": 191
    },
    {
      "epoch": 0.00351487414187643,
      "grad_norm": 5.189692974090576,
      "learning_rate": 1.9499720515246524e-05,
      "loss": 0.4028,
      "step": 192
    },
    {
      "epoch": 0.0035331807780320367,
      "grad_norm": 2.8236076831817627,
      "learning_rate": 1.9488760116444966e-05,
      "loss": 0.337,
      "step": 193
    },
    {
      "epoch": 0.003551487414187643,
      "grad_norm": 9.882134437561035,
      "learning_rate": 1.947768410009586e-05,
      "loss": 0.8133,
      "step": 194
    },
    {
      "epoch": 0.0035697940503432494,
      "grad_norm": 6.428056716918945,
      "learning_rate": 1.9466492601156964e-05,
      "loss": 0.7583,
      "step": 195
    },
    {
      "epoch": 0.003588100686498856,
      "grad_norm": 9.280491828918457,
      "learning_rate": 1.945518575599317e-05,
      "loss": 0.4404,
      "step": 196
    },
    {
      "epoch": 0.0036064073226544625,
      "grad_norm": 6.298871040344238,
      "learning_rate": 1.944376370237481e-05,
      "loss": 0.3666,
      "step": 197
    },
    {
      "epoch": 0.0036247139588100686,
      "grad_norm": 6.506961822509766,
      "learning_rate": 1.943222657947601e-05,
      "loss": 0.2982,
      "step": 198
    },
    {
      "epoch": 0.003643020594965675,
      "grad_norm": 4.006402015686035,
      "learning_rate": 1.942057452787297e-05,
      "loss": 0.3572,
      "step": 199
    },
    {
      "epoch": 0.0036613272311212816,
      "grad_norm": 3.9380829334259033,
      "learning_rate": 1.9408807689542257e-05,
      "loss": 0.2622,
      "step": 200
    },
    {
      "epoch": 0.0036796338672768878,
      "grad_norm": 3.6081972122192383,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.3038,
      "step": 201
    },
    {
      "epoch": 0.0036979405034324943,
      "grad_norm": 4.289360046386719,
      "learning_rate": 1.938493022759556e-05,
      "loss": 0.2393,
      "step": 202
    },
    {
      "epoch": 0.003716247139588101,
      "grad_norm": 5.4397358894348145,
      "learning_rate": 1.937281989491892e-05,
      "loss": 0.416,
      "step": 203
    },
    {
      "epoch": 0.003734553775743707,
      "grad_norm": 2.6657931804656982,
      "learning_rate": 1.9360595357389735e-05,
      "loss": 0.1847,
      "step": 204
    },
    {
      "epoch": 0.0037528604118993135,
      "grad_norm": 10.061616897583008,
      "learning_rate": 1.9348256763960146e-05,
      "loss": 1.0184,
      "step": 205
    },
    {
      "epoch": 0.00377116704805492,
      "grad_norm": 5.163506031036377,
      "learning_rate": 1.9335804264972018e-05,
      "loss": 0.4527,
      "step": 206
    },
    {
      "epoch": 0.003789473684210526,
      "grad_norm": 8.941624641418457,
      "learning_rate": 1.9323238012155125e-05,
      "loss": 0.4174,
      "step": 207
    },
    {
      "epoch": 0.0038077803203661327,
      "grad_norm": 10.84901237487793,
      "learning_rate": 1.9310558158625286e-05,
      "loss": 0.7763,
      "step": 208
    },
    {
      "epoch": 0.0038260869565217392,
      "grad_norm": 6.312254428863525,
      "learning_rate": 1.9297764858882516e-05,
      "loss": 0.5921,
      "step": 209
    },
    {
      "epoch": 0.0038443935926773453,
      "grad_norm": 2.949817419052124,
      "learning_rate": 1.9284858268809135e-05,
      "loss": 0.1387,
      "step": 210
    },
    {
      "epoch": 0.003862700228832952,
      "grad_norm": 8.410788536071777,
      "learning_rate": 1.9271838545667876e-05,
      "loss": 0.6722,
      "step": 211
    },
    {
      "epoch": 0.0038810068649885584,
      "grad_norm": 14.2056303024292,
      "learning_rate": 1.925870584809995e-05,
      "loss": 1.3406,
      "step": 212
    },
    {
      "epoch": 0.003899313501144165,
      "grad_norm": 6.280990123748779,
      "learning_rate": 1.9245460336123136e-05,
      "loss": 0.2529,
      "step": 213
    },
    {
      "epoch": 0.0039176201372997715,
      "grad_norm": 3.3927037715911865,
      "learning_rate": 1.923210217112981e-05,
      "loss": 0.181,
      "step": 214
    },
    {
      "epoch": 0.003935926773455377,
      "grad_norm": 4.4074811935424805,
      "learning_rate": 1.9218631515885007e-05,
      "loss": 0.3954,
      "step": 215
    },
    {
      "epoch": 0.003954233409610984,
      "grad_norm": 4.823078155517578,
      "learning_rate": 1.9205048534524405e-05,
      "loss": 0.5657,
      "step": 216
    },
    {
      "epoch": 0.00397254004576659,
      "grad_norm": 5.334933757781982,
      "learning_rate": 1.9191353392552346e-05,
      "loss": 0.4124,
      "step": 217
    },
    {
      "epoch": 0.003990846681922197,
      "grad_norm": 7.765951633453369,
      "learning_rate": 1.9177546256839814e-05,
      "loss": 0.4361,
      "step": 218
    },
    {
      "epoch": 0.004009153318077803,
      "grad_norm": 5.867908000946045,
      "learning_rate": 1.9163627295622397e-05,
      "loss": 0.3579,
      "step": 219
    },
    {
      "epoch": 0.00402745995423341,
      "grad_norm": 6.852147102355957,
      "learning_rate": 1.914959667849825e-05,
      "loss": 0.7059,
      "step": 220
    },
    {
      "epoch": 0.0040457665903890164,
      "grad_norm": 5.715799808502197,
      "learning_rate": 1.913545457642601e-05,
      "loss": 0.6624,
      "step": 221
    },
    {
      "epoch": 0.004064073226544622,
      "grad_norm": 4.819512367248535,
      "learning_rate": 1.9121201161722732e-05,
      "loss": 0.3062,
      "step": 222
    },
    {
      "epoch": 0.004082379862700229,
      "grad_norm": 5.430914878845215,
      "learning_rate": 1.910683660806177e-05,
      "loss": 0.664,
      "step": 223
    },
    {
      "epoch": 0.004100686498855835,
      "grad_norm": 16.048826217651367,
      "learning_rate": 1.9092361090470688e-05,
      "loss": 0.9425,
      "step": 224
    },
    {
      "epoch": 0.004118993135011442,
      "grad_norm": 10.175516128540039,
      "learning_rate": 1.907777478532909e-05,
      "loss": 0.4463,
      "step": 225
    },
    {
      "epoch": 0.004137299771167048,
      "grad_norm": 10.81609058380127,
      "learning_rate": 1.9063077870366504e-05,
      "loss": 0.5496,
      "step": 226
    },
    {
      "epoch": 0.004155606407322655,
      "grad_norm": 3.3909196853637695,
      "learning_rate": 1.9048270524660197e-05,
      "loss": 0.2642,
      "step": 227
    },
    {
      "epoch": 0.0041739130434782605,
      "grad_norm": 7.220968723297119,
      "learning_rate": 1.903335292863301e-05,
      "loss": 0.3606,
      "step": 228
    },
    {
      "epoch": 0.004192219679633867,
      "grad_norm": 2.8659698963165283,
      "learning_rate": 1.901832526405114e-05,
      "loss": 0.2331,
      "step": 229
    },
    {
      "epoch": 0.004210526315789474,
      "grad_norm": 8.508347511291504,
      "learning_rate": 1.9003187714021936e-05,
      "loss": 0.5547,
      "step": 230
    },
    {
      "epoch": 0.00422883295194508,
      "grad_norm": 2.9436657428741455,
      "learning_rate": 1.8987940462991673e-05,
      "loss": 0.1906,
      "step": 231
    },
    {
      "epoch": 0.004247139588100687,
      "grad_norm": 8.40645980834961,
      "learning_rate": 1.8972583696743284e-05,
      "loss": 0.5893,
      "step": 232
    },
    {
      "epoch": 0.004265446224256293,
      "grad_norm": 4.141918182373047,
      "learning_rate": 1.895711760239413e-05,
      "loss": 0.5183,
      "step": 233
    },
    {
      "epoch": 0.004283752860411899,
      "grad_norm": 7.489543914794922,
      "learning_rate": 1.8941542368393683e-05,
      "loss": 0.4938,
      "step": 234
    },
    {
      "epoch": 0.0043020594965675054,
      "grad_norm": 3.483055591583252,
      "learning_rate": 1.892585818452126e-05,
      "loss": 0.2687,
      "step": 235
    },
    {
      "epoch": 0.004320366132723112,
      "grad_norm": 5.23473596572876,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.2855,
      "step": 236
    },
    {
      "epoch": 0.0043386727688787185,
      "grad_norm": 4.449317455291748,
      "learning_rate": 1.889416373291298e-05,
      "loss": 0.3082,
      "step": 237
    },
    {
      "epoch": 0.004356979405034325,
      "grad_norm": 3.1681277751922607,
      "learning_rate": 1.8878153851364013e-05,
      "loss": 0.2741,
      "step": 238
    },
    {
      "epoch": 0.004375286041189932,
      "grad_norm": 10.63996410369873,
      "learning_rate": 1.8862035792312148e-05,
      "loss": 0.6235,
      "step": 239
    },
    {
      "epoch": 0.004393592677345538,
      "grad_norm": 5.231956958770752,
      "learning_rate": 1.884580975215084e-05,
      "loss": 0.4739,
      "step": 240
    },
    {
      "epoch": 0.004411899313501144,
      "grad_norm": 3.019294023513794,
      "learning_rate": 1.8829475928589272e-05,
      "loss": 0.4061,
      "step": 241
    },
    {
      "epoch": 0.00443020594965675,
      "grad_norm": 5.6815595626831055,
      "learning_rate": 1.8813034520649923e-05,
      "loss": 0.4274,
      "step": 242
    },
    {
      "epoch": 0.004448512585812357,
      "grad_norm": 5.3184123039245605,
      "learning_rate": 1.879648572866617e-05,
      "loss": 0.3172,
      "step": 243
    },
    {
      "epoch": 0.0044668192219679635,
      "grad_norm": 1.4854844808578491,
      "learning_rate": 1.8779829754279806e-05,
      "loss": 0.1407,
      "step": 244
    },
    {
      "epoch": 0.00448512585812357,
      "grad_norm": 9.262789726257324,
      "learning_rate": 1.8763066800438638e-05,
      "loss": 0.7686,
      "step": 245
    },
    {
      "epoch": 0.0045034324942791765,
      "grad_norm": 6.8137359619140625,
      "learning_rate": 1.874619707139396e-05,
      "loss": 0.3486,
      "step": 246
    },
    {
      "epoch": 0.004521739130434782,
      "grad_norm": 11.356719017028809,
      "learning_rate": 1.8729220772698096e-05,
      "loss": 0.4303,
      "step": 247
    },
    {
      "epoch": 0.004540045766590389,
      "grad_norm": 5.79303503036499,
      "learning_rate": 1.8712138111201898e-05,
      "loss": 0.3068,
      "step": 248
    },
    {
      "epoch": 0.004558352402745995,
      "grad_norm": 1.9887948036193848,
      "learning_rate": 1.869494929505219e-05,
      "loss": 0.1456,
      "step": 249
    },
    {
      "epoch": 0.004576659038901602,
      "grad_norm": 10.916764259338379,
      "learning_rate": 1.8677654533689287e-05,
      "loss": 1.1486,
      "step": 250
    },
    {
      "epoch": 0.004594965675057208,
      "grad_norm": 4.801982879638672,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.2377,
      "step": 251
    },
    {
      "epoch": 0.004613272311212815,
      "grad_norm": 5.9616498947143555,
      "learning_rate": 1.864274801953705e-05,
      "loss": 0.213,
      "step": 252
    },
    {
      "epoch": 0.0046315789473684215,
      "grad_norm": 4.0821099281311035,
      "learning_rate": 1.8625136692072577e-05,
      "loss": 0.2355,
      "step": 253
    },
    {
      "epoch": 0.004649885583524027,
      "grad_norm": 3.7081425189971924,
      "learning_rate": 1.860742027003944e-05,
      "loss": 0.2571,
      "step": 254
    },
    {
      "epoch": 0.004668192219679634,
      "grad_norm": 8.445603370666504,
      "learning_rate": 1.8589598969306646e-05,
      "loss": 0.6145,
      "step": 255
    },
    {
      "epoch": 0.00468649885583524,
      "grad_norm": 11.145668029785156,
      "learning_rate": 1.8571673007021124e-05,
      "loss": 0.7036,
      "step": 256
    },
    {
      "epoch": 0.004704805491990847,
      "grad_norm": 3.8045926094055176,
      "learning_rate": 1.855364260160507e-05,
      "loss": 0.3276,
      "step": 257
    },
    {
      "epoch": 0.004723112128146453,
      "grad_norm": 8.324919700622559,
      "learning_rate": 1.8535507972753275e-05,
      "loss": 0.9585,
      "step": 258
    },
    {
      "epoch": 0.00474141876430206,
      "grad_norm": 9.286887168884277,
      "learning_rate": 1.851726934143048e-05,
      "loss": 0.4821,
      "step": 259
    },
    {
      "epoch": 0.0047597254004576655,
      "grad_norm": 1.0730000734329224,
      "learning_rate": 1.849892692986864e-05,
      "loss": 0.1082,
      "step": 260
    },
    {
      "epoch": 0.004778032036613272,
      "grad_norm": 4.952276229858398,
      "learning_rate": 1.848048096156426e-05,
      "loss": 0.2672,
      "step": 261
    },
    {
      "epoch": 0.004796338672768879,
      "grad_norm": 4.847801208496094,
      "learning_rate": 1.8461931661275642e-05,
      "loss": 0.4634,
      "step": 262
    },
    {
      "epoch": 0.004814645308924485,
      "grad_norm": 2.5741231441497803,
      "learning_rate": 1.8443279255020153e-05,
      "loss": 0.209,
      "step": 263
    },
    {
      "epoch": 0.004832951945080092,
      "grad_norm": 7.57988977432251,
      "learning_rate": 1.842452397007148e-05,
      "loss": 0.4493,
      "step": 264
    },
    {
      "epoch": 0.004851258581235698,
      "grad_norm": 7.51473331451416,
      "learning_rate": 1.8405666034956842e-05,
      "loss": 0.6737,
      "step": 265
    },
    {
      "epoch": 0.004869565217391304,
      "grad_norm": 13.536398887634277,
      "learning_rate": 1.8386705679454243e-05,
      "loss": 0.8438,
      "step": 266
    },
    {
      "epoch": 0.0048878718535469105,
      "grad_norm": 3.2198519706726074,
      "learning_rate": 1.836764313458962e-05,
      "loss": 0.251,
      "step": 267
    },
    {
      "epoch": 0.004906178489702517,
      "grad_norm": 5.983921527862549,
      "learning_rate": 1.8348478632634067e-05,
      "loss": 0.3383,
      "step": 268
    },
    {
      "epoch": 0.0049244851258581235,
      "grad_norm": 10.424074172973633,
      "learning_rate": 1.8329212407100996e-05,
      "loss": 1.0936,
      "step": 269
    },
    {
      "epoch": 0.00494279176201373,
      "grad_norm": 6.141103267669678,
      "learning_rate": 1.8309844692743283e-05,
      "loss": 0.5605,
      "step": 270
    },
    {
      "epoch": 0.004961098398169337,
      "grad_norm": 2.830514430999756,
      "learning_rate": 1.8290375725550417e-05,
      "loss": 0.1908,
      "step": 271
    },
    {
      "epoch": 0.004979405034324943,
      "grad_norm": 6.214183330535889,
      "learning_rate": 1.827080574274562e-05,
      "loss": 0.6819,
      "step": 272
    },
    {
      "epoch": 0.004997711670480549,
      "grad_norm": 7.017670631408691,
      "learning_rate": 1.8251134982782952e-05,
      "loss": 0.5617,
      "step": 273
    },
    {
      "epoch": 0.005016018306636155,
      "grad_norm": 15.681363105773926,
      "learning_rate": 1.8231363685344422e-05,
      "loss": 0.8103,
      "step": 274
    },
    {
      "epoch": 0.005034324942791762,
      "grad_norm": 7.182861328125,
      "learning_rate": 1.821149209133704e-05,
      "loss": 0.4888,
      "step": 275
    },
    {
      "epoch": 0.0050526315789473685,
      "grad_norm": 10.65903377532959,
      "learning_rate": 1.819152044288992e-05,
      "loss": 0.7242,
      "step": 276
    },
    {
      "epoch": 0.005070938215102975,
      "grad_norm": 4.960131645202637,
      "learning_rate": 1.8171448983351284e-05,
      "loss": 0.4659,
      "step": 277
    },
    {
      "epoch": 0.0050892448512585816,
      "grad_norm": 2.2972419261932373,
      "learning_rate": 1.815127795728554e-05,
      "loss": 0.1826,
      "step": 278
    },
    {
      "epoch": 0.005107551487414187,
      "grad_norm": 5.904301166534424,
      "learning_rate": 1.8131007610470278e-05,
      "loss": 0.3366,
      "step": 279
    },
    {
      "epoch": 0.005125858123569794,
      "grad_norm": 5.723453998565674,
      "learning_rate": 1.8110638189893267e-05,
      "loss": 0.3315,
      "step": 280
    },
    {
      "epoch": 0.0051441647597254,
      "grad_norm": 5.037336349487305,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.3074,
      "step": 281
    },
    {
      "epoch": 0.005162471395881007,
      "grad_norm": 7.443374156951904,
      "learning_rate": 1.806960312143802e-05,
      "loss": 0.4225,
      "step": 282
    },
    {
      "epoch": 0.005180778032036613,
      "grad_norm": 4.295177459716797,
      "learning_rate": 1.804893797355914e-05,
      "loss": 0.3837,
      "step": 283
    },
    {
      "epoch": 0.00519908466819222,
      "grad_norm": 3.3948891162872314,
      "learning_rate": 1.8028174751911147e-05,
      "loss": 0.349,
      "step": 284
    },
    {
      "epoch": 0.0052173913043478265,
      "grad_norm": 6.52895975112915,
      "learning_rate": 1.8007313709487334e-05,
      "loss": 0.4161,
      "step": 285
    },
    {
      "epoch": 0.005235697940503432,
      "grad_norm": 8.563170433044434,
      "learning_rate": 1.798635510047293e-05,
      "loss": 0.5702,
      "step": 286
    },
    {
      "epoch": 0.005254004576659039,
      "grad_norm": 3.7967607975006104,
      "learning_rate": 1.7965299180241963e-05,
      "loss": 0.1922,
      "step": 287
    },
    {
      "epoch": 0.005272311212814645,
      "grad_norm": 5.493015766143799,
      "learning_rate": 1.7944146205354182e-05,
      "loss": 0.6328,
      "step": 288
    },
    {
      "epoch": 0.005290617848970252,
      "grad_norm": 10.71493148803711,
      "learning_rate": 1.792289643355191e-05,
      "loss": 0.6749,
      "step": 289
    },
    {
      "epoch": 0.005308924485125858,
      "grad_norm": 7.881803512573242,
      "learning_rate": 1.7901550123756906e-05,
      "loss": 0.4414,
      "step": 290
    },
    {
      "epoch": 0.005327231121281465,
      "grad_norm": 3.409003734588623,
      "learning_rate": 1.788010753606722e-05,
      "loss": 0.217,
      "step": 291
    },
    {
      "epoch": 0.0053455377574370706,
      "grad_norm": 5.169612884521484,
      "learning_rate": 1.785856893175402e-05,
      "loss": 0.4037,
      "step": 292
    },
    {
      "epoch": 0.005363844393592677,
      "grad_norm": 5.614249229431152,
      "learning_rate": 1.78369345732584e-05,
      "loss": 0.3583,
      "step": 293
    },
    {
      "epoch": 0.005382151029748284,
      "grad_norm": 5.413997173309326,
      "learning_rate": 1.781520472418819e-05,
      "loss": 0.4758,
      "step": 294
    },
    {
      "epoch": 0.00540045766590389,
      "grad_norm": 5.485922813415527,
      "learning_rate": 1.7793379649314743e-05,
      "loss": 0.4584,
      "step": 295
    },
    {
      "epoch": 0.005418764302059497,
      "grad_norm": 6.144253253936768,
      "learning_rate": 1.777145961456971e-05,
      "loss": 0.5489,
      "step": 296
    },
    {
      "epoch": 0.005437070938215103,
      "grad_norm": 4.56196403503418,
      "learning_rate": 1.7749444887041797e-05,
      "loss": 0.4218,
      "step": 297
    },
    {
      "epoch": 0.005455377574370709,
      "grad_norm": 12.976069450378418,
      "learning_rate": 1.7727335734973512e-05,
      "loss": 0.519,
      "step": 298
    },
    {
      "epoch": 0.0054736842105263155,
      "grad_norm": 10.657815933227539,
      "learning_rate": 1.7705132427757895e-05,
      "loss": 0.8982,
      "step": 299
    },
    {
      "epoch": 0.005491990846681922,
      "grad_norm": 3.892559766769409,
      "learning_rate": 1.7682835235935236e-05,
      "loss": 0.2004,
      "step": 300
    },
    {
      "epoch": 0.005510297482837529,
      "grad_norm": 6.767181396484375,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.5441,
      "step": 301
    },
    {
      "epoch": 0.005528604118993135,
      "grad_norm": 5.241102695465088,
      "learning_rate": 1.7637960286346423e-05,
      "loss": 0.3085,
      "step": 302
    },
    {
      "epoch": 0.005546910755148742,
      "grad_norm": 4.85935640335083,
      "learning_rate": 1.761538307536737e-05,
      "loss": 0.4702,
      "step": 303
    },
    {
      "epoch": 0.005565217391304348,
      "grad_norm": 8.737496376037598,
      "learning_rate": 1.759271307334881e-05,
      "loss": 0.7425,
      "step": 304
    },
    {
      "epoch": 0.005583524027459954,
      "grad_norm": 10.740922927856445,
      "learning_rate": 1.7569950556517566e-05,
      "loss": 0.9976,
      "step": 305
    },
    {
      "epoch": 0.00560183066361556,
      "grad_norm": 7.7551422119140625,
      "learning_rate": 1.7547095802227723e-05,
      "loss": 0.5183,
      "step": 306
    },
    {
      "epoch": 0.005620137299771167,
      "grad_norm": 6.327582836151123,
      "learning_rate": 1.7524149088957244e-05,
      "loss": 0.6134,
      "step": 307
    },
    {
      "epoch": 0.0056384439359267735,
      "grad_norm": 10.561834335327148,
      "learning_rate": 1.7501110696304598e-05,
      "loss": 0.6976,
      "step": 308
    },
    {
      "epoch": 0.00565675057208238,
      "grad_norm": 4.514830112457275,
      "learning_rate": 1.747798090498532e-05,
      "loss": 0.3118,
      "step": 309
    },
    {
      "epoch": 0.005675057208237987,
      "grad_norm": 3.9753475189208984,
      "learning_rate": 1.7454759996828622e-05,
      "loss": 0.3003,
      "step": 310
    },
    {
      "epoch": 0.005693363844393592,
      "grad_norm": 7.0595011711120605,
      "learning_rate": 1.7431448254773943e-05,
      "loss": 0.5337,
      "step": 311
    },
    {
      "epoch": 0.005711670480549199,
      "grad_norm": 4.571330547332764,
      "learning_rate": 1.74080459628675e-05,
      "loss": 0.4606,
      "step": 312
    },
    {
      "epoch": 0.005729977116704805,
      "grad_norm": 7.209574222564697,
      "learning_rate": 1.7384553406258842e-05,
      "loss": 0.4431,
      "step": 313
    },
    {
      "epoch": 0.005748283752860412,
      "grad_norm": 3.1496009826660156,
      "learning_rate": 1.7360970871197347e-05,
      "loss": 0.288,
      "step": 314
    },
    {
      "epoch": 0.0057665903890160184,
      "grad_norm": 1.1498371362686157,
      "learning_rate": 1.7337298645028764e-05,
      "loss": 0.0904,
      "step": 315
    },
    {
      "epoch": 0.005784897025171625,
      "grad_norm": 4.189731121063232,
      "learning_rate": 1.7313537016191706e-05,
      "loss": 0.2142,
      "step": 316
    },
    {
      "epoch": 0.0058032036613272315,
      "grad_norm": 3.495272636413574,
      "learning_rate": 1.7289686274214116e-05,
      "loss": 0.2809,
      "step": 317
    },
    {
      "epoch": 0.005821510297482837,
      "grad_norm": 11.465441703796387,
      "learning_rate": 1.7265746709709762e-05,
      "loss": 0.9229,
      "step": 318
    },
    {
      "epoch": 0.005839816933638444,
      "grad_norm": 3.298283338546753,
      "learning_rate": 1.7241718614374678e-05,
      "loss": 0.2105,
      "step": 319
    },
    {
      "epoch": 0.00585812356979405,
      "grad_norm": 4.148833274841309,
      "learning_rate": 1.7217602280983622e-05,
      "loss": 0.2279,
      "step": 320
    },
    {
      "epoch": 0.005876430205949657,
      "grad_norm": 2.6801838874816895,
      "learning_rate": 1.7193398003386514e-05,
      "loss": 0.2207,
      "step": 321
    },
    {
      "epoch": 0.005894736842105263,
      "grad_norm": 5.371076583862305,
      "learning_rate": 1.716910607650483e-05,
      "loss": 0.6822,
      "step": 322
    },
    {
      "epoch": 0.00591304347826087,
      "grad_norm": 5.155342102050781,
      "learning_rate": 1.7144726796328034e-05,
      "loss": 0.6256,
      "step": 323
    },
    {
      "epoch": 0.005931350114416476,
      "grad_norm": 5.462609767913818,
      "learning_rate": 1.712026045990997e-05,
      "loss": 0.3712,
      "step": 324
    },
    {
      "epoch": 0.005949656750572082,
      "grad_norm": 2.920656204223633,
      "learning_rate": 1.709570736536521e-05,
      "loss": 0.2011,
      "step": 325
    },
    {
      "epoch": 0.005967963386727689,
      "grad_norm": 6.90954065322876,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.3713,
      "step": 326
    },
    {
      "epoch": 0.005986270022883295,
      "grad_norm": 4.551551342010498,
      "learning_rate": 1.7046342099635948e-05,
      "loss": 0.3052,
      "step": 327
    },
    {
      "epoch": 0.006004576659038902,
      "grad_norm": 4.894721031188965,
      "learning_rate": 1.7021530529951627e-05,
      "loss": 0.8055,
      "step": 328
    },
    {
      "epoch": 0.006022883295194508,
      "grad_norm": 4.173544406890869,
      "learning_rate": 1.6996633405133656e-05,
      "loss": 0.4642,
      "step": 329
    },
    {
      "epoch": 0.006041189931350115,
      "grad_norm": 5.222576141357422,
      "learning_rate": 1.697165102854565e-05,
      "loss": 0.3175,
      "step": 330
    },
    {
      "epoch": 0.0060594965675057205,
      "grad_norm": 6.459454536437988,
      "learning_rate": 1.6946583704589973e-05,
      "loss": 0.5357,
      "step": 331
    },
    {
      "epoch": 0.006077803203661327,
      "grad_norm": 3.6619327068328857,
      "learning_rate": 1.692143173870407e-05,
      "loss": 0.4273,
      "step": 332
    },
    {
      "epoch": 0.006096109839816934,
      "grad_norm": 5.7568840980529785,
      "learning_rate": 1.68961954373567e-05,
      "loss": 0.4508,
      "step": 333
    },
    {
      "epoch": 0.00611441647597254,
      "grad_norm": 7.910932540893555,
      "learning_rate": 1.6870875108044233e-05,
      "loss": 0.4166,
      "step": 334
    },
    {
      "epoch": 0.006132723112128147,
      "grad_norm": 4.656468391418457,
      "learning_rate": 1.684547105928689e-05,
      "loss": 0.3099,
      "step": 335
    },
    {
      "epoch": 0.006151029748283753,
      "grad_norm": 6.832411289215088,
      "learning_rate": 1.6819983600624986e-05,
      "loss": 0.4713,
      "step": 336
    },
    {
      "epoch": 0.006169336384439359,
      "grad_norm": 3.675230026245117,
      "learning_rate": 1.6794413042615168e-05,
      "loss": 0.3749,
      "step": 337
    },
    {
      "epoch": 0.0061876430205949655,
      "grad_norm": 14.497318267822266,
      "learning_rate": 1.6768759696826608e-05,
      "loss": 1.1848,
      "step": 338
    },
    {
      "epoch": 0.006205949656750572,
      "grad_norm": 8.537349700927734,
      "learning_rate": 1.6743023875837233e-05,
      "loss": 0.4933,
      "step": 339
    },
    {
      "epoch": 0.0062242562929061785,
      "grad_norm": 4.796643257141113,
      "learning_rate": 1.6717205893229904e-05,
      "loss": 0.4819,
      "step": 340
    },
    {
      "epoch": 0.006242562929061785,
      "grad_norm": 5.899059772491455,
      "learning_rate": 1.6691306063588583e-05,
      "loss": 0.6742,
      "step": 341
    },
    {
      "epoch": 0.006260869565217392,
      "grad_norm": 8.666280746459961,
      "learning_rate": 1.6665324702494524e-05,
      "loss": 0.5037,
      "step": 342
    },
    {
      "epoch": 0.006279176201372997,
      "grad_norm": 14.649405479431152,
      "learning_rate": 1.6639262126522417e-05,
      "loss": 0.7326,
      "step": 343
    },
    {
      "epoch": 0.006297482837528604,
      "grad_norm": 6.6728034019470215,
      "learning_rate": 1.661311865323652e-05,
      "loss": 0.4329,
      "step": 344
    },
    {
      "epoch": 0.00631578947368421,
      "grad_norm": 3.9211013317108154,
      "learning_rate": 1.6586894601186804e-05,
      "loss": 0.4379,
      "step": 345
    },
    {
      "epoch": 0.006334096109839817,
      "grad_norm": 5.018696308135986,
      "learning_rate": 1.6560590289905074e-05,
      "loss": 0.3585,
      "step": 346
    },
    {
      "epoch": 0.0063524027459954235,
      "grad_norm": 10.415823936462402,
      "learning_rate": 1.6534206039901057e-05,
      "loss": 0.5712,
      "step": 347
    },
    {
      "epoch": 0.00637070938215103,
      "grad_norm": 3.3000543117523193,
      "learning_rate": 1.650774217265851e-05,
      "loss": 0.262,
      "step": 348
    },
    {
      "epoch": 0.0063890160183066366,
      "grad_norm": 5.603179454803467,
      "learning_rate": 1.6481199010631312e-05,
      "loss": 0.2821,
      "step": 349
    },
    {
      "epoch": 0.006407322654462242,
      "grad_norm": 10.997979164123535,
      "learning_rate": 1.645457687723951e-05,
      "loss": 0.8257,
      "step": 350
    },
    {
      "epoch": 0.006425629290617849,
      "grad_norm": 9.430671691894531,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.6242,
      "step": 351
    },
    {
      "epoch": 0.006443935926773455,
      "grad_norm": 4.974905014038086,
      "learning_rate": 1.6401096994849558e-05,
      "loss": 0.3398,
      "step": 352
    },
    {
      "epoch": 0.006462242562929062,
      "grad_norm": 6.002130031585693,
      "learning_rate": 1.63742398974869e-05,
      "loss": 0.4058,
      "step": 353
    },
    {
      "epoch": 0.006480549199084668,
      "grad_norm": 4.933552265167236,
      "learning_rate": 1.6347305132022677e-05,
      "loss": 0.4209,
      "step": 354
    },
    {
      "epoch": 0.006498855835240275,
      "grad_norm": 4.84297513961792,
      "learning_rate": 1.632029302664851e-05,
      "loss": 0.3322,
      "step": 355
    },
    {
      "epoch": 0.006517162471395881,
      "grad_norm": 5.379430294036865,
      "learning_rate": 1.6293203910498375e-05,
      "loss": 0.4546,
      "step": 356
    },
    {
      "epoch": 0.006535469107551487,
      "grad_norm": 3.9145123958587646,
      "learning_rate": 1.6266038113644605e-05,
      "loss": 0.3207,
      "step": 357
    },
    {
      "epoch": 0.006553775743707094,
      "grad_norm": 6.354018211364746,
      "learning_rate": 1.6238795967093865e-05,
      "loss": 0.4729,
      "step": 358
    },
    {
      "epoch": 0.0065720823798627,
      "grad_norm": 9.218265533447266,
      "learning_rate": 1.6211477802783105e-05,
      "loss": 0.6496,
      "step": 359
    },
    {
      "epoch": 0.006590389016018307,
      "grad_norm": 12.858467102050781,
      "learning_rate": 1.6184083953575543e-05,
      "loss": 0.4073,
      "step": 360
    },
    {
      "epoch": 0.006608695652173913,
      "grad_norm": 8.530418395996094,
      "learning_rate": 1.6156614753256583e-05,
      "loss": 0.5826,
      "step": 361
    },
    {
      "epoch": 0.00662700228832952,
      "grad_norm": 8.6444673538208,
      "learning_rate": 1.6129070536529767e-05,
      "loss": 0.6264,
      "step": 362
    },
    {
      "epoch": 0.0066453089244851255,
      "grad_norm": 9.804875373840332,
      "learning_rate": 1.610145163901268e-05,
      "loss": 0.4984,
      "step": 363
    },
    {
      "epoch": 0.006663615560640732,
      "grad_norm": 7.5225324630737305,
      "learning_rate": 1.607375839723287e-05,
      "loss": 0.4179,
      "step": 364
    },
    {
      "epoch": 0.006681922196796339,
      "grad_norm": 6.881463527679443,
      "learning_rate": 1.6045991148623752e-05,
      "loss": 0.4956,
      "step": 365
    },
    {
      "epoch": 0.006700228832951945,
      "grad_norm": 6.102224349975586,
      "learning_rate": 1.6018150231520486e-05,
      "loss": 0.4461,
      "step": 366
    },
    {
      "epoch": 0.006718535469107552,
      "grad_norm": 11.278656005859375,
      "learning_rate": 1.599023598515586e-05,
      "loss": 0.8761,
      "step": 367
    },
    {
      "epoch": 0.006736842105263158,
      "grad_norm": 5.988121032714844,
      "learning_rate": 1.5962248749656158e-05,
      "loss": 0.2178,
      "step": 368
    },
    {
      "epoch": 0.006755148741418764,
      "grad_norm": 0.9311094880104065,
| "learning_rate": 1.5934188866037017e-05, |
| "loss": 0.0839, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.0067734553775743705, |
| "grad_norm": 5.130373001098633, |
| "learning_rate": 1.5906056676199256e-05, |
| "loss": 0.1672, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.006791762013729977, |
| "grad_norm": 5.0459418296813965, |
| "learning_rate": 1.5877852522924733e-05, |
| "loss": 0.2407, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.006810068649885584, |
| "grad_norm": 2.93607497215271, |
| "learning_rate": 1.584957674987216e-05, |
| "loss": 0.1925, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.00682837528604119, |
| "grad_norm": 12.006101608276367, |
| "learning_rate": 1.5821229701572897e-05, |
| "loss": 0.8912, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.006846681922196797, |
| "grad_norm": 15.027911186218262, |
| "learning_rate": 1.5792811723426787e-05, |
| "loss": 0.4139, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.006864988558352402, |
| "grad_norm": 5.793044090270996, |
| "learning_rate": 1.5764323161697933e-05, |
| "loss": 0.2851, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.006883295194508009, |
| "grad_norm": 8.51421070098877, |
| "learning_rate": 1.573576436351046e-05, |
| "loss": 0.5144, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.006901601830663615, |
| "grad_norm": 9.018112182617188, |
| "learning_rate": 1.570713567684432e-05, |
| "loss": 0.4011, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.006919908466819222, |
| "grad_norm": 12.444863319396973, |
| "learning_rate": 1.5678437450531014e-05, |
| "loss": 0.8046, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.0069382151029748285, |
| "grad_norm": 7.43892765045166, |
| "learning_rate": 1.564967003424938e-05, |
| "loss": 0.4985, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.006956521739130435, |
| "grad_norm": 3.2682807445526123, |
| "learning_rate": 1.5620833778521306e-05, |
| "loss": 0.3606, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.006974828375286042, |
| "grad_norm": 6.130132675170898, |
| "learning_rate": 1.5591929034707468e-05, |
| "loss": 0.5154, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.006993135011441647, |
| "grad_norm": 5.139265060424805, |
| "learning_rate": 1.556295615500305e-05, |
| "loss": 0.3442, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.007011441647597254, |
| "grad_norm": 5.858025074005127, |
| "learning_rate": 1.553391549243344e-05, |
| "loss": 0.316, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.00702974828375286, |
| "grad_norm": 5.114577293395996, |
| "learning_rate": 1.5504807400849957e-05, |
| "loss": 0.3712, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.007048054919908467, |
| "grad_norm": 11.650129318237305, |
| "learning_rate": 1.5475632234925505e-05, |
| "loss": 0.2964, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.007066361556064073, |
| "grad_norm": 4.760644435882568, |
| "learning_rate": 1.5446390350150272e-05, |
| "loss": 0.4069, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.00708466819221968, |
| "grad_norm": 4.894204139709473, |
| "learning_rate": 1.54170821028274e-05, |
| "loss": 0.4782, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.007102974828375286, |
| "grad_norm": 4.8495001792907715, |
| "learning_rate": 1.5387707850068633e-05, |
| "loss": 0.3845, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.007121281464530892, |
| "grad_norm": 3.118293285369873, |
| "learning_rate": 1.5358267949789968e-05, |
| "loss": 0.17, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.007139588100686499, |
| "grad_norm": 4.566890239715576, |
| "learning_rate": 1.53287627607073e-05, |
| "loss": 0.207, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.007157894736842105, |
| "grad_norm": 9.8530855178833, |
| "learning_rate": 1.529919264233205e-05, |
| "loss": 0.5756, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.007176201372997712, |
| "grad_norm": 3.8654870986938477, |
| "learning_rate": 1.5269557954966777e-05, |
| "loss": 0.2833, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.007194508009153318, |
| "grad_norm": 8.076715469360352, |
| "learning_rate": 1.5239859059700794e-05, |
| "loss": 0.434, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.007212814645308925, |
| "grad_norm": 2.7720115184783936, |
| "learning_rate": 1.5210096318405768e-05, |
| "loss": 0.1749, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.007231121281464531, |
| "grad_norm": 3.135864496231079, |
| "learning_rate": 1.5180270093731305e-05, |
| "loss": 0.0707, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.007249427917620137, |
| "grad_norm": 7.354649543762207, |
| "learning_rate": 1.5150380749100545e-05, |
| "loss": 0.5681, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.007267734553775744, |
| "grad_norm": 5.514926433563232, |
| "learning_rate": 1.5120428648705716e-05, |
| "loss": 0.2812, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.00728604118993135, |
| "grad_norm": 4.094933986663818, |
| "learning_rate": 1.5090414157503715e-05, |
| "loss": 0.189, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.007304347826086957, |
| "grad_norm": 1.2968307733535767, |
| "learning_rate": 1.5060337641211637e-05, |
| "loss": 0.1388, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.007322654462242563, |
| "grad_norm": 9.890887260437012, |
| "learning_rate": 1.5030199466302354e-05, |
| "loss": 0.4099, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.007340961098398169, |
| "grad_norm": 7.428032875061035, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 0.5361, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.0073592677345537755, |
| "grad_norm": 6.782509803771973, |
| "learning_rate": 1.4969739610275556e-05, |
| "loss": 0.3347, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.007377574370709382, |
| "grad_norm": 8.611628532409668, |
| "learning_rate": 1.493941866584231e-05, |
| "loss": 0.6473, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.007395881006864989, |
| "grad_norm": 11.101713180541992, |
| "learning_rate": 1.490903753615141e-05, |
| "loss": 0.5101, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.007414187643020595, |
| "grad_norm": 2.3792383670806885, |
| "learning_rate": 1.4878596591387329e-05, |
| "loss": 0.2176, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.007432494279176202, |
| "grad_norm": 3.647657871246338, |
| "learning_rate": 1.4848096202463373e-05, |
| "loss": 0.1662, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.007450800915331807, |
| "grad_norm": 2.6126086711883545, |
| "learning_rate": 1.4817536741017153e-05, |
| "loss": 0.223, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.007469107551487414, |
| "grad_norm": 7.960146903991699, |
| "learning_rate": 1.478691857940607e-05, |
| "loss": 0.4825, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.0074874141876430204, |
| "grad_norm": 2.9891467094421387, |
| "learning_rate": 1.4756242090702756e-05, |
| "loss": 0.2072, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.007505720823798627, |
| "grad_norm": 7.48441743850708, |
| "learning_rate": 1.4725507648690542e-05, |
| "loss": 0.6046, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.0075240274599542335, |
| "grad_norm": 4.050131320953369, |
| "learning_rate": 1.469471562785891e-05, |
| "loss": 0.343, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.00754233409610984, |
| "grad_norm": 7.872916221618652, |
| "learning_rate": 1.4663866403398915e-05, |
| "loss": 0.6128, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.007560640732265447, |
| "grad_norm": 4.682326793670654, |
| "learning_rate": 1.463296035119862e-05, |
| "loss": 0.3317, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.007578947368421052, |
| "grad_norm": 4.932008266448975, |
| "learning_rate": 1.4601997847838518e-05, |
| "loss": 0.2332, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.007597254004576659, |
| "grad_norm": 7.188138008117676, |
| "learning_rate": 1.4570979270586944e-05, |
| "loss": 0.5127, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.007615560640732265, |
| "grad_norm": 6.72917366027832, |
| "learning_rate": 1.4539904997395468e-05, |
| "loss": 0.2865, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.007633867276887872, |
| "grad_norm": 6.936765670776367, |
| "learning_rate": 1.4508775406894308e-05, |
| "loss": 0.4261, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.0076521739130434785, |
| "grad_norm": 6.002128601074219, |
| "learning_rate": 1.4477590878387697e-05, |
| "loss": 0.3089, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.007670480549199085, |
| "grad_norm": 3.699465036392212, |
| "learning_rate": 1.4446351791849276e-05, |
| "loss": 0.2833, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.007688787185354691, |
| "grad_norm": 5.764729022979736, |
| "learning_rate": 1.4415058527917454e-05, |
| "loss": 0.3462, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.007707093821510297, |
| "grad_norm": 2.2470004558563232, |
| "learning_rate": 1.4383711467890776e-05, |
| "loss": 0.1425, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.007725400457665904, |
| "grad_norm": 3.7865355014801025, |
| "learning_rate": 1.4352310993723277e-05, |
| "loss": 0.1958, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.00774370709382151, |
| "grad_norm": 5.835235595703125, |
| "learning_rate": 1.4320857488019826e-05, |
| "loss": 0.3167, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.007762013729977117, |
| "grad_norm": 4.612227439880371, |
| "learning_rate": 1.4289351334031461e-05, |
| "loss": 0.259, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.007780320366132723, |
| "grad_norm": 5.527163982391357, |
| "learning_rate": 1.4257792915650728e-05, |
| "loss": 0.3875, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.00779862700228833, |
| "grad_norm": 1.5776690244674683, |
| "learning_rate": 1.4226182617406996e-05, |
| "loss": 0.1545, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.007816933638443936, |
| "grad_norm": 5.22613525390625, |
| "learning_rate": 1.4194520824461773e-05, |
| "loss": 0.2102, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.007835240274599543, |
| "grad_norm": 8.434508323669434, |
| "learning_rate": 1.4162807922604014e-05, |
| "loss": 0.4331, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.00785354691075515, |
| "grad_norm": 6.714724063873291, |
| "learning_rate": 1.413104429824542e-05, |
| "loss": 0.4454, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.007871853546910754, |
| "grad_norm": 5.015308856964111, |
| "learning_rate": 1.4099230338415728e-05, |
| "loss": 0.1637, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.007890160183066361, |
| "grad_norm": 5.9333367347717285, |
| "learning_rate": 1.4067366430758004e-05, |
| "loss": 0.3313, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.007908466819221967, |
| "grad_norm": 2.7909371852874756, |
| "learning_rate": 1.4035452963523903e-05, |
| "loss": 0.1595, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.007926773455377574, |
| "grad_norm": 12.026054382324219, |
| "learning_rate": 1.4003490325568953e-05, |
| "loss": 0.5666, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.00794508009153318, |
| "grad_norm": 9.2349271774292, |
| "learning_rate": 1.3971478906347806e-05, |
| "loss": 0.4163, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.007963386727688787, |
| "grad_norm": 4.861546516418457, |
| "learning_rate": 1.3939419095909513e-05, |
| "loss": 0.2674, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.007981693363844394, |
| "grad_norm": 6.877256393432617, |
| "learning_rate": 1.3907311284892737e-05, |
| "loss": 0.2162, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.008, |
| "grad_norm": 9.61545467376709, |
| "learning_rate": 1.3875155864521031e-05, |
| "loss": 0.5273, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.008018306636155607, |
| "grad_norm": 4.861330509185791, |
| "learning_rate": 1.3842953226598036e-05, |
| "loss": 0.1181, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.008036613272311213, |
| "grad_norm": 12.819157600402832, |
| "learning_rate": 1.3810703763502744e-05, |
| "loss": 0.9099, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.00805491990846682, |
| "grad_norm": 11.029340744018555, |
| "learning_rate": 1.3778407868184674e-05, |
| "loss": 0.5066, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.008073226544622426, |
| "grad_norm": 12.776625633239746, |
| "learning_rate": 1.3746065934159123e-05, |
| "loss": 0.8391, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.008091533180778033, |
| "grad_norm": 10.414067268371582, |
| "learning_rate": 1.371367835550235e-05, |
| "loss": 0.579, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.008109839816933638, |
| "grad_norm": 2.4149062633514404, |
| "learning_rate": 1.3681245526846782e-05, |
| "loss": 0.2106, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.008128146453089244, |
| "grad_norm": 3.4909825325012207, |
| "learning_rate": 1.3648767843376196e-05, |
| "loss": 0.2114, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.00814645308924485, |
| "grad_norm": 2.9647722244262695, |
| "learning_rate": 1.3616245700820922e-05, |
| "loss": 0.2153, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.008164759725400457, |
| "grad_norm": 2.9645230770111084, |
| "learning_rate": 1.3583679495453e-05, |
| "loss": 0.1417, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.008183066361556064, |
| "grad_norm": 5.128696918487549, |
| "learning_rate": 1.3551069624081372e-05, |
| "loss": 0.2745, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.00820137299771167, |
| "grad_norm": 8.987137794494629, |
| "learning_rate": 1.3518416484047018e-05, |
| "loss": 0.5339, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.008219679633867277, |
| "grad_norm": 9.38960075378418, |
| "learning_rate": 1.3485720473218153e-05, |
| "loss": 0.4518, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.008237986270022883, |
| "grad_norm": 2.744861364364624, |
| "learning_rate": 1.3452981989985347e-05, |
| "loss": 0.153, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.00825629290617849, |
| "grad_norm": 6.3267998695373535, |
| "learning_rate": 1.342020143325669e-05, |
| "loss": 0.4077, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.008274599542334097, |
| "grad_norm": 9.530548095703125, |
| "learning_rate": 1.3387379202452917e-05, |
| "loss": 0.5712, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.008292906178489703, |
| "grad_norm": 1.2639689445495605, |
| "learning_rate": 1.3354515697502552e-05, |
| "loss": 0.0901, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.00831121281464531, |
| "grad_norm": 10.240217208862305, |
| "learning_rate": 1.3321611318837033e-05, |
| "loss": 0.4039, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.008329519450800914, |
| "grad_norm": 7.718742370605469, |
| "learning_rate": 1.3288666467385834e-05, |
| "loss": 0.5048, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.008347826086956521, |
| "grad_norm": 1.411474347114563, |
| "learning_rate": 1.3255681544571568e-05, |
| "loss": 0.1291, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.008366132723112128, |
| "grad_norm": 3.7180333137512207, |
| "learning_rate": 1.3222656952305113e-05, |
| "loss": 0.1669, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.008384439359267734, |
| "grad_norm": 4.709652423858643, |
| "learning_rate": 1.3189593092980701e-05, |
| "loss": 0.2531, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.00840274599542334, |
| "grad_norm": 2.9395062923431396, |
| "learning_rate": 1.3156490369471026e-05, |
| "loss": 0.1908, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.008421052631578947, |
| "grad_norm": 1.6285467147827148, |
| "learning_rate": 1.3123349185122328e-05, |
| "loss": 0.1829, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.008439359267734554, |
| "grad_norm": 2.359907627105713, |
| "learning_rate": 1.3090169943749475e-05, |
| "loss": 0.078, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.00845766590389016, |
| "grad_norm": 5.721633434295654, |
| "learning_rate": 1.3056953049631059e-05, |
| "loss": 0.5493, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.008475972540045767, |
| "grad_norm": 7.441493034362793, |
| "learning_rate": 1.3023698907504447e-05, |
| "loss": 0.3625, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.008494279176201373, |
| "grad_norm": 8.423327445983887, |
| "learning_rate": 1.2990407922560869e-05, |
| "loss": 0.841, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.00851258581235698, |
| "grad_norm": 13.417081832885742, |
| "learning_rate": 1.2957080500440469e-05, |
| "loss": 0.3607, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.008530892448512586, |
| "grad_norm": 19.080902099609375, |
| "learning_rate": 1.2923717047227368e-05, |
| "loss": 0.8309, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.008549199084668193, |
| "grad_norm": 5.821935176849365, |
| "learning_rate": 1.2890317969444716e-05, |
| "loss": 0.1997, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.008567505720823798, |
| "grad_norm": 6.380805492401123, |
| "learning_rate": 1.2856883674049736e-05, |
| "loss": 0.3366, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.008585812356979404, |
| "grad_norm": 4.085707664489746, |
| "learning_rate": 1.2823414568428767e-05, |
| "loss": 0.2064, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.008604118993135011, |
| "grad_norm": 1.9464811086654663, |
| "learning_rate": 1.2789911060392295e-05, |
| "loss": 0.1908, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.008622425629290617, |
| "grad_norm": 3.138880729675293, |
| "learning_rate": 1.2756373558169992e-05, |
| "loss": 0.1714, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.008640732265446224, |
| "grad_norm": 4.658113479614258, |
| "learning_rate": 1.2722802470405744e-05, |
| "loss": 0.3097, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.00865903890160183, |
| "grad_norm": 4.06643533706665, |
| "learning_rate": 1.2689198206152657e-05, |
| "loss": 0.2363, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.008677345537757437, |
| "grad_norm": 7.818174839019775, |
| "learning_rate": 1.265556117486809e-05, |
| "loss": 0.3208, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.008695652173913044, |
| "grad_norm": 5.261242866516113, |
| "learning_rate": 1.2621891786408648e-05, |
| "loss": 0.3357, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.00871395881006865, |
| "grad_norm": 6.41402006149292, |
| "learning_rate": 1.2588190451025209e-05, |
| "loss": 0.2269, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.008732265446224257, |
| "grad_norm": 3.909989833831787, |
| "learning_rate": 1.2554457579357906e-05, |
| "loss": 0.2456, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.008750572082379863, |
| "grad_norm": 6.242023944854736, |
| "learning_rate": 1.252069358243114e-05, |
| "loss": 0.4592, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.00876887871853547, |
| "grad_norm": 11.872090339660645, |
| "learning_rate": 1.2486898871648552e-05, |
| "loss": 0.6729, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.008787185354691076, |
| "grad_norm": 5.960324287414551, |
| "learning_rate": 1.2453073858788027e-05, |
| "loss": 0.4166, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.008805491990846681, |
| "grad_norm": 1.5860638618469238, |
| "learning_rate": 1.2419218955996677e-05, |
| "loss": 0.1463, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.008823798627002288, |
| "grad_norm": 2.930913209915161, |
| "learning_rate": 1.238533457578581e-05, |
| "loss": 0.1741, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.008842105263157894, |
| "grad_norm": 7.377974033355713, |
| "learning_rate": 1.23514211310259e-05, |
| "loss": 0.4284, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.0088604118993135, |
| "grad_norm": 3.70469069480896, |
| "learning_rate": 1.2317479034941572e-05, |
| "loss": 0.2728, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.008878718535469107, |
| "grad_norm": 5.633390426635742, |
| "learning_rate": 1.2283508701106559e-05, |
| "loss": 0.3421, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.008897025171624714, |
| "grad_norm": 0.9088148474693298, |
| "learning_rate": 1.2249510543438652e-05, |
| "loss": 0.1071, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.00891533180778032, |
| "grad_norm": 10.652883529663086, |
| "learning_rate": 1.2215484976194675e-05, |
| "loss": 0.4244, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.008933638443935927, |
| "grad_norm": 5.040556907653809, |
| "learning_rate": 1.2181432413965428e-05, |
| "loss": 0.2866, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.008951945080091533, |
| "grad_norm": 5.78645133972168, |
| "learning_rate": 1.2147353271670634e-05, |
| "loss": 0.2152, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.00897025171624714, |
| "grad_norm": 8.46097183227539, |
| "learning_rate": 1.211324796455389e-05, |
| "loss": 0.6545, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.008988558352402747, |
| "grad_norm": 5.180769443511963, |
| "learning_rate": 1.2079116908177592e-05, |
| "loss": 0.234, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.009006864988558353, |
| "grad_norm": 5.95151424407959, |
| "learning_rate": 1.2044960518417902e-05, |
| "loss": 0.2579, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.00902517162471396, |
| "grad_norm": 7.002593994140625, |
| "learning_rate": 1.2010779211459649e-05, |
| "loss": 0.5269, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.009043478260869564, |
| "grad_norm": 7.671480178833008, |
| "learning_rate": 1.1976573403791263e-05, |
| "loss": 0.4812, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.009061784897025171, |
| "grad_norm": 7.638554096221924, |
| "learning_rate": 1.194234351219972e-05, |
| "loss": 0.3599, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.009080091533180778, |
| "grad_norm": 6.174886226654053, |
| "learning_rate": 1.190808995376545e-05, |
| "loss": 0.275, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.009098398169336384, |
| "grad_norm": 3.23649525642395, |
| "learning_rate": 1.187381314585725e-05, |
| "loss": 0.179, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.00911670480549199, |
| "grad_norm": 6.402664661407471, |
| "learning_rate": 1.1839513506127202e-05, |
| "loss": 0.3912, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.009135011441647597, |
| "grad_norm": 7.799668788909912, |
| "learning_rate": 1.1805191452505602e-05, |
| "loss": 0.3725, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.009153318077803204, |
| "grad_norm": 13.813634872436523, |
| "learning_rate": 1.1770847403195836e-05, |
| "loss": 0.7032, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.00917162471395881, |
| "grad_norm": 2.5564143657684326, |
| "learning_rate": 1.1736481776669307e-05, |
| "loss": 0.129, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.009189931350114417, |
| "grad_norm": 4.1047821044921875, |
| "learning_rate": 1.1702094991660326e-05, |
| "loss": 0.2844, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.009208237986270023, |
| "grad_norm": 8.63683032989502, |
| "learning_rate": 1.1667687467161025e-05, |
| "loss": 0.4529, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.00922654462242563, |
| "grad_norm": 5.256021976470947, |
| "learning_rate": 1.1633259622416224e-05, |
| "loss": 0.4106, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.009244851258581236, |
| "grad_norm": 4.8041486740112305, |
| "learning_rate": 1.159881187691835e-05, |
| "loss": 0.2019, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.009263157894736843, |
| "grad_norm": 5.618558406829834, |
| "learning_rate": 1.156434465040231e-05, |
| "loss": 0.3514, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.009281464530892448, |
| "grad_norm": 3.8470354080200195, |
| "learning_rate": 1.1529858362840383e-05, |
| "loss": 0.2582, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.009299771167048054, |
| "grad_norm": 4.10807991027832, |
| "learning_rate": 1.1495353434437098e-05, |
| "loss": 0.2775, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.00931807780320366, |
| "grad_norm": 3.2404677867889404, |
| "learning_rate": 1.1460830285624119e-05, |
| "loss": 0.0828, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.009336384439359267, |
| "grad_norm": 4.644530296325684, |
| "learning_rate": 1.1426289337055119e-05, |
| "loss": 0.2594, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.009354691075514874, |
| "grad_norm": 4.381004810333252, |
| "learning_rate": 1.1391731009600655e-05, |
| "loss": 0.3301, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.00937299771167048, |
| "grad_norm": 2.8095078468322754, |
| "learning_rate": 1.1357155724343046e-05, |
| "loss": 0.1854, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.009391304347826087, |
| "grad_norm": 4.646561622619629, |
| "learning_rate": 1.1322563902571227e-05, |
| "loss": 0.2856, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.009409610983981694, |
| "grad_norm": 2.435589075088501, |
| "learning_rate": 1.128795596577563e-05, |
| "loss": 0.158, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.0094279176201373, |
| "grad_norm": 5.535358905792236, |
| "learning_rate": 1.1253332335643043e-05, |
| "loss": 0.4016, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.009446224256292907, |
| "grad_norm": 2.8487491607666016, |
| "learning_rate": 1.1218693434051475e-05, |
| "loss": 0.1856, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.009464530892448513, |
| "grad_norm": 6.440304756164551, |
| "learning_rate": 1.1184039683065014e-05, |
| "loss": 0.4992, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.00948283752860412, |
| "grad_norm": 4.590803146362305, |
| "learning_rate": 1.1149371504928667e-05, |
| "loss": 0.3192, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.009501144164759726, |
| "grad_norm": 3.818535566329956, |
| "learning_rate": 1.1114689322063255e-05, |
| "loss": 0.2371, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.009519450800915331, |
| "grad_norm": 6.345479965209961, |
| "learning_rate": 1.1079993557060228e-05, |
| "loss": 0.3573, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.009537757437070938, |
| "grad_norm": 13.15767765045166, |
| "learning_rate": 1.1045284632676535e-05, |
| "loss": 0.4145, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.009556064073226544, |
| "grad_norm": 2.1148345470428467, |
| "learning_rate": 1.1010562971829464e-05, |
| "loss": 0.1284, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.00957437070938215, |
| "grad_norm": 5.396875858306885, |
| "learning_rate": 1.0975828997591496e-05, |
| "loss": 0.6029, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.009592677345537757, |
| "grad_norm": 5.095512866973877, |
| "learning_rate": 1.0941083133185146e-05, |
| "loss": 0.316, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.009610983981693364, |
| "grad_norm": 8.121919631958008, |
| "learning_rate": 1.0906325801977804e-05, |
| "loss": 0.3049, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.00962929061784897, |
| "grad_norm": 3.969857931137085, |
| "learning_rate": 1.0871557427476585e-05, |
| "loss": 0.1074, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.009647597254004577, |
| "grad_norm": 12.454261779785156, |
| "learning_rate": 1.083677843332316e-05, |
| "loss": 0.6658, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.009665903890160183, |
| "grad_norm": 4.157962799072266, |
| "learning_rate": 1.0801989243288588e-05, |
| "loss": 0.343, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.00968421052631579, |
| "grad_norm": 0.6788906455039978, |
| "learning_rate": 1.0767190281268187e-05, |
| "loss": 0.0834, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.009702517162471396, |
| "grad_norm": 7.104922294616699, |
| "learning_rate": 1.0732381971276318e-05, |
| "loss": 0.4511, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.009720823798627003, |
| "grad_norm": 10.119832038879395, |
| "learning_rate": 1.0697564737441254e-05, |
| "loss": 0.4547, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.009739130434782608, |
| "grad_norm": 1.6746511459350586, |
| "learning_rate": 1.0662739004000005e-05, |
| "loss": 0.1375, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.009757437070938214, |
| "grad_norm": 9.788320541381836, |
| "learning_rate": 1.0627905195293135e-05, |
| "loss": 0.4845, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.009775743707093821, |
| "grad_norm": 9.758012771606445, |
| "learning_rate": 1.0593063735759619e-05, |
| "loss": 0.4237, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.009794050343249427, |
| "grad_norm": 2.4389655590057373, |
| "learning_rate": 1.055821504993164e-05, |
| "loss": 0.1618, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.009812356979405034, |
| "grad_norm": 10.49396800994873, |
| "learning_rate": 1.0523359562429441e-05, |
| "loss": 0.5892, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.00983066361556064, |
| "grad_norm": 5.466527462005615, |
| "learning_rate": 1.0488497697956134e-05, |
| "loss": 0.2774, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.009848970251716247, |
| "grad_norm": 2.714876890182495, |
| "learning_rate": 1.0453629881292537e-05, |
| "loss": 0.1092, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.009867276887871854, |
| "grad_norm": 4.077674865722656, |
| "learning_rate": 1.0418756537291996e-05, |
| "loss": 0.2116, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.00988558352402746, |
| "grad_norm": 3.0669403076171875, |
| "learning_rate": 1.03838780908752e-05, |
| "loss": 0.1422, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.009903890160183067, |
| "grad_norm": 8.089181900024414, |
| "learning_rate": 1.0348994967025012e-05, |
| "loss": 0.4507, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.009922196796338673, |
| "grad_norm": 3.917191982269287, |
| "learning_rate": 1.0314107590781284e-05, |
| "loss": 0.2293, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.00994050343249428, |
| "grad_norm": 3.5208070278167725, |
| "learning_rate": 1.0279216387235691e-05, |
| "loss": 0.201, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.009958810068649886, |
| "grad_norm": 11.041540145874023, |
| "learning_rate": 1.0244321781526533e-05, |
| "loss": 0.4846, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.009977116704805491, |
| "grad_norm": 5.149035930633545, |
| "learning_rate": 1.0209424198833571e-05, |
| "loss": 0.2859, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.009995423340961098, |
| "grad_norm": 4.3648762702941895, |
| "learning_rate": 1.0174524064372837e-05, |
| "loss": 0.3187, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.010013729977116704, |
| "grad_norm": 14.32425594329834, |
| "learning_rate": 1.0139621803391454e-05, |
| "loss": 0.7906, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.01003203661327231, |
| "grad_norm": 7.4310455322265625, |
| "learning_rate": 1.010471784116246e-05, |
| "loss": 0.3219, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.010050343249427917, |
| "grad_norm": 4.170273780822754, |
| "learning_rate": 1.0069812602979617e-05, |
| "loss": 0.23, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.010068649885583524, |
| "grad_norm": 11.858202934265137, |
| "learning_rate": 1.0034906514152239e-05, |
| "loss": 0.4112, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.01008695652173913, |
| "grad_norm": 7.049015998840332, |
| "learning_rate": 1e-05, |
| "loss": 0.2449, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.010105263157894737, |
| "grad_norm": 8.323487281799316, |
| "learning_rate": 9.965093485847766e-06, |
| "loss": 0.3886, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.010123569794050344, |
| "grad_norm": 1.819872498512268, |
| "learning_rate": 9.930187397020385e-06, |
| "loss": 0.0831, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.01014187643020595, |
| "grad_norm": 8.343167304992676, |
| "learning_rate": 9.895282158837545e-06, |
| "loss": 0.7165, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.010160183066361557, |
| "grad_norm": 6.083837985992432, |
| "learning_rate": 9.860378196608549e-06, |
| "loss": 0.3716, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.010178489702517163, |
| "grad_norm": 5.233552932739258, |
| "learning_rate": 9.825475935627165e-06, |
| "loss": 0.3486, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.01019679633867277, |
| "grad_norm": 2.339454174041748, |
| "learning_rate": 9.790575801166432e-06, |
| "loss": 0.1367, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.010215102974828374, |
| "grad_norm": 5.374737739562988, |
| "learning_rate": 9.75567821847347e-06, |
| "loss": 0.4209, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.010233409610983981, |
| "grad_norm": 8.515985488891602, |
| "learning_rate": 9.720783612764314e-06, |
| "loss": 0.3302, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.010251716247139588, |
| "grad_norm": 9.269789695739746, |
| "learning_rate": 9.685892409218718e-06, |
| "loss": 0.2903, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.010270022883295194, |
| "grad_norm": 3.935189723968506, |
| "learning_rate": 9.651005032974994e-06, |
| "loss": 0.3017, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.0102883295194508, |
| "grad_norm": 10.850844383239746, |
| "learning_rate": 9.616121909124801e-06, |
| "loss": 0.4161, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.010306636155606407, |
| "grad_norm": 9.301300048828125, |
| "learning_rate": 9.581243462708007e-06, |
| "loss": 0.5199, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.010324942791762014, |
| "grad_norm": 5.9267191886901855, |
| "learning_rate": 9.546370118707463e-06, |
| "loss": 0.2799, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.01034324942791762, |
| "grad_norm": 9.080803871154785, |
| "learning_rate": 9.511502302043867e-06, |
| "loss": 0.7154, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.010361556064073227, |
| "grad_norm": 7.055878162384033, |
| "learning_rate": 9.476640437570562e-06, |
| "loss": 0.304, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.010379862700228833, |
| "grad_norm": 2.7219274044036865, |
| "learning_rate": 9.441784950068362e-06, |
| "loss": 0.1827, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.01039816933638444, |
| "grad_norm": 2.3669350147247314, |
| "learning_rate": 9.406936264240386e-06, |
| "loss": 0.1686, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.010416475972540046, |
| "grad_norm": 4.634760856628418, |
| "learning_rate": 9.372094804706867e-06, |
| "loss": 0.2163, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.010434782608695653, |
| "grad_norm": 1.3255306482315063, |
| "learning_rate": 9.337260996000002e-06, |
| "loss": 0.0883, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.010453089244851258, |
| "grad_norm": 2.8958237171173096, |
| "learning_rate": 9.302435262558748e-06, |
| "loss": 0.1593, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.010471395881006864, |
| "grad_norm": 8.792821884155273, |
| "learning_rate": 9.267618028723687e-06, |
| "loss": 0.4316, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.010489702517162471, |
| "grad_norm": 11.037833213806152, |
| "learning_rate": 9.232809718731815e-06, |
| "loss": 0.6522, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.010508009153318077, |
| "grad_norm": 2.741342067718506, |
| "learning_rate": 9.198010756711413e-06, |
| "loss": 0.1564, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.010526315789473684, |
| "grad_norm": 5.826968193054199, |
| "learning_rate": 9.163221566676847e-06, |
| "loss": 0.294, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.01054462242562929, |
| "grad_norm": 2.7347750663757324, |
| "learning_rate": 9.128442572523418e-06, |
| "loss": 0.1361, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.010562929061784897, |
| "grad_norm": 8.53330135345459, |
| "learning_rate": 9.093674198022201e-06, |
| "loss": 0.275, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.010581235697940504, |
| "grad_norm": 3.5783607959747314, |
| "learning_rate": 9.058916866814857e-06, |
| "loss": 0.2126, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.01059954233409611, |
| "grad_norm": 12.933378219604492, |
| "learning_rate": 9.024171002408507e-06, |
| "loss": 0.5151, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.010617848970251717, |
| "grad_norm": 5.769047737121582, |
| "learning_rate": 8.989437028170537e-06, |
| "loss": 0.2029, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.010636155606407323, |
| "grad_norm": 10.376677513122559, |
| "learning_rate": 8.954715367323468e-06, |
| "loss": 0.5334, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.01065446224256293, |
| "grad_norm": 11.327596664428711, |
| "learning_rate": 8.920006442939772e-06, |
| "loss": 0.3964, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.010672768878718536, |
| "grad_norm": 7.816294193267822, |
| "learning_rate": 8.885310677936746e-06, |
| "loss": 0.3265, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.010691075514874141, |
| "grad_norm": 14.427319526672363, |
| "learning_rate": 8.850628495071336e-06, |
| "loss": 0.6769, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.010709382151029748, |
| "grad_norm": 2.7052078247070312, |
| "learning_rate": 8.815960316934991e-06, |
| "loss": 0.1255, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.010727688787185354, |
| "grad_norm": 7.072470188140869, |
| "learning_rate": 8.781306565948528e-06, |
| "loss": 0.4344, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.01074599542334096, |
| "grad_norm": 15.953747749328613, |
| "learning_rate": 8.746667664356957e-06, |
| "loss": 0.6664, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.010764302059496567, |
| "grad_norm": 4.023654937744141, |
| "learning_rate": 8.712044034224374e-06, |
| "loss": 0.1642, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.010782608695652174, |
| "grad_norm": 9.880155563354492, |
| "learning_rate": 8.677436097428775e-06, |
| "loss": 0.5635, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.01080091533180778, |
| "grad_norm": 7.572893142700195, |
| "learning_rate": 8.642844275656957e-06, |
| "loss": 0.6194, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.010819221967963387, |
| "grad_norm": 4.445793151855469, |
| "learning_rate": 8.60826899039935e-06, |
| "loss": 0.2096, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.010837528604118993, |
| "grad_norm": 9.583978652954102, |
| "learning_rate": 8.573710662944884e-06, |
| "loss": 0.2836, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.0108558352402746, |
| "grad_norm": 8.179230690002441, |
| "learning_rate": 8.539169714375885e-06, |
| "loss": 0.3744, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.010874141876430207, |
| "grad_norm": 10.7511568069458, |
| "learning_rate": 8.504646565562907e-06, |
| "loss": 0.5062, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.010892448512585813, |
| "grad_norm": 2.8209664821624756, |
| "learning_rate": 8.47014163715962e-06, |
| "loss": 0.1292, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.010910755148741418, |
| "grad_norm": 5.161312580108643, |
| "learning_rate": 8.43565534959769e-06, |
| "loss": 0.2225, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.010929061784897024, |
| "grad_norm": 6.31545877456665, |
| "learning_rate": 8.401188123081653e-06, |
| "loss": 0.4825, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.010947368421052631, |
| "grad_norm": 7.428010940551758, |
| "learning_rate": 8.366740377583781e-06, |
| "loss": 0.28, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.010965675057208238, |
| "grad_norm": 10.632474899291992, |
| "learning_rate": 8.332312532838978e-06, |
| "loss": 0.4979, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.010983981693363844, |
| "grad_norm": 5.0757880210876465, |
| "learning_rate": 8.297905008339677e-06, |
| "loss": 0.4114, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.01100228832951945, |
| "grad_norm": 10.615399360656738, |
| "learning_rate": 8.263518223330698e-06, |
| "loss": 0.4028, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.011020594965675057, |
| "grad_norm": 3.5888783931732178, |
| "learning_rate": 8.22915259680417e-06, |
| "loss": 0.1922, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.011038901601830664, |
| "grad_norm": 7.211901664733887, |
| "learning_rate": 8.194808547494401e-06, |
| "loss": 0.4079, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.01105720823798627, |
| "grad_norm": 2.416304111480713, |
| "learning_rate": 8.1604864938728e-06, |
| "loss": 0.1201, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.011075514874141877, |
| "grad_norm": 4.646475791931152, |
| "learning_rate": 8.126186854142752e-06, |
| "loss": 0.2859, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.011093821510297483, |
| "grad_norm": 3.045999765396118, |
| "learning_rate": 8.091910046234552e-06, |
| "loss": 0.1901, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.01111212814645309, |
| "grad_norm": 0.9611418843269348, |
| "learning_rate": 8.057656487800283e-06, |
| "loss": 0.1063, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.011130434782608696, |
| "grad_norm": 2.64760160446167, |
| "learning_rate": 8.023426596208739e-06, |
| "loss": 0.1395, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.011148741418764301, |
| "grad_norm": 12.34020709991455, |
| "learning_rate": 7.989220788540356e-06, |
| "loss": 0.4971, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.011167048054919908, |
| "grad_norm": 5.713735103607178, |
| "learning_rate": 7.955039481582098e-06, |
| "loss": 0.1714, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.011185354691075514, |
| "grad_norm": 18.283546447753906, |
| "learning_rate": 7.92088309182241e-06, |
| "loss": 0.6558, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.01120366132723112, |
| "grad_norm": 12.52721118927002, |
| "learning_rate": 7.886752035446116e-06, |
| "loss": 0.5603, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.011221967963386727, |
| "grad_norm": 4.516887664794922, |
| "learning_rate": 7.852646728329368e-06, |
| "loss": 0.2777, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.011240274599542334, |
| "grad_norm": 12.416860580444336, |
| "learning_rate": 7.818567586034578e-06, |
| "loss": 0.5139, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.01125858123569794, |
| "grad_norm": 7.2105255126953125, |
| "learning_rate": 7.784515023805328e-06, |
| "loss": 0.3159, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.011276887871853547, |
| "grad_norm": 5.7121100425720215, |
| "learning_rate": 7.750489456561351e-06, |
| "loss": 0.2603, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.011295194508009154, |
| "grad_norm": 8.093098640441895, |
| "learning_rate": 7.716491298893443e-06, |
| "loss": 0.4499, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.01131350114416476, |
| "grad_norm": 4.29197883605957, |
| "learning_rate": 7.68252096505843e-06, |
| "loss": 0.1985, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.011331807780320367, |
| "grad_norm": 2.614251136779785, |
| "learning_rate": 7.6485788689741e-06, |
| "loss": 0.1525, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.011350114416475973, |
| "grad_norm": 7.588433742523193, |
| "learning_rate": 7.6146654242141935e-06, |
| "loss": 0.5013, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.01136842105263158, |
| "grad_norm": 7.058767318725586, |
| "learning_rate": 7.580781044003324e-06, |
| "loss": 0.2994, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.011386727688787185, |
| "grad_norm": 4.412848949432373, |
| "learning_rate": 7.546926141211975e-06, |
| "loss": 0.2056, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.011405034324942791, |
| "grad_norm": 7.570004463195801, |
| "learning_rate": 7.513101128351454e-06, |
| "loss": 0.3943, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.011423340961098398, |
| "grad_norm": 4.8941521644592285, |
| "learning_rate": 7.4793064175688635e-06, |
| "loss": 0.2364, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.011441647597254004, |
| "grad_norm": 7.014512538909912, |
| "learning_rate": 7.445542420642097e-06, |
| "loss": 0.3111, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.01145995423340961, |
| "grad_norm": 2.618640661239624, |
| "learning_rate": 7.411809548974792e-06, |
| "loss": 0.1614, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.011478260869565217, |
| "grad_norm": 5.4425740242004395, |
| "learning_rate": 7.378108213591355e-06, |
| "loss": 0.3619, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.011496567505720824, |
| "grad_norm": 3.9418482780456543, |
| "learning_rate": 7.344438825131912e-06, |
| "loss": 0.1967, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.01151487414187643, |
| "grad_norm": 5.235764026641846, |
| "learning_rate": 7.310801793847344e-06, |
| "loss": 0.2332, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.011533180778032037, |
| "grad_norm": 3.303947687149048, |
| "learning_rate": 7.277197529594257e-06, |
| "loss": 0.1708, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.011551487414187643, |
| "grad_norm": 13.982658386230469, |
| "learning_rate": 7.243626441830009e-06, |
| "loss": 0.62, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.01156979405034325, |
| "grad_norm": 5.784631252288818, |
| "learning_rate": 7.210088939607709e-06, |
| "loss": 0.3401, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.011588100686498857, |
| "grad_norm": 5.933520793914795, |
| "learning_rate": 7.176585431571235e-06, |
| "loss": 0.3589, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.011606407322654463, |
| "grad_norm": 12.28591537475586, |
| "learning_rate": 7.143116325950266e-06, |
| "loss": 0.6409, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.011624713958810068, |
| "grad_norm": 1.4337087869644165, |
| "learning_rate": 7.109682030555283e-06, |
| "loss": 0.0955, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.011643020594965674, |
| "grad_norm": 3.1845862865448, |
| "learning_rate": 7.076282952772634e-06, |
| "loss": 0.2039, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.011661327231121281, |
| "grad_norm": 3.719296455383301, |
| "learning_rate": 7.042919499559538e-06, |
| "loss": 0.1679, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.011679633867276887, |
| "grad_norm": 8.414267539978027, |
| "learning_rate": 7.009592077439135e-06, |
| "loss": 0.4527, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.011697940503432494, |
| "grad_norm": 4.962318420410156, |
| "learning_rate": 6.976301092495556e-06, |
| "loss": 0.257, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.0117162471395881, |
| "grad_norm": 5.534420967102051, |
| "learning_rate": 6.943046950368944e-06, |
| "loss": 0.1673, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.011734553775743707, |
| "grad_norm": 7.917168140411377, |
| "learning_rate": 6.909830056250527e-06, |
| "loss": 0.2011, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.011752860411899314, |
| "grad_norm": 6.127618312835693, |
| "learning_rate": 6.876650814877675e-06, |
| "loss": 0.2565, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.01177116704805492, |
| "grad_norm": 1.4675836563110352, |
| "learning_rate": 6.843509630528977e-06, |
| "loss": 0.0989, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.011789473684210527, |
| "grad_norm": 10.80344009399414, |
| "learning_rate": 6.8104069070193e-06, |
| "loss": 0.5205, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.011807780320366133, |
| "grad_norm": 5.06679630279541, |
| "learning_rate": 6.777343047694891e-06, |
| "loss": 0.2618, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.01182608695652174, |
| "grad_norm": 18.99068832397461, |
| "learning_rate": 6.744318455428436e-06, |
| "loss": 1.1171, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.011844393592677346, |
| "grad_norm": 4.126583576202393, |
| "learning_rate": 6.711333532614168e-06, |
| "loss": 0.2228, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.011862700228832951, |
| "grad_norm": 2.62414288520813, |
| "learning_rate": 6.67838868116297e-06, |
| "loss": 0.1465, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.011881006864988558, |
| "grad_norm": 14.556570053100586, |
| "learning_rate": 6.645484302497452e-06, |
| "loss": 0.4093, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.011899313501144164, |
| "grad_norm": 3.685044288635254, |
| "learning_rate": 6.612620797547087e-06, |
| "loss": 0.2372, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.01191762013729977, |
| "grad_norm": 10.447038650512695, |
| "learning_rate": 6.579798566743314e-06, |
| "loss": 0.3067, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.011935926773455377, |
| "grad_norm": 7.927669525146484, |
| "learning_rate": 6.547018010014654e-06, |
| "loss": 0.5193, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.011954233409610984, |
| "grad_norm": 18.05657386779785, |
| "learning_rate": 6.5142795267818505e-06, |
| "loss": 0.9131, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.01197254004576659, |
| "grad_norm": 3.7571918964385986, |
| "learning_rate": 6.481583515952983e-06, |
| "loss": 0.2054, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.011990846681922197, |
| "grad_norm": 5.364101409912109, |
| "learning_rate": 6.448930375918632e-06, |
| "loss": 0.1835, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.012009153318077804, |
| "grad_norm": 9.139827728271484, |
| "learning_rate": 6.4163205045469975e-06, |
| "loss": 0.76, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.01202745995423341, |
| "grad_norm": 1.704281210899353, |
| "learning_rate": 6.383754299179079e-06, |
| "loss": 0.1308, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.012045766590389017, |
| "grad_norm": 7.968606472015381, |
| "learning_rate": 6.351232156623803e-06, |
| "loss": 0.6407, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.012064073226544623, |
| "grad_norm": 17.709571838378906, |
| "learning_rate": 6.318754473153221e-06, |
| "loss": 0.9402, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.01208237986270023, |
| "grad_norm": 11.887946128845215, |
| "learning_rate": 6.286321644497655e-06, |
| "loss": 0.5389, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.012100686498855834, |
| "grad_norm": 4.999807357788086, |
| "learning_rate": 6.25393406584088e-06, |
| "loss": 0.3483, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.012118993135011441, |
| "grad_norm": 7.6661906242370605, |
| "learning_rate": 6.22159213181533e-06, |
| "loss": 0.4436, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.012137299771167048, |
| "grad_norm": 7.397403240203857, |
| "learning_rate": 6.18929623649726e-06, |
| "loss": 0.3315, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.012155606407322654, |
| "grad_norm": 8.096484184265137, |
| "learning_rate": 6.157046773401964e-06, |
| "loss": 0.3889, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.01217391304347826, |
| "grad_norm": 4.6776251792907715, |
| "learning_rate": 6.124844135478971e-06, |
| "loss": 0.2081, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.012192219679633867, |
| "grad_norm": 8.530952453613281, |
| "learning_rate": 6.092688715107265e-06, |
| "loss": 0.4058, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.012210526315789474, |
| "grad_norm": 9.915332794189453, |
| "learning_rate": 6.06058090409049e-06, |
| "loss": 0.4034, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.01222883295194508, |
| "grad_norm": 16.072465896606445, |
| "learning_rate": 6.028521093652195e-06, |
| "loss": 0.5987, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.012247139588100687, |
| "grad_norm": 4.9345479011535645, |
| "learning_rate": 5.996509674431053e-06, |
| "loss": 0.223, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.012265446224256293, |
| "grad_norm": 4.367536544799805, |
| "learning_rate": 5.9645470364761e-06, |
| "loss": 0.357, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.0122837528604119, |
| "grad_norm": 5.174431324005127, |
| "learning_rate": 5.932633569242e-06, |
| "loss": 0.3424, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.012302059496567506, |
| "grad_norm": 5.893050193786621, |
| "learning_rate": 5.900769661584273e-06, |
| "loss": 0.2669, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.012320366132723111, |
| "grad_norm": 10.575066566467285, |
| "learning_rate": 5.868955701754584e-06, |
| "loss": 0.3644, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.012338672768878718, |
| "grad_norm": 5.934246063232422, |
| "learning_rate": 5.83719207739599e-06, |
| "loss": 0.3487, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.012356979405034324, |
| "grad_norm": 2.32735538482666, |
| "learning_rate": 5.8054791755382286e-06, |
| "loss": 0.1629, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.012375286041189931, |
| "grad_norm": 10.4105224609375, |
| "learning_rate": 5.773817382593008e-06, |
| "loss": 0.577, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.012393592677345537, |
| "grad_norm": 8.903114318847656, |
| "learning_rate": 5.742207084349274e-06, |
| "loss": 0.5795, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.012411899313501144, |
| "grad_norm": 1.7812707424163818, |
| "learning_rate": 5.710648665968543e-06, |
| "loss": 0.0989, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.01243020594965675, |
| "grad_norm": 5.7843017578125, |
| "learning_rate": 5.679142511980176e-06, |
| "loss": 0.331, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.012448512585812357, |
| "grad_norm": 10.94100570678711, |
| "learning_rate": 5.647689006276727e-06, |
| "loss": 0.6601, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.012466819221967964, |
| "grad_norm": 3.161262273788452, |
| "learning_rate": 5.616288532109225e-06, |
| "loss": 0.1807, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.01248512585812357, |
| "grad_norm": 5.56771183013916, |
| "learning_rate": 5.584941472082549e-06, |
| "loss": 0.3165, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.012503432494279177, |
| "grad_norm": 2.321823835372925, |
| "learning_rate": 5.553648208150728e-06, |
| "loss": 0.0925, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.012521739130434783, |
| "grad_norm": 5.972341537475586, |
| "learning_rate": 5.522409121612304e-06, |
| "loss": 0.301, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.01254004576659039, |
| "grad_norm": 14.91727352142334, |
| "learning_rate": 5.491224593105695e-06, |
| "loss": 0.8664, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.012558352402745995, |
| "grad_norm": 4.990755558013916, |
| "learning_rate": 5.460095002604533e-06, |
| "loss": 0.2396, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.012576659038901601, |
| "grad_norm": 3.9700381755828857, |
| "learning_rate": 5.429020729413062e-06, |
| "loss": 0.2423, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.012594965675057208, |
| "grad_norm": 19.159067153930664, |
| "learning_rate": 5.398002152161484e-06, |
| "loss": 0.8266, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.012613272311212814, |
| "grad_norm": 6.696817398071289, |
| "learning_rate": 5.367039648801386e-06, |
| "loss": 0.3413, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.01263157894736842, |
| "grad_norm": 7.948276042938232, |
| "learning_rate": 5.336133596601089e-06, |
| "loss": 0.2789, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.012649885583524027, |
| "grad_norm": 4.675381660461426, |
| "learning_rate": 5.305284372141095e-06, |
| "loss": 0.2435, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.012668192219679634, |
| "grad_norm": 8.05580997467041, |
| "learning_rate": 5.274492351309462e-06, |
| "loss": 0.3781, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.01268649885583524, |
| "grad_norm": 8.211023330688477, |
| "learning_rate": 5.243757909297247e-06, |
| "loss": 0.1573, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.012704805491990847, |
| "grad_norm": 2.2971174716949463, |
| "learning_rate": 5.213081420593933e-06, |
| "loss": 0.1092, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.012723112128146453, |
| "grad_norm": 8.110382080078125, |
| "learning_rate": 5.1824632589828465e-06, |
| "loss": 0.3773, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.01274141876430206, |
| "grad_norm": 4.399291038513184, |
| "learning_rate": 5.151903797536631e-06, |
| "loss": 0.1347, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.012759725400457667, |
| "grad_norm": 6.359978199005127, |
| "learning_rate": 5.121403408612672e-06, |
| "loss": 0.1828, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.012778032036613273, |
| "grad_norm": 3.2302865982055664, |
| "learning_rate": 5.090962463848592e-06, |
| "loss": 0.1979, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.012796338672768878, |
| "grad_norm": 7.771028518676758, |
| "learning_rate": 5.060581334157693e-06, |
| "loss": 0.4509, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.012814645308924484, |
| "grad_norm": 4.159087181091309, |
| "learning_rate": 5.030260389724447e-06, |
| "loss": 0.1944, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.012832951945080091, |
| "grad_norm": 5.334461688995361, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 0.1792, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.012851258581235698, |
| "grad_norm": 2.703159809112549, |
| "learning_rate": 4.96980053369765e-06, |
| "loss": 0.1473, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.012869565217391304, |
| "grad_norm": 9.341408729553223, |
| "learning_rate": 4.939662358788364e-06, |
| "loss": 0.4148, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.01288787185354691, |
| "grad_norm": 5.660780906677246, |
| "learning_rate": 4.909585842496287e-06, |
| "loss": 0.4013, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.012906178489702517, |
| "grad_norm": 14.756714820861816, |
| "learning_rate": 4.879571351294287e-06, |
| "loss": 0.5132, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.012924485125858124, |
| "grad_norm": 3.988741159439087, |
| "learning_rate": 4.849619250899458e-06, |
| "loss": 0.2555, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.01294279176201373, |
| "grad_norm": 4.656280040740967, |
| "learning_rate": 4.8197299062687e-06, |
| "loss": 0.2938, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.012961098398169337, |
| "grad_norm": 7.464823246002197, |
| "learning_rate": 4.78990368159424e-06, |
| "loss": 0.4899, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.012979405034324943, |
| "grad_norm": 1.0329279899597168, |
| "learning_rate": 4.76014094029921e-06, |
| "loss": 0.0952, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.01299771167048055, |
| "grad_norm": 4.358473300933838, |
| "learning_rate": 4.7304420450332244e-06, |
| "loss": 0.2474, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.013016018306636156, |
| "grad_norm": 4.45465612411499, |
| "learning_rate": 4.700807357667953e-06, |
| "loss": 0.2223, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.013034324942791761, |
| "grad_norm": 2.9852383136749268, |
| "learning_rate": 4.671237239292699e-06, |
| "loss": 0.2014, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.013052631578947368, |
| "grad_norm": 8.365155220031738, |
| "learning_rate": 4.641732050210032e-06, |
| "loss": 0.4184, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.013070938215102974, |
| "grad_norm": 6.666082382202148, |
| "learning_rate": 4.612292149931369e-06, |
| "loss": 0.3087, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.01308924485125858, |
| "grad_norm": 3.9271891117095947, |
| "learning_rate": 4.582917897172603e-06, |
| "loss": 0.1685, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.013107551487414187, |
| "grad_norm": 4.308122634887695, |
| "learning_rate": 4.5536096498497295e-06, |
| "loss": 0.1991, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.013125858123569794, |
| "grad_norm": 4.309776306152344, |
| "learning_rate": 4.524367765074499e-06, |
| "loss": 0.263, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.0131441647597254, |
| "grad_norm": 3.0838520526885986, |
| "learning_rate": 4.495192599150045e-06, |
| "loss": 0.1774, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.013162471395881007, |
| "grad_norm": 7.4343695640563965, |
| "learning_rate": 4.46608450756656e-06, |
| "loss": 0.4462, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.013180778032036614, |
| "grad_norm": 1.0408004522323608, |
| "learning_rate": 4.437043844996952e-06, |
| "loss": 0.0999, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.01319908466819222, |
| "grad_norm": 0.9866322875022888, |
| "learning_rate": 4.408070965292534e-06, |
| "loss": 0.0948, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.013217391304347827, |
| "grad_norm": 1.4886623620986938, |
| "learning_rate": 4.379166221478697e-06, |
| "loss": 0.1075, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.013235697940503433, |
| "grad_norm": 14.501533508300781, |
| "learning_rate": 4.350329965750622e-06, |
| "loss": 0.4004, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.01325400457665904, |
| "grad_norm": 5.454537391662598, |
| "learning_rate": 4.321562549468991e-06, |
| "loss": 0.219, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.013272311212814645, |
| "grad_norm": 7.190273761749268, |
| "learning_rate": 4.292864323155684e-06, |
| "loss": 0.4322, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.013290617848970251, |
| "grad_norm": 13.830406188964844, |
| "learning_rate": 4.264235636489542e-06, |
| "loss": 0.63, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.013308924485125858, |
| "grad_norm": 2.6480069160461426, |
| "learning_rate": 4.235676838302069e-06, |
| "loss": 0.1278, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.013327231121281464, |
| "grad_norm": 13.472162246704102, |
| "learning_rate": 4.207188276573214e-06, |
| "loss": 0.5058, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.01334553775743707, |
| "grad_norm": 2.51118540763855, |
| "learning_rate": 4.178770298427107e-06, |
| "loss": 0.149, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.013363844393592677, |
| "grad_norm": 2.3269636631011963, |
| "learning_rate": 4.150423250127846e-06, |
| "loss": 0.1129, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.013382151029748284, |
| "grad_norm": 1.8809363842010498, |
| "learning_rate": 4.12214747707527e-06, |
| "loss": 0.1052, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.01340045766590389, |
| "grad_norm": 2.9234514236450195, |
| "learning_rate": 4.093943323800746e-06, |
| "loss": 0.1489, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.013418764302059497, |
| "grad_norm": 8.009096145629883, |
| "learning_rate": 4.065811133962987e-06, |
| "loss": 0.2956, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.013437070938215103, |
| "grad_norm": 16.991966247558594, |
| "learning_rate": 4.037751250343841e-06, |
| "loss": 1.0203, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.01345537757437071, |
| "grad_norm": 8.988302230834961, |
| "learning_rate": 4.009764014844143e-06, |
| "loss": 0.4323, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.013473684210526317, |
| "grad_norm": 2.6384384632110596, |
| "learning_rate": 3.981849768479516e-06, |
| "loss": 0.1374, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.013491990846681921, |
| "grad_norm": 6.374378681182861, |
| "learning_rate": 3.954008851376252e-06, |
| "loss": 0.2113, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.013510297482837528, |
| "grad_norm": 8.83869457244873, |
| "learning_rate": 3.9262416027671354e-06, |
| "loss": 0.4818, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.013528604118993134, |
| "grad_norm": 6.571460723876953, |
| "learning_rate": 3.898548360987325e-06, |
| "loss": 0.1552, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.013546910755148741, |
| "grad_norm": 18.608381271362305, |
| "learning_rate": 3.8709294634702374e-06, |
| "loss": 0.3997, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.013565217391304348, |
| "grad_norm": 9.389320373535156, |
| "learning_rate": 3.8433852467434175e-06, |
| "loss": 0.4528, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.013583524027459954, |
| "grad_norm": 8.535320281982422, |
| "learning_rate": 3.81591604642446e-06, |
| "loss": 0.4345, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.01360183066361556, |
| "grad_norm": 1.2252271175384521, |
| "learning_rate": 3.7885221972168974e-06, |
| "loss": 0.1047, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.013620137299771167, |
| "grad_norm": 5.069352149963379, |
| "learning_rate": 3.7612040329061405e-06, |
| "loss": 0.0966, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.013638443935926774, |
| "grad_norm": 9.090739250183105, |
| "learning_rate": 3.7339618863553983e-06, |
| "loss": 0.3199, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.01365675057208238, |
| "grad_norm": 6.387343406677246, |
| "learning_rate": 3.7067960895016277e-06, |
| "loss": 0.2299, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.013675057208237987, |
| "grad_norm": 7.78866720199585, |
| "learning_rate": 3.679706973351491e-06, |
| "loss": 0.4284, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.013693363844393593, |
| "grad_norm": 5.307522773742676, |
| "learning_rate": 3.6526948679773256e-06, |
| "loss": 0.1915, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.0137116704805492, |
| "grad_norm": 8.756507873535156, |
| "learning_rate": 3.625760102513103e-06, |
| "loss": 0.2633, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.013729977116704805, |
| "grad_norm": 3.3090696334838867, |
| "learning_rate": 3.598903005150444e-06, |
| "loss": 0.1686, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.013748283752860411, |
| "grad_norm": 11.242887496948242, |
| "learning_rate": 3.5721239031346067e-06, |
| "loss": 0.9433, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.013766590389016018, |
| "grad_norm": 7.644121170043945, |
| "learning_rate": 3.545423122760493e-06, |
| "loss": 0.3955, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.013784897025171624, |
| "grad_norm": 9.132620811462402, |
| "learning_rate": 3.5188009893686916e-06, |
| "loss": 0.5613, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.01380320366132723, |
| "grad_norm": 8.751481056213379, |
| "learning_rate": 3.492257827341492e-06, |
| "loss": 0.5503, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.013821510297482837, |
| "grad_norm": 7.459012985229492, |
| "learning_rate": 3.4657939600989453e-06, |
| "loss": 0.2748, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.013839816933638444, |
| "grad_norm": 7.091245651245117, |
| "learning_rate": 3.4394097100949286e-06, |
| "loss": 0.3414, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.01385812356979405, |
| "grad_norm": 6.701774597167969, |
| "learning_rate": 3.4131053988131947e-06, |
| "loss": 0.2112, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.013876430205949657, |
| "grad_norm": 5.4992475509643555, |
| "learning_rate": 3.3868813467634833e-06, |
| "loss": 0.2098, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.013894736842105264, |
| "grad_norm": 4.821681976318359, |
| "learning_rate": 3.360737873477584e-06, |
| "loss": 0.2729, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.01391304347826087, |
| "grad_norm": 4.573626518249512, |
| "learning_rate": 3.3346752975054763e-06, |
| "loss": 0.3077, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.013931350114416477, |
| "grad_norm": 5.970454692840576, |
| "learning_rate": 3.308693936411421e-06, |
| "loss": 0.2764, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.013949656750572083, |
| "grad_norm": 15.272013664245605, |
| "learning_rate": 3.2827941067700996e-06, |
| "loss": 0.3641, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.013967963386727688, |
| "grad_norm": 7.55459451675415, |
| "learning_rate": 3.2569761241627694e-06, |
| "loss": 0.1848, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.013986270022883295, |
| "grad_norm": 4.483034133911133, |
| "learning_rate": 3.2312403031733943e-06, |
| "loss": 0.221, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.014004576659038901, |
| "grad_norm": 4.3897624015808105, |
| "learning_rate": 3.2055869573848374e-06, |
| "loss": 0.175, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.014022883295194508, |
| "grad_norm": 8.360304832458496, |
| "learning_rate": 3.1800163993750166e-06, |
| "loss": 0.3661, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.014041189931350114, |
| "grad_norm": 5.351900100708008, |
| "learning_rate": 3.1545289407131128e-06, |
| "loss": 0.1164, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.01405949656750572, |
| "grad_norm": 6.663723468780518, |
| "learning_rate": 3.1291248919557717e-06, |
| "loss": 0.1693, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.014077803203661327, |
| "grad_norm": 2.0629947185516357, |
| "learning_rate": 3.103804562643302e-06, |
| "loss": 0.0827, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.014096109839816934, |
| "grad_norm": 4.476418495178223, |
| "learning_rate": 3.0785682612959334e-06, |
| "loss": 0.238, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.01411441647597254, |
| "grad_norm": 1.5098648071289062, |
| "learning_rate": 3.0534162954100264e-06, |
| "loss": 0.1068, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.014132723112128147, |
| "grad_norm": 10.149811744689941, |
| "learning_rate": 3.028348971454356e-06, |
| "loss": 0.3284, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.014151029748283753, |
| "grad_norm": 5.156161785125732, |
| "learning_rate": 3.003366594866345e-06, |
| "loss": 0.2241, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.01416933638443936, |
| "grad_norm": 3.282696008682251, |
| "learning_rate": 2.978469470048376e-06, |
| "loss": 0.1718, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.014187643020594966, |
| "grad_norm": 7.071142196655273, |
| "learning_rate": 2.953657900364053e-06, |
| "loss": 0.4103, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.014205949656750571, |
| "grad_norm": 3.6657493114471436, |
| "learning_rate": 2.9289321881345257e-06, |
| "loss": 0.1982, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.014224256292906178, |
| "grad_norm": 5.000716209411621, |
| "learning_rate": 2.9042926346347932e-06, |
| "loss": 0.193, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.014242562929061784, |
| "grad_norm": 2.750865936279297, |
| "learning_rate": 2.8797395400900362e-06, |
| "loss": 0.1396, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.014260869565217391, |
| "grad_norm": 8.415980339050293, |
| "learning_rate": 2.855273203671969e-06, |
| "loss": 0.5748, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.014279176201372997, |
| "grad_norm": 4.887707710266113, |
| "learning_rate": 2.830893923495173e-06, |
| "loss": 0.2645, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.014297482837528604, |
| "grad_norm": 4.593505382537842, |
| "learning_rate": 2.8066019966134907e-06, |
| "loss": 0.212, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.01431578947368421, |
| "grad_norm": 4.513943672180176, |
| "learning_rate": 2.7823977190163788e-06, |
| "loss": 0.2007, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.014334096109839817, |
| "grad_norm": 1.8813406229019165, |
| "learning_rate": 2.7582813856253276e-06, |
| "loss": 0.1123, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.014352402745995424, |
| "grad_norm": 7.425732612609863, |
| "learning_rate": 2.7342532902902418e-06, |
| "loss": 0.2917, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.01437070938215103, |
| "grad_norm": 3.4717419147491455, |
| "learning_rate": 2.7103137257858867e-06, |
| "loss": 0.1469, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.014389016018306637, |
| "grad_norm": 3.6180458068847656, |
| "learning_rate": 2.6864629838082957e-06, |
| "loss": 0.1825, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.014407322654462243, |
| "grad_norm": 11.820043563842773, |
| "learning_rate": 2.6627013549712355e-06, |
| "loss": 0.3843, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.01442562929061785, |
| "grad_norm": 1.4070264101028442, |
| "learning_rate": 2.639029128802657e-06, |
| "loss": 0.1179, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.014443935926773455, |
| "grad_norm": 3.2094695568084717, |
| "learning_rate": 2.615446593741161e-06, |
| "loss": 0.1432, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.014462242562929061, |
| "grad_norm": 11.187945365905762, |
| "learning_rate": 2.5919540371325005e-06, |
| "loss": 0.5913, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.014480549199084668, |
| "grad_norm": 8.291489601135254, |
| "learning_rate": 2.5685517452260566e-06, |
| "loss": 0.5073, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.014498855835240274, |
| "grad_norm": 3.545529365539551, |
| "learning_rate": 2.5452400031713786e-06, |
| "loss": 0.1465, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.01451716247139588, |
| "grad_norm": 7.315761566162109, |
| "learning_rate": 2.522019095014683e-06, |
| "loss": 0.624, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.014535469107551487, |
| "grad_norm": 11.311168670654297, |
| "learning_rate": 2.4988893036954045e-06, |
| "loss": 0.5427, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.014553775743707094, |
| "grad_norm": 7.208177089691162, |
| "learning_rate": 2.4758509110427576e-06, |
| "loss": 0.2099, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.0145720823798627, |
| "grad_norm": 6.952854156494141, |
| "learning_rate": 2.45290419777228e-06, |
| "loss": 0.2803, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.014590389016018307, |
| "grad_norm": 5.105114459991455, |
| "learning_rate": 2.4300494434824373e-06, |
| "loss": 0.2132, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.014608695652173913, |
| "grad_norm": 5.196943283081055, |
| "learning_rate": 2.407286926651192e-06, |
| "loss": 0.2229, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.01462700228832952, |
| "grad_norm": 5.220571041107178, |
| "learning_rate": 2.3846169246326345e-06, |
| "loss": 0.2873, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.014645308924485127, |
| "grad_norm": 2.8920493125915527, |
| "learning_rate": 2.362039713653581e-06, |
| "loss": 0.1196, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.014663615560640733, |
| "grad_norm": 6.959986686706543, |
| "learning_rate": 2.339555568810221e-06, |
| "loss": 0.2673, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.014681922196796338, |
| "grad_norm": 19.750226974487305, |
| "learning_rate": 2.317164764064769e-06, |
| "loss": 0.2558, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.014700228832951944, |
| "grad_norm": 4.109073638916016, |
| "learning_rate": 2.2948675722421086e-06, |
| "loss": 0.2048, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.014718535469107551, |
| "grad_norm": 6.572871208190918, |
| "learning_rate": 2.27266426502649e-06, |
| "loss": 0.5073, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.014736842105263158, |
| "grad_norm": 5.497032642364502, |
| "learning_rate": 2.2505551129582047e-06, |
| "loss": 0.2909, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.014755148741418764, |
| "grad_norm": 4.798393726348877, |
| "learning_rate": 2.2285403854302912e-06, |
| "loss": 0.1645, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.01477345537757437, |
| "grad_norm": 6.386361122131348, |
| "learning_rate": 2.206620350685257e-06, |
| "loss": 0.2036, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.014791762013729977, |
| "grad_norm": 15.141334533691406, |
| "learning_rate": 2.1847952758118118e-06, |
| "loss": 0.6469, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.014810068649885584, |
| "grad_norm": 2.4272351264953613, |
| "learning_rate": 2.163065426741603e-06, |
| "loss": 0.1063, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.01482837528604119, |
| "grad_norm": 9.701894760131836, |
| "learning_rate": 2.1414310682459805e-06, |
| "loss": 0.7668, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.014846681922196797, |
| "grad_norm": 2.3073835372924805, |
| "learning_rate": 2.119892463932781e-06, |
| "loss": 0.161, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.014864988558352403, |
| "grad_norm": 8.390331268310547, |
| "learning_rate": 2.098449876243096e-06, |
| "loss": 0.3498, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.01488329519450801, |
| "grad_norm": 5.491335868835449, |
| "learning_rate": 2.0771035664480944e-06, |
| "loss": 0.2787, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.014901601830663615, |
| "grad_norm": 4.279118061065674, |
| "learning_rate": 2.0558537946458177e-06, |
| "loss": 0.1625, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.014919908466819221, |
| "grad_norm": 7.896650791168213, |
| "learning_rate": 2.0347008197580376e-06, |
| "loss": 0.3681, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.014938215102974828, |
| "grad_norm": 10.665939331054688, |
| "learning_rate": 2.013644899527074e-06, |
| "loss": 0.4568, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.014956521739130434, |
| "grad_norm": 24.828960418701172, |
| "learning_rate": 1.9926862905126663e-06, |
| "loss": 1.5729, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.014974828375286041, |
| "grad_norm": 12.46844482421875, |
| "learning_rate": 1.9718252480888567e-06, |
| "loss": 0.587, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.014993135011441647, |
| "grad_norm": 6.524204730987549, |
| "learning_rate": 1.95106202644086e-06, |
| "loss": 0.2851, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.015011441647597254, |
| "grad_norm": 9.998326301574707, |
| "learning_rate": 1.930396878561983e-06, |
| "loss": 0.5134, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.01502974828375286, |
| "grad_norm": 15.49677848815918, |
| "learning_rate": 1.9098300562505266e-06, |
| "loss": 0.6058, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.015048054919908467, |
| "grad_norm": 5.195704460144043, |
| "learning_rate": 1.8893618101067357e-06, |
| "loss": 0.2136, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.015066361556064074, |
| "grad_norm": 7.91325044631958, |
| "learning_rate": 1.8689923895297247e-06, |
| "loss": 0.3135, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.01508466819221968, |
| "grad_norm": 12.137787818908691, |
| "learning_rate": 1.848722042714457e-06, |
| "loss": 0.9225, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.015102974828375287, |
| "grad_norm": 5.33144998550415, |
| "learning_rate": 1.8285510166487154e-06, |
| "loss": 0.2151, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.015121281464530893, |
| "grad_norm": 6.352451324462891, |
| "learning_rate": 1.808479557110081e-06, |
| "loss": 0.2846, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.015139588100686498, |
| "grad_norm": 5.770841598510742, |
| "learning_rate": 1.7885079086629598e-06, |
| "loss": 0.258, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.015157894736842105, |
| "grad_norm": 9.67846393585205, |
| "learning_rate": 1.7686363146555807e-06, |
| "loss": 0.359, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.015176201372997711, |
| "grad_norm": 1.321305751800537, |
| "learning_rate": 1.7488650172170496e-06, |
| "loss": 0.0735, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.015194508009153318, |
| "grad_norm": 14.980170249938965, |
| "learning_rate": 1.7291942572543806e-06, |
| "loss": 0.3208, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.015212814645308924, |
| "grad_norm": 12.814326286315918, |
| "learning_rate": 1.709624274449584e-06, |
| "loss": 0.5511, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.01523112128146453, |
| "grad_norm": 9.458513259887695, |
| "learning_rate": 1.6901553072567189e-06, |
| "loss": 0.3438, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.015249427917620137, |
| "grad_norm": 2.2716009616851807, |
| "learning_rate": 1.6707875928990059e-06, |
| "loss": 0.158, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.015267734553775744, |
| "grad_norm": 4.668413162231445, |
| "learning_rate": 1.651521367365936e-06, |
| "loss": 0.2095, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.01528604118993135, |
| "grad_norm": 6.753487586975098, |
| "learning_rate": 1.6323568654103838e-06, |
| "loss": 0.3238, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.015304347826086957, |
| "grad_norm": 4.7871785163879395, |
| "learning_rate": 1.6132943205457607e-06, |
| "loss": 0.2238, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.015322654462242563, |
| "grad_norm": 9.5857515335083, |
| "learning_rate": 1.5943339650431578e-06, |
| "loss": 0.3882, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.01534096109839817, |
| "grad_norm": 5.032664775848389, |
| "learning_rate": 1.5754760299285255e-06, |
| "loss": 0.2316, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.015359267734553777, |
| "grad_norm": 0.9537063837051392, |
| "learning_rate": 1.5567207449798517e-06, |
| "loss": 0.0631, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.015377574370709381, |
| "grad_norm": 3.6486315727233887, |
| "learning_rate": 1.538068338724361e-06, |
| "loss": 0.1585, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.015395881006864988, |
| "grad_norm": 7.9478840827941895, |
| "learning_rate": 1.5195190384357405e-06, |
| "loss": 0.3083, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.015414187643020594, |
| "grad_norm": 4.013637542724609, |
| "learning_rate": 1.5010730701313626e-06, |
| "loss": 0.1468, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.015432494279176201, |
| "grad_norm": 13.950557708740234, |
| "learning_rate": 1.4827306585695234e-06, |
| "loss": 0.4453, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.015450800915331808, |
| "grad_norm": 8.558002471923828, |
| "learning_rate": 1.4644920272467245e-06, |
| "loss": 0.5517, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.015469107551487414, |
| "grad_norm": 4.049571514129639, |
| "learning_rate": 1.446357398394934e-06, |
| "loss": 0.2165, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.01548741418764302, |
| "grad_norm": 7.492833137512207, |
| "learning_rate": 1.4283269929788779e-06, |
| "loss": 0.3542, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.015505720823798627, |
| "grad_norm": 4.005830764770508, |
| "learning_rate": 1.4104010306933558e-06, |
| "loss": 0.2093, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.015524027459954234, |
| "grad_norm": 4.699175834655762, |
| "learning_rate": 1.3925797299605649e-06, |
| "loss": 0.1869, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.01554233409610984, |
| "grad_norm": 7.27205228805542, |
| "learning_rate": 1.3748633079274254e-06, |
| "loss": 0.3142, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.015560640732265447, |
| "grad_norm": 6.372773170471191, |
| "learning_rate": 1.3572519804629537e-06, |
| "loss": 0.2057, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.015578947368421053, |
| "grad_norm": 6.957703590393066, |
| "learning_rate": 1.339745962155613e-06, |
| "loss": 0.2801, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.01559725400457666, |
| "grad_norm": 7.599903106689453, |
| "learning_rate": 1.322345466310717e-06, |
| "loss": 0.2687, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.015615560640732265, |
| "grad_norm": 5.099765300750732, |
| "learning_rate": 1.30505070494781e-06, |
| "loss": 0.2243, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.015633867276887873, |
| "grad_norm": 8.895179748535156, |
| "learning_rate": 1.2878618887981064e-06, |
| "loss": 0.4707, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.01565217391304348, |
| "grad_norm": 2.2955360412597656, |
| "learning_rate": 1.2707792273019049e-06, |
| "loss": 0.0989, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.015670480549199086, |
| "grad_norm": 7.87874698638916, |
| "learning_rate": 1.2538029286060428e-06, |
| "loss": 0.376, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.015688787185354693, |
| "grad_norm": 9.83598518371582, |
| "learning_rate": 1.2369331995613664e-06, |
| "loss": 0.6476, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.0157070938215103, |
| "grad_norm": 2.0399458408355713, |
| "learning_rate": 1.2201702457201948e-06, |
| "loss": 0.1026, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.015725400457665902, |
| "grad_norm": 6.7959442138671875, |
| "learning_rate": 1.2035142713338366e-06, |
| "loss": 0.1965, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.01574370709382151, |
| "grad_norm": 9.003751754760742, |
| "learning_rate": 1.1869654793500784e-06, |
| "loss": 0.5292, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.015762013729977115, |
| "grad_norm": 11.155455589294434, |
| "learning_rate": 1.1705240714107301e-06, |
| "loss": 0.5597, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.015780320366132722, |
| "grad_norm": 9.739468574523926, |
| "learning_rate": 1.1541902478491607e-06, |
| "loss": 0.5616, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.01579862700228833, |
| "grad_norm": 4.332462310791016, |
| "learning_rate": 1.1379642076878528e-06, |
| "loss": 0.1764, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.015816933638443935, |
| "grad_norm": 4.867697715759277, |
| "learning_rate": 1.1218461486359878e-06, |
| "loss": 0.3082, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.01583524027459954, |
| "grad_norm": 6.073899269104004, |
| "learning_rate": 1.1058362670870248e-06, |
| "loss": 0.3394, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.015853546910755148, |
| "grad_norm": 15.319504737854004, |
| "learning_rate": 1.0899347581163222e-06, |
| "loss": 0.3632, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.015871853546910755, |
| "grad_norm": 8.388349533081055, |
| "learning_rate": 1.0741418154787443e-06, |
| "loss": 0.477, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.01589016018306636, |
| "grad_norm": 4.829792499542236, |
| "learning_rate": 1.058457631606319e-06, |
| "loss": 0.2643, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.015908466819221968, |
| "grad_norm": 10.846988677978516, |
| "learning_rate": 1.042882397605871e-06, |
| "loss": 0.6206, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.015926773455377574, |
| "grad_norm": 9.261481285095215, |
| "learning_rate": 1.0274163032567165e-06, |
| "loss": 0.3205, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.01594508009153318, |
| "grad_norm": 1.8402290344238281, |
| "learning_rate": 1.012059537008332e-06, |
| "loss": 0.0896, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.015963386727688787, |
| "grad_norm": 4.178070545196533, |
| "learning_rate": 9.968122859780648e-07, |
| "loss": 0.2115, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.015981693363844394, |
| "grad_norm": 1.212082862854004, |
| "learning_rate": 9.816747359488632e-07, |
| "loss": 0.0796, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.016, |
| "grad_norm": 17.429851531982422, |
| "learning_rate": 9.666470713669918e-07, |
| "loss": 0.6567, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.016018306636155607, |
| "grad_norm": 3.6043081283569336, |
| "learning_rate": 9.517294753398066e-07, |
| "loss": 0.2011, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.016036613272311213, |
| "grad_norm": 6.399504661560059, |
| "learning_rate": 9.369221296335007e-07, |
| "loss": 0.3302, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.01605491990846682, |
| "grad_norm": 17.06818389892578, |
| "learning_rate": 9.222252146709143e-07, |
| "loss": 0.853, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.016073226544622427, |
| "grad_norm": 2.9740846157073975, |
| "learning_rate": 9.076389095293148e-07, |
| "loss": 0.1675, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.016091533180778033, |
| "grad_norm": 3.0257978439331055, |
| "learning_rate": 8.931633919382299e-07, |
| "loss": 0.2347, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.01610983981693364, |
| "grad_norm": 1.34092378616333, |
| "learning_rate": 8.787988382772705e-07, |
| "loss": 0.118, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.016128146453089246, |
| "grad_norm": 0.5438145995140076, |
| "learning_rate": 8.645454235739903e-07, |
| "loss": 0.0775, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.016146453089244853, |
| "grad_norm": 6.302069187164307, |
| "learning_rate": 8.504033215017527e-07, |
| "loss": 0.3397, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.01616475972540046, |
| "grad_norm": 11.530533790588379, |
| "learning_rate": 8.363727043776037e-07, |
| "loss": 0.3956, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.016183066361556066, |
| "grad_norm": 12.510212898254395, |
| "learning_rate": 8.224537431601886e-07, |
| "loss": 0.2942, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.01620137299771167, |
| "grad_norm": 10.052515983581543, |
| "learning_rate": 8.086466074476562e-07, |
| "loss": 0.2827, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.016219679633867275, |
| "grad_norm": 2.31078839302063, |
| "learning_rate": 7.949514654755963e-07, |
| "loss": 0.1479, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.016237986270022882, |
| "grad_norm": 16.138065338134766, |
| "learning_rate": 7.81368484114996e-07, |
| "loss": 0.4199, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.01625629290617849, |
| "grad_norm": 3.326777458190918, |
| "learning_rate": 7.678978288701911e-07, |
| "loss": 0.1543, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.016274599542334095, |
| "grad_norm": 5.338883876800537, |
| "learning_rate": 7.545396638768698e-07, |
| "loss": 0.1821, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.0162929061784897, |
| "grad_norm": 7.2971014976501465, |
| "learning_rate": 7.412941519000527e-07, |
| "loss": 0.3375, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.016311212814645308, |
| "grad_norm": 6.16159725189209, |
| "learning_rate": 7.281614543321269e-07, |
| "loss": 0.2027, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.016329519450800915, |
| "grad_norm": 12.14189338684082, |
| "learning_rate": 7.151417311908648e-07, |
| "loss": 0.4996, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.01634782608695652, |
| "grad_norm": 9.135406494140625, |
| "learning_rate": 7.022351411174866e-07, |
| "loss": 0.3862, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.016366132723112128, |
| "grad_norm": 9.651875495910645, |
| "learning_rate": 6.894418413747183e-07, |
| "loss": 0.3808, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.016384439359267734, |
| "grad_norm": 4.33256196975708, |
| "learning_rate": 6.767619878448783e-07, |
| "loss": 0.2232, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.01640274599542334, |
| "grad_norm": 5.881313800811768, |
| "learning_rate": 6.641957350279838e-07, |
| "loss": 0.2579, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.016421052631578947, |
| "grad_norm": 2.510111093521118, |
| "learning_rate": 6.517432360398556e-07, |
| "loss": 0.1578, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.016439359267734554, |
| "grad_norm": 2.6253507137298584, |
| "learning_rate": 6.394046426102673e-07, |
| "loss": 0.1334, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.01645766590389016, |
| "grad_norm": 5.544801712036133, |
| "learning_rate": 6.271801050810856e-07, |
| "loss": 0.3322, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.016475972540045767, |
| "grad_norm": 5.784806251525879, |
| "learning_rate": 6.150697724044407e-07, |
| "loss": 0.3011, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.016494279176201374, |
| "grad_norm": 10.115022659301758, |
| "learning_rate": 6.030737921409169e-07, |
| "loss": 0.4213, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.01651258581235698, |
| "grad_norm": 13.211285591125488, |
| "learning_rate": 5.911923104577455e-07, |
| "loss": 0.2885, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.016530892448512587, |
| "grad_norm": 4.184572696685791, |
| "learning_rate": 5.794254721270331e-07, |
| "loss": 0.2343, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.016549199084668193, |
| "grad_norm": 6.290907859802246, |
| "learning_rate": 5.677734205239904e-07, |
| "loss": 0.259, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.0165675057208238, |
| "grad_norm": 4.348118305206299, |
| "learning_rate": 5.562362976251901e-07, |
| "loss": 0.2195, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.016585812356979406, |
| "grad_norm": 7.895175457000732, |
| "learning_rate": 5.448142440068316e-07, |
| "loss": 0.3582, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.016604118993135013, |
| "grad_norm": 6.373378753662109, |
| "learning_rate": 5.335073988430373e-07, |
| "loss": 0.4875, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.01662242562929062, |
| "grad_norm": 4.442923069000244, |
| "learning_rate": 5.223158999041444e-07, |
| "loss": 0.2706, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.016640732265446226, |
| "grad_norm": 5.623570442199707, |
| "learning_rate": 5.112398835550348e-07, |
| "loss": 0.249, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.01665903890160183, |
| "grad_norm": 5.596260070800781, |
| "learning_rate": 5.002794847534765e-07, |
| "loss": 0.3783, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.016677345537757435, |
| "grad_norm": 4.682590484619141, |
| "learning_rate": 4.894348370484648e-07, |
| "loss": 0.2253, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.016695652173913042, |
| "grad_norm": 8.983024597167969, |
| "learning_rate": 4.787060725786141e-07, |
| "loss": 0.3797, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.01671395881006865, |
| "grad_norm": 1.1545554399490356, |
| "learning_rate": 4.6809332207053083e-07, |
| "loss": 0.1166, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.016732265446224255, |
| "grad_norm": 5.555506229400635, |
| "learning_rate": 4.575967148372318e-07, |
| "loss": 0.2964, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.01675057208237986, |
| "grad_norm": 2.3009276390075684, |
| "learning_rate": 4.4721637877656377e-07, |
| "loss": 0.1593, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.016768878718535468, |
| "grad_norm": 4.247383117675781, |
| "learning_rate": 4.3695244036964567e-07, |
| "loss": 0.1959, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.016787185354691075, |
| "grad_norm": 8.078374862670898, |
| "learning_rate": 4.268050246793276e-07, |
| "loss": 0.1937, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.01680549199084668, |
| "grad_norm": 4.602707386016846, |
| "learning_rate": 4.167742553486676e-07, |
| "loss": 0.2737, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.016823798627002288, |
| "grad_norm": 4.030495643615723, |
| "learning_rate": 4.068602545994249e-07, |
| "loss": 0.1949, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.016842105263157894, |
| "grad_norm": 6.921680450439453, |
| "learning_rate": 3.9706314323056936e-07, |
| "loss": 0.3539, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.0168604118993135, |
| "grad_norm": 4.294434070587158, |
| "learning_rate": 3.8738304061681107e-07, |
| "loss": 0.1642, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.016878718535469107, |
| "grad_norm": 8.08474349975586, |
| "learning_rate": 3.7782006470714614e-07, |
| "loss": 0.531, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.016897025171624714, |
| "grad_norm": 5.4197540283203125, |
| "learning_rate": 3.68374332023419e-07, |
| "loss": 0.1086, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.01691533180778032, |
| "grad_norm": 10.48359203338623, |
| "learning_rate": 3.590459576589e-07, |
| "loss": 0.4211, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.016933638443935927, |
| "grad_norm": 6.148892879486084, |
| "learning_rate": 3.498350552768859e-07, |
| "loss": 0.2935, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.016951945080091534, |
| "grad_norm": 37.46620559692383, |
| "learning_rate": 3.4074173710931804e-07, |
| "loss": 0.4102, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.01697025171624714, |
| "grad_norm": 8.583579063415527, |
| "learning_rate": 3.3176611395540625e-07, |
| "loss": 0.5772, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.016988558352402747, |
| "grad_norm": 7.78873872756958, |
| "learning_rate": 3.2290829518028867e-07, |
| "loss": 0.4475, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.017006864988558353, |
| "grad_norm": 1.913015365600586, |
| "learning_rate": 3.1416838871368925e-07, |
| "loss": 0.1075, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.01702517162471396, |
| "grad_norm": 19.77604103088379, |
| "learning_rate": 3.0554650104861137e-07, |
| "loss": 0.8314, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.017043478260869566, |
| "grad_norm": 6.767706394195557, |
| "learning_rate": 2.970427372400353e-07, |
| "loss": 0.2661, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.017061784897025173, |
| "grad_norm": 6.047036647796631, |
| "learning_rate": 2.8865720090364037e-07, |
| "loss": 0.3102, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.01708009153318078, |
| "grad_norm": 3.897958517074585, |
| "learning_rate": 2.8038999421453827e-07, |
| "loss": 0.2229, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.017098398169336386, |
| "grad_norm": 14.799800872802734, |
| "learning_rate": 2.7224121790603517e-07, |
| "loss": 0.9037, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.017116704805491992, |
| "grad_norm": 1.944534420967102, |
| "learning_rate": 2.6421097126839714e-07, |
| "loss": 0.0869, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.017135011441647596, |
| "grad_norm": 6.719526290893555, |
| "learning_rate": 2.5629935214764866e-07, |
| "loss": 0.2525, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.017153318077803202, |
| "grad_norm": 1.7623887062072754, |
| "learning_rate": 2.4850645694436736e-07, |
| "loss": 0.0721, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.01717162471395881, |
| "grad_norm": 2.431997776031494, |
| "learning_rate": 2.4083238061252565e-07, |
| "loss": 0.1643, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.017189931350114415, |
| "grad_norm": 11.728501319885254, |
| "learning_rate": 2.332772166583208e-07, |
| "loss": 0.234, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.017208237986270022, |
| "grad_norm": 1.5342614650726318, |
| "learning_rate": 2.2584105713904126e-07, |
| "loss": 0.1473, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.01722654462242563, |
| "grad_norm": 8.040061950683594, |
| "learning_rate": 2.1852399266194312e-07, |
| "loss": 0.3064, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.017244851258581235, |
| "grad_norm": 2.8261466026306152, |
| "learning_rate": 2.1132611238315004e-07, |
| "loss": 0.2135, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.01726315789473684, |
| "grad_norm": 5.933513164520264, |
| "learning_rate": 2.0424750400655947e-07, |
| "loss": 0.3271, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.017281464530892448, |
| "grad_norm": 3.4956777095794678, |
| "learning_rate": 1.9728825378278248e-07, |
| "loss": 0.1417, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.017299771167048054, |
| "grad_norm": 6.9485697746276855, |
| "learning_rate": 1.9044844650808468e-07, |
| "loss": 0.326, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.01731807780320366, |
| "grad_norm": 9.41317081451416, |
| "learning_rate": 1.8372816552336025e-07, |
| "loss": 0.4231, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.017336384439359268, |
| "grad_norm": 6.496486663818359, |
| "learning_rate": 1.7712749271311392e-07, |
| "loss": 0.2621, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.017354691075514874, |
| "grad_norm": 9.458244323730469, |
| "learning_rate": 1.706465085044584e-07, |
| "loss": 0.3791, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.01737299771167048, |
| "grad_norm": 1.7211376428604126, |
| "learning_rate": 1.6428529186614195e-07, |
| "loss": 0.1238, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.017391304347826087, |
| "grad_norm": 2.092992067337036, |
| "learning_rate": 1.580439203075812e-07, |
| "loss": 0.1183, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.017409610983981694, |
| "grad_norm": 7.563133716583252, |
| "learning_rate": 1.519224698779198e-07, |
| "loss": 0.2907, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.0174279176201373, |
| "grad_norm": 13.335310935974121, |
| "learning_rate": 1.4592101516509916e-07, |
| "loss": 0.7671, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.017446224256292907, |
| "grad_norm": 10.47811508178711, |
| "learning_rate": 1.400396292949513e-07, |
| "loss": 0.4623, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.017464530892448513, |
| "grad_norm": 7.000487804412842, |
| "learning_rate": 1.3427838393030634e-07, |
| "loss": 0.4022, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.01748283752860412, |
| "grad_norm": 6.856893062591553, |
| "learning_rate": 1.2863734927012094e-07, |
| "loss": 0.3277, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.017501144164759726, |
| "grad_norm": 12.255921363830566, |
| "learning_rate": 1.231165940486234e-07, |
| "loss": 0.502, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.017519450800915333, |
| "grad_norm": 24.51825714111328, |
| "learning_rate": 1.1771618553447217e-07, |
| "loss": 0.3786, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.01753775743707094, |
| "grad_norm": 2.42156982421875, |
| "learning_rate": 1.1243618952994195e-07, |
| "loss": 0.1061, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.017556064073226546, |
| "grad_norm": 19.74112319946289, |
| "learning_rate": 1.0727667037011668e-07, |
| "loss": 0.6959, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.017574370709382153, |
| "grad_norm": 2.00630784034729, |
| "learning_rate": 1.0223769092211012e-07, |
| "loss": 0.1106, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.01759267734553776, |
| "grad_norm": 7.67366361618042, |
| "learning_rate": 9.731931258429638e-08, |
| "loss": 0.173, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.017610983981693362, |
| "grad_norm": 9.130098342895508, |
| "learning_rate": 9.252159528556404e-08, |
| "loss": 0.4265, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.01762929061784897, |
| "grad_norm": 2.1006438732147217, |
| "learning_rate": 8.784459748458318e-08, |
| "loss": 0.0866, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.017647597254004575, |
| "grad_norm": 9.933135032653809, |
| "learning_rate": 8.328837616909612e-08, |
| "loss": 0.2907, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.017665903890160182, |
| "grad_norm": 6.022252559661865, |
| "learning_rate": 7.885298685522235e-08, |
| "loss": 0.3691, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.01768421052631579, |
| "grad_norm": 3.950028419494629, |
| "learning_rate": 7.453848358678018e-08, |
| "loss": 0.138, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.017702517162471395, |
| "grad_norm": 5.163845539093018, |
| "learning_rate": 7.034491893463059e-08, |
| "loss": 0.2737, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.017720823798627, |
| "grad_norm": 7.640467166900635, |
| "learning_rate": 6.627234399603554e-08, |
| "loss": 0.3658, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.017739130434782608, |
| "grad_norm": 6.5803985595703125, |
| "learning_rate": 6.232080839403631e-08, |
| "loss": 0.3093, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.017757437070938215, |
| "grad_norm": 4.211984634399414, |
| "learning_rate": 5.849036027684607e-08, |
| "loss": 0.1708, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.01777574370709382, |
| "grad_norm": 8.148463249206543, |
| "learning_rate": 5.4781046317267103e-08, |
| "loss": 0.3152, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.017794050343249428, |
| "grad_norm": 4.271562099456787, |
| "learning_rate": 5.119291171211793e-08, |
| "loss": 0.1852, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.017812356979405034, |
| "grad_norm": 8.320340156555176, |
| "learning_rate": 4.772600018168816e-08, |
| "loss": 0.3206, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.01783066361556064, |
| "grad_norm": 9.28972053527832, |
| "learning_rate": 4.438035396920004e-08, |
| "loss": 0.3864, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.017848970251716247, |
| "grad_norm": 12.746179580688477, |
| "learning_rate": 4.115601384029666e-08, |
| "loss": 0.694, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.017867276887871854, |
| "grad_norm": 12.518298149108887, |
| "learning_rate": 3.805301908254455e-08, |
| "loss": 0.4285, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.01788558352402746, |
| "grad_norm": 6.564645767211914, |
| "learning_rate": 3.50714075049563e-08, |
| "loss": 0.3002, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.017903890160183067, |
| "grad_norm": 8.353341102600098, |
| "learning_rate": 3.22112154375287e-08, |
| "loss": 0.3428, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.017922196796338673, |
| "grad_norm": 8.169654846191406, |
| "learning_rate": 2.947247773079753e-08, |
| "loss": 0.3041, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.01794050343249428, |
| "grad_norm": 3.4119813442230225, |
| "learning_rate": 2.6855227755419046e-08, |
| "loss": 0.1635, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.017958810068649887, |
| "grad_norm": 0.7701112627983093, |
| "learning_rate": 2.4359497401758026e-08, |
| "loss": 0.0799, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.017977116704805493, |
| "grad_norm": 7.056750297546387, |
| "learning_rate": 2.1985317079500358e-08, |
| "loss": 0.2713, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.0179954233409611, |
| "grad_norm": 13.480681419372559, |
| "learning_rate": 1.973271571728441e-08, |
| "loss": 0.5698, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.018013729977116706, |
| "grad_norm": 2.141348123550415, |
| "learning_rate": 1.7601720762346895e-08, |
| "loss": 0.1175, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.018032036613272313, |
| "grad_norm": 4.411203384399414, |
| "learning_rate": 1.5592358180189782e-08, |
| "loss": 0.1344, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.01805034324942792, |
| "grad_norm": 9.80256462097168, |
| "learning_rate": 1.370465245426167e-08, |
| "loss": 0.3765, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.018068649885583522, |
| "grad_norm": 20.578289031982422, |
| "learning_rate": 1.1938626585660252e-08, |
| "loss": 0.9648, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.01808695652173913, |
| "grad_norm": 6.394954681396484, |
| "learning_rate": 1.0294302092853647e-08, |
| "loss": 0.2917, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.018105263157894735, |
| "grad_norm": 2.968735933303833, |
| "learning_rate": 8.771699011416169e-09, |
| "loss": 0.1552, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.018123569794050342, |
| "grad_norm": 9.075217247009277, |
| "learning_rate": 7.370835893788508e-09, |
| "loss": 0.5275, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.01814187643020595, |
| "grad_norm": 0.4628898799419403, |
| "learning_rate": 6.091729809042379e-09, |
| "loss": 0.0548, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.018160183066361555, |
| "grad_norm": 9.862579345703125, |
| "learning_rate": 4.9343963426840006e-09, |
| "loss": 0.4799, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.01817848970251716, |
| "grad_norm": 0.9560385942459106, |
| "learning_rate": 3.898849596456477e-09, |
| "loss": 0.0928, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.018196796338672768, |
| "grad_norm": 14.042981147766113, |
| "learning_rate": 2.9851021881688314e-09, |
| "loss": 0.4677, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.018215102974828375, |
| "grad_norm": 1.057991862297058, |
| "learning_rate": 2.193165251545004e-09, |
| "loss": 0.112, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.01823340961098398, |
| "grad_norm": 2.119772434234619, |
| "learning_rate": 1.5230484360873043e-09, |
| "loss": 0.105, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.018251716247139588, |
| "grad_norm": 8.898747444152832, |
| "learning_rate": 9.74759906957612e-10, |
| "loss": 0.403, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.018270022883295194, |
| "grad_norm": 4.880192756652832, |
| "learning_rate": 5.483063448785686e-10, |
| "loss": 0.2703, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.0182883295194508, |
| "grad_norm": 1.7518311738967896, |
| "learning_rate": 2.436929460525317e-10, |
| "loss": 0.0996, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.018306636155606407, |
| "grad_norm": 4.965724945068359, |
| "learning_rate": 6.092342209607083e-11, |
| "loss": 0.208, |
| "step": 1000 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 200, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |
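
A short note on consuming this file: the structure above is the trainer-state JSON that the Hugging Face `Trainer` saves alongside checkpoints, and each entry in `log_history` records the per-step epoch fraction, gradient norm, learning rate, and loss. The sketch below is one minimal way to summarize such a state; the filename `trainer_state.json` and the 50-step averaging window are assumptions chosen for illustration, not values taken from the log.

```python
# Minimal sketch: summarize a trainer_state.json like the one above.
# Assumptions (not from the log itself): the state is stored as plain JSON
# in "trainer_state.json"; a 50-step window is used for the loss average.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # per-step dicts: epoch, grad_norm, learning_rate, loss, step
losses = [h["loss"] for h in history if "loss" in h]
window = 50

print(f"steps logged:         {len(losses)}")
print(f"mean loss, last {window}:  {sum(losses[-window:]) / window:.4f}")
print(f"final learning rate:  {history[-1].get('learning_rate', float('nan')):.3e}")
print(f"peak grad_norm:       {max(h['grad_norm'] for h in history if 'grad_norm' in h):.2f}")
```

Only the standard library is used, so the sketch runs anywhere the checkpoint is copied. Against the entries above it would report a last-50-step mean loss of roughly 0.3 and a final learning rate of about 6.1e-11, i.e. the schedule has decayed to effectively zero by step 1000.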
|
|