{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.5,
  "eval_steps": 100,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 5e-06,
      "loss": 1.2856,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 1e-05,
      "loss": 1.2602,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.5e-05,
      "loss": 1.1158,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 2e-05,
      "loss": 1.1952,
      "step": 4
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.5e-05,
      "loss": 1.2183,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 3e-05,
      "loss": 1.3459,
      "step": 6
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.5e-05,
      "loss": 1.1262,
      "step": 7
    },
    {
      "epoch": 0.05,
      "learning_rate": 4e-05,
      "loss": 0.9534,
      "step": 8
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.5e-05,
      "loss": 1.3216,
      "step": 9
    },
    {
      "epoch": 0.06,
      "learning_rate": 5e-05,
      "loss": 1.1305,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.1127,
      "step": 11
    },
    {
      "epoch": 0.07,
      "learning_rate": 6e-05,
      "loss": 0.9814,
      "step": 12
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.2747,
      "step": 13
    },
    {
      "epoch": 0.09,
      "learning_rate": 7e-05,
      "loss": 1.0283,
      "step": 14
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.1814,
      "step": 15
    },
    {
      "epoch": 0.1,
      "learning_rate": 8e-05,
      "loss": 1.0153,
      "step": 16
    },
    {
      "epoch": 0.11,
      "learning_rate": 8.5e-05,
      "loss": 1.075,
      "step": 17
    },
    {
      "epoch": 0.11,
      "learning_rate": 9e-05,
      "loss": 0.9444,
      "step": 18
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.5e-05,
      "loss": 1.0805,
      "step": 19
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001,
      "loss": 0.852,
      "step": 20
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.978260869565218e-05,
      "loss": 0.9922,
      "step": 21
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.956521739130435e-05,
      "loss": 0.8473,
      "step": 22
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.934782608695653e-05,
      "loss": 0.7442,
      "step": 23
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.91304347826087e-05,
      "loss": 0.7805,
      "step": 24
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.891304347826087e-05,
      "loss": 0.7853,
      "step": 25
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.869565217391305e-05,
      "loss": 0.684,
      "step": 26
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.847826086956522e-05,
      "loss": 0.6158,
      "step": 27
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.82608695652174e-05,
      "loss": 0.6431,
      "step": 28
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.804347826086957e-05,
      "loss": 0.6562,
      "step": 29
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.782608695652174e-05,
      "loss": 0.5009,
      "step": 30
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.760869565217392e-05,
      "loss": 0.3529,
      "step": 31
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.739130434782609e-05,
      "loss": 0.409,
      "step": 32
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.717391304347827e-05,
      "loss": 0.3635,
      "step": 33
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.695652173913044e-05,
      "loss": 0.3853,
      "step": 34
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.673913043478261e-05,
      "loss": 0.2333,
      "step": 35
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.652173913043479e-05,
      "loss": 0.304,
      "step": 36
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.630434782608696e-05,
      "loss": 0.2929,
      "step": 37
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.608695652173914e-05,
      "loss": 0.1785,
      "step": 38
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.586956521739131e-05,
      "loss": 0.081,
      "step": 39
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.565217391304348e-05,
      "loss": 0.4271,
      "step": 40
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.543478260869566e-05,
      "loss": 0.1938,
      "step": 41
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.521739130434783e-05,
      "loss": 0.2521,
      "step": 42
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.5e-05,
      "loss": 0.2784,
      "step": 43
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.478260869565218e-05,
      "loss": 0.2935,
      "step": 44
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.456521739130435e-05,
      "loss": 0.2826,
      "step": 45
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.434782608695653e-05,
      "loss": 0.2279,
      "step": 46
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.41304347826087e-05,
      "loss": 0.2364,
      "step": 47
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.391304347826087e-05,
      "loss": 0.2543,
      "step": 48
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.369565217391305e-05,
      "loss": 0.1922,
      "step": 49
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.347826086956522e-05,
      "loss": 0.1936,
      "step": 50
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.32608695652174e-05,
      "loss": 0.217,
      "step": 51
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.304347826086957e-05,
      "loss": 0.2158,
      "step": 52
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.282608695652174e-05,
      "loss": 0.1472,
      "step": 53
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.260869565217392e-05,
      "loss": 0.2314,
      "step": 54
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.239130434782609e-05,
      "loss": 0.2205,
      "step": 55
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.217391304347827e-05,
      "loss": 0.1921,
      "step": 56
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.195652173913044e-05,
      "loss": 0.1892,
      "step": 57
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.173913043478261e-05,
      "loss": 0.2255,
      "step": 58
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.152173913043479e-05,
      "loss": 0.1864,
      "step": 59
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.130434782608696e-05,
      "loss": 0.2153,
      "step": 60
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.108695652173914e-05,
      "loss": 0.168,
      "step": 61
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.086956521739131e-05,
      "loss": 0.0987,
      "step": 62
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.065217391304348e-05,
      "loss": 0.2205,
      "step": 63
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.043478260869566e-05,
      "loss": 0.119,
      "step": 64
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.021739130434783e-05,
      "loss": 0.1378,
      "step": 65
    },
    {
      "epoch": 0.41,
      "learning_rate": 9e-05,
      "loss": 0.1345,
      "step": 66
    },
    {
      "epoch": 0.42,
      "learning_rate": 8.978260869565218e-05,
      "loss": 0.2253,
      "step": 67
    },
    {
      "epoch": 0.42,
      "learning_rate": 8.956521739130435e-05,
      "loss": 0.128,
      "step": 68
    },
    {
      "epoch": 0.43,
      "learning_rate": 8.934782608695653e-05,
      "loss": 0.1143,
      "step": 69
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.91304347826087e-05,
      "loss": 0.1149,
      "step": 70
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.891304347826088e-05,
      "loss": 0.1714,
      "step": 71
    },
    {
      "epoch": 0.45,
      "learning_rate": 8.869565217391305e-05,
      "loss": 0.1608,
      "step": 72
    },
    {
      "epoch": 0.46,
      "learning_rate": 8.847826086956522e-05,
      "loss": 0.0919,
      "step": 73
    },
    {
      "epoch": 0.46,
      "learning_rate": 8.82608695652174e-05,
      "loss": 0.2223,
      "step": 74
    },
    {
      "epoch": 0.47,
      "learning_rate": 8.804347826086957e-05,
      "loss": 0.1387,
      "step": 75
    },
    {
      "epoch": 0.47,
      "learning_rate": 8.782608695652174e-05,
      "loss": 0.1626,
      "step": 76
    },
    {
      "epoch": 0.48,
      "learning_rate": 8.760869565217392e-05,
      "loss": 0.1865,
      "step": 77
    },
    {
      "epoch": 0.49,
      "learning_rate": 8.739130434782609e-05,
      "loss": 0.1157,
      "step": 78
    },
    {
      "epoch": 0.49,
      "learning_rate": 8.717391304347827e-05,
      "loss": 0.0903,
      "step": 79
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.695652173913044e-05,
      "loss": 0.2334,
      "step": 80
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.673913043478261e-05,
      "loss": 0.095,
      "step": 81
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.652173913043479e-05,
      "loss": 0.1797,
      "step": 82
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.630434782608696e-05,
      "loss": 0.1989,
      "step": 83
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.608695652173914e-05,
      "loss": 0.1347,
      "step": 84
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.586956521739131e-05,
      "loss": 0.1247,
      "step": 85
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.565217391304348e-05,
      "loss": 0.2118,
      "step": 86
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.543478260869566e-05,
      "loss": 0.1537,
      "step": 87
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.521739130434783e-05,
      "loss": 0.3155,
      "step": 88
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.5e-05,
      "loss": 0.0565,
      "step": 89
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.478260869565218e-05,
      "loss": 0.1259,
      "step": 90
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.456521739130435e-05,
      "loss": 0.1834,
      "step": 91
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.434782608695653e-05,
      "loss": 0.311,
      "step": 92
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.41304347826087e-05,
      "loss": 0.1937,
      "step": 93
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.391304347826088e-05,
      "loss": 0.1621,
      "step": 94
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.369565217391305e-05,
      "loss": 0.2723,
      "step": 95
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.347826086956521e-05,
      "loss": 0.1308,
      "step": 96
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.32608695652174e-05,
      "loss": 0.1453,
      "step": 97
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.304347826086957e-05,
      "loss": 0.135,
      "step": 98
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.282608695652175e-05,
      "loss": 0.1921,
      "step": 99
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.260869565217392e-05,
      "loss": 0.0951,
      "step": 100
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.23913043478261e-05,
      "loss": 0.1859,
      "step": 101
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.217391304347827e-05,
      "loss": 0.2049,
      "step": 102
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.195652173913044e-05,
      "loss": 0.1328,
      "step": 103
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.173913043478262e-05,
      "loss": 0.1124,
      "step": 104
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.152173913043478e-05,
      "loss": 0.1932,
      "step": 105
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.130434782608696e-05,
      "loss": 0.1878,
      "step": 106
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.108695652173914e-05,
      "loss": 0.1053,
      "step": 107
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.086956521739131e-05,
      "loss": 0.1761,
      "step": 108
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.065217391304348e-05,
      "loss": 0.1566,
      "step": 109
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.043478260869566e-05,
      "loss": 0.1254,
      "step": 110
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.021739130434783e-05,
      "loss": 0.1144,
      "step": 111
    },
    {
      "epoch": 0.7,
      "learning_rate": 8e-05,
      "loss": 0.1335,
      "step": 112
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.978260869565217e-05,
      "loss": 0.1611,
      "step": 113
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.956521739130434e-05,
      "loss": 0.1283,
      "step": 114
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.934782608695653e-05,
      "loss": 0.1697,
      "step": 115
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.91304347826087e-05,
      "loss": 0.1886,
      "step": 116
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.891304347826088e-05,
      "loss": 0.1882,
      "step": 117
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.869565217391305e-05,
      "loss": 0.1962,
      "step": 118
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.847826086956522e-05,
      "loss": 0.104,
      "step": 119
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.82608695652174e-05,
      "loss": 0.1633,
      "step": 120
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.804347826086957e-05,
      "loss": 0.155,
      "step": 121
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.782608695652173e-05,
      "loss": 0.1456,
      "step": 122
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.76086956521739e-05,
      "loss": 0.1612,
      "step": 123
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.73913043478261e-05,
      "loss": 0.0898,
      "step": 124
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.717391304347827e-05,
      "loss": 0.1639,
      "step": 125
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.695652173913044e-05,
      "loss": 0.1077,
      "step": 126
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.673913043478262e-05,
      "loss": 0.1189,
      "step": 127
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.652173913043479e-05,
      "loss": 0.1671,
      "step": 128
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.630434782608696e-05,
      "loss": 0.1334,
      "step": 129
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.608695652173914e-05,
      "loss": 0.0486,
      "step": 130
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.58695652173913e-05,
      "loss": 0.0935,
      "step": 131
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.565217391304347e-05,
      "loss": 0.1172,
      "step": 132
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.543478260869566e-05,
      "loss": 0.1727,
      "step": 133
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.521739130434783e-05,
      "loss": 0.2791,
      "step": 134
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.101,
      "step": 135
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.478260869565218e-05,
      "loss": 0.0511,
      "step": 136
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.456521739130435e-05,
      "loss": 0.1013,
      "step": 137
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.434782608695653e-05,
      "loss": 0.1812,
      "step": 138
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.41304347826087e-05,
      "loss": 0.179,
      "step": 139
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.391304347826086e-05,
      "loss": 0.1382,
      "step": 140
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.369565217391304e-05,
      "loss": 0.0617,
      "step": 141
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.347826086956522e-05,
      "loss": 0.0953,
      "step": 142
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.32608695652174e-05,
      "loss": 0.1096,
      "step": 143
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.304347826086957e-05,
      "loss": 0.0872,
      "step": 144
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.282608695652175e-05,
      "loss": 0.1359,
      "step": 145
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.260869565217392e-05,
      "loss": 0.0824,
      "step": 146
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.23913043478261e-05,
      "loss": 0.1948,
      "step": 147
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.217391304347827e-05,
      "loss": 0.166,
      "step": 148
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.195652173913043e-05,
      "loss": 0.1802,
      "step": 149
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.17391304347826e-05,
      "loss": 0.0748,
      "step": 150
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.152173913043479e-05,
      "loss": 0.1022,
      "step": 151
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.130434782608696e-05,
      "loss": 0.1456,
      "step": 152
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.108695652173914e-05,
      "loss": 0.088,
      "step": 153
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.086956521739131e-05,
      "loss": 0.2109,
      "step": 154
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.065217391304349e-05,
      "loss": 0.0573,
      "step": 155
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.043478260869566e-05,
      "loss": 0.0671,
      "step": 156
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.021739130434783e-05,
      "loss": 0.2151,
      "step": 157
    },
    {
      "epoch": 0.99,
      "learning_rate": 7e-05,
      "loss": 0.1243,
      "step": 158
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.978260869565217e-05,
      "loss": 0.2758,
      "step": 159
    },
    {
      "epoch": 1.0,
      "learning_rate": 6.956521739130436e-05,
      "loss": 0.1371,
      "step": 160
    },
    {
      "epoch": 1.01,
      "learning_rate": 6.934782608695653e-05,
      "loss": 0.1457,
      "step": 161
    },
    {
      "epoch": 1.01,
      "learning_rate": 6.91304347826087e-05,
      "loss": 0.0562,
      "step": 162
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.891304347826088e-05,
      "loss": 0.1361,
      "step": 163
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.869565217391305e-05,
      "loss": 0.0893,
      "step": 164
    },
    {
      "epoch": 1.03,
      "learning_rate": 6.847826086956522e-05,
      "loss": 0.1177,
      "step": 165
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.826086956521739e-05,
      "loss": 0.1483,
      "step": 166
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.804347826086956e-05,
      "loss": 0.1176,
      "step": 167
    },
    {
      "epoch": 1.05,
      "learning_rate": 6.782608695652173e-05,
      "loss": 0.162,
      "step": 168
    },
    {
      "epoch": 1.06,
      "learning_rate": 6.760869565217392e-05,
      "loss": 0.0821,
      "step": 169
    },
    {
      "epoch": 1.06,
      "learning_rate": 6.73913043478261e-05,
      "loss": 0.0605,
      "step": 170
    },
    {
      "epoch": 1.07,
      "learning_rate": 6.717391304347827e-05,
      "loss": 0.1529,
      "step": 171
    },
    {
      "epoch": 1.07,
      "learning_rate": 6.695652173913044e-05,
      "loss": 0.1352,
      "step": 172
    },
    {
      "epoch": 1.08,
      "learning_rate": 6.673913043478262e-05,
      "loss": 0.1002,
      "step": 173
    },
    {
      "epoch": 1.09,
      "learning_rate": 6.652173913043479e-05,
      "loss": 0.1243,
      "step": 174
    },
    {
      "epoch": 1.09,
      "learning_rate": 6.630434782608695e-05,
      "loss": 0.1483,
      "step": 175
    },
    {
      "epoch": 1.1,
      "learning_rate": 6.608695652173912e-05,
      "loss": 0.1103,
      "step": 176
    },
    {
      "epoch": 1.11,
      "learning_rate": 6.58695652173913e-05,
      "loss": 0.112,
      "step": 177
    },
    {
      "epoch": 1.11,
      "learning_rate": 6.565217391304349e-05,
      "loss": 0.09,
      "step": 178
    },
    {
      "epoch": 1.12,
      "learning_rate": 6.543478260869566e-05,
      "loss": 0.0639,
      "step": 179
    },
    {
      "epoch": 1.12,
      "learning_rate": 6.521739130434783e-05,
      "loss": 0.0873,
      "step": 180
    },
    {
      "epoch": 1.13,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.0854,
      "step": 181
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.478260869565218e-05,
      "loss": 0.1007,
      "step": 182
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.456521739130436e-05,
      "loss": 0.1791,
      "step": 183
    },
    {
      "epoch": 1.15,
      "learning_rate": 6.434782608695652e-05,
      "loss": 0.1084,
      "step": 184
    },
    {
      "epoch": 1.16,
      "learning_rate": 6.413043478260869e-05,
      "loss": 0.0995,
      "step": 185
    },
    {
      "epoch": 1.16,
      "learning_rate": 6.391304347826086e-05,
      "loss": 0.0831,
      "step": 186
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.369565217391305e-05,
      "loss": 0.1019,
      "step": 187
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.347826086956523e-05,
      "loss": 0.1882,
      "step": 188
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.32608695652174e-05,
      "loss": 0.1135,
      "step": 189
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.304347826086957e-05,
      "loss": 0.1522,
      "step": 190
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.282608695652175e-05,
      "loss": 0.0972,
      "step": 191
    },
    {
      "epoch": 1.2,
      "learning_rate": 6.260869565217392e-05,
      "loss": 0.0994,
      "step": 192
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.239130434782608e-05,
      "loss": 0.0951,
      "step": 193
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.217391304347826e-05,
      "loss": 0.0671,
      "step": 194
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.195652173913043e-05,
      "loss": 0.0917,
      "step": 195
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.173913043478262e-05,
      "loss": 0.1442,
      "step": 196
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.152173913043479e-05,
      "loss": 0.069,
      "step": 197
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.130434782608696e-05,
      "loss": 0.1055,
      "step": 198
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.108695652173914e-05,
      "loss": 0.1078,
      "step": 199
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.086956521739131e-05,
      "loss": 0.147,
      "step": 200
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.0652173913043487e-05,
      "loss": 0.1841,
      "step": 201
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.0434782608695654e-05,
      "loss": 0.1241,
      "step": 202
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.021739130434783e-05,
      "loss": 0.12,
      "step": 203
    },
    {
      "epoch": 1.27,
      "learning_rate": 6e-05,
      "loss": 0.1256,
      "step": 204
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.9782608695652175e-05,
      "loss": 0.0793,
      "step": 205
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.956521739130435e-05,
      "loss": 0.0887,
      "step": 206
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.934782608695652e-05,
      "loss": 0.1211,
      "step": 207
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.9130434782608704e-05,
      "loss": 0.1238,
      "step": 208
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.891304347826088e-05,
      "loss": 0.1484,
      "step": 209
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.869565217391305e-05,
      "loss": 0.0768,
      "step": 210
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.847826086956521e-05,
      "loss": 0.2152,
      "step": 211
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.826086956521739e-05,
      "loss": 0.1132,
      "step": 212
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.804347826086957e-05,
      "loss": 0.1141,
      "step": 213
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.782608695652174e-05,
      "loss": 0.0904,
      "step": 214
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.7608695652173915e-05,
      "loss": 0.0334,
      "step": 215
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.739130434782609e-05,
      "loss": 0.0939,
      "step": 216
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.717391304347827e-05,
      "loss": 0.1181,
      "step": 217
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.695652173913044e-05,
      "loss": 0.101,
      "step": 218
    },
    {
      "epoch": 1.37,
      "learning_rate": 5.673913043478262e-05,
      "loss": 0.1756,
      "step": 219
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.652173913043478e-05,
      "loss": 0.1129,
      "step": 220
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.630434782608696e-05,
      "loss": 0.0689,
      "step": 221
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.608695652173913e-05,
      "loss": 0.094,
      "step": 222
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.5869565217391306e-05,
      "loss": 0.1357,
      "step": 223
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.565217391304348e-05,
      "loss": 0.0742,
      "step": 224
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.5434782608695654e-05,
      "loss": 0.0943,
      "step": 225
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.5217391304347835e-05,
      "loss": 0.1144,
      "step": 226
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.1045,
      "step": 227
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.478260869565217e-05,
      "loss": 0.129,
      "step": 228
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.456521739130434e-05,
      "loss": 0.1173,
      "step": 229
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.4347826086956524e-05,
      "loss": 0.1838,
      "step": 230
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.41304347826087e-05,
      "loss": 0.1537,
      "step": 231
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.391304347826087e-05,
      "loss": 0.078,
      "step": 232
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.3695652173913046e-05,
      "loss": 0.1471,
      "step": 233
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.347826086956522e-05,
      "loss": 0.1094,
      "step": 234
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.32608695652174e-05,
      "loss": 0.1005,
      "step": 235
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.3043478260869574e-05,
      "loss": 0.1354,
      "step": 236
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.2826086956521735e-05,
      "loss": 0.0875,
      "step": 237
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.260869565217391e-05,
      "loss": 0.1438,
      "step": 238
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.239130434782609e-05,
      "loss": 0.1666,
      "step": 239
    },
    {
      "epoch": 1.5,
      "learning_rate": 5.217391304347826e-05,
      "loss": 0.0433,
      "step": 240
    },
    {
      "epoch": 1.51,
      "learning_rate": 5.195652173913044e-05,
      "loss": 0.0899,
      "step": 241
    },
    {
      "epoch": 1.51,
      "learning_rate": 5.173913043478261e-05,
      "loss": 0.1092,
      "step": 242
    },
    {
      "epoch": 1.52,
      "learning_rate": 5.1521739130434785e-05,
      "loss": 0.0806,
      "step": 243
    },
    {
      "epoch": 1.52,
      "learning_rate": 5.1304347826086966e-05,
      "loss": 0.1033,
      "step": 244
    },
    {
      "epoch": 1.53,
      "learning_rate": 5.108695652173914e-05,
      "loss": 0.107,
      "step": 245
    },
    {
      "epoch": 1.54,
      "learning_rate": 5.08695652173913e-05,
      "loss": 0.1478,
      "step": 246
    },
    {
      "epoch": 1.54,
      "learning_rate": 5.0652173913043474e-05,
      "loss": 0.1734,
      "step": 247
    },
    {
      "epoch": 1.55,
      "learning_rate": 5.0434782608695655e-05,
      "loss": 0.065,
      "step": 248
    },
    {
      "epoch": 1.56,
      "learning_rate": 5.021739130434783e-05,
      "loss": 0.1782,
      "step": 249
    },
    {
      "epoch": 1.56,
      "learning_rate": 5e-05,
      "loss": 0.1212,
      "step": 250
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.9782608695652176e-05,
      "loss": 0.1234,
      "step": 251
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.956521739130435e-05,
      "loss": 0.0608,
      "step": 252
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.9347826086956524e-05,
      "loss": 0.0719,
      "step": 253
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.91304347826087e-05,
      "loss": 0.1053,
      "step": 254
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.0209,
      "step": 255
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.8695652173913046e-05,
      "loss": 0.1123,
      "step": 256
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.847826086956522e-05,
      "loss": 0.0561,
      "step": 257
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.8260869565217394e-05,
      "loss": 0.0704,
      "step": 258
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.804347826086957e-05,
      "loss": 0.1738,
      "step": 259
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.782608695652174e-05,
      "loss": 0.1209,
      "step": 260
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.7608695652173916e-05,
      "loss": 0.1059,
      "step": 261
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.739130434782609e-05,
      "loss": 0.0983,
      "step": 262
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.7173913043478264e-05,
      "loss": 0.0919,
      "step": 263
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.695652173913044e-05,
      "loss": 0.06,
      "step": 264
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.673913043478261e-05,
      "loss": 0.0488,
      "step": 265
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.6521739130434785e-05,
      "loss": 0.1182,
      "step": 266
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.630434782608696e-05,
      "loss": 0.0696,
      "step": 267
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.608695652173913e-05,
      "loss": 0.1323,
      "step": 268
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.586956521739131e-05,
      "loss": 0.1429,
      "step": 269
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.565217391304348e-05,
      "loss": 0.0536,
      "step": 270
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.5434782608695655e-05,
      "loss": 0.1794,
      "step": 271
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.521739130434783e-05,
      "loss": 0.1623,
      "step": 272
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.5e-05,
      "loss": 0.1291,
      "step": 273
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.478260869565218e-05,
      "loss": 0.0282,
      "step": 274
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.456521739130435e-05,
      "loss": 0.121,
      "step": 275
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.4347826086956525e-05,
      "loss": 0.0357,
      "step": 276
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.41304347826087e-05,
      "loss": 0.2515,
      "step": 277
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.391304347826087e-05,
      "loss": 0.0873,
      "step": 278
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.3695652173913046e-05,
      "loss": 0.1023,
      "step": 279
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.0918,
      "step": 280
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.3260869565217394e-05,
      "loss": 0.1123,
      "step": 281
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.304347826086957e-05,
      "loss": 0.1115,
      "step": 282
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.282608695652174e-05,
      "loss": 0.0779,
      "step": 283
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.2608695652173916e-05,
      "loss": 0.1158,
      "step": 284
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.239130434782609e-05,
      "loss": 0.1626,
      "step": 285
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.2173913043478264e-05,
      "loss": 0.0482,
      "step": 286
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.195652173913044e-05,
      "loss": 0.0639,
      "step": 287
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.1739130434782605e-05,
      "loss": 0.13,
      "step": 288
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.1521739130434786e-05,
      "loss": 0.0602,
      "step": 289
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.130434782608696e-05,
      "loss": 0.1464,
      "step": 290
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.1086956521739134e-05,
      "loss": 0.0937,
      "step": 291
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.086956521739131e-05,
      "loss": 0.0836,
      "step": 292
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.065217391304348e-05,
      "loss": 0.0842,
      "step": 293
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.0434782608695655e-05,
      "loss": 0.1501,
      "step": 294
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.021739130434783e-05,
      "loss": 0.0968,
      "step": 295
    },
    {
      "epoch": 1.85,
      "learning_rate": 4e-05,
      "loss": 0.1041,
      "step": 296
    },
    {
      "epoch": 1.86,
      "learning_rate": 3.978260869565217e-05,
      "loss": 0.096,
      "step": 297
    },
    {
      "epoch": 1.86,
      "learning_rate": 3.956521739130435e-05,
      "loss": 0.0934,
      "step": 298
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.9347826086956525e-05,
      "loss": 0.0797,
      "step": 299
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.91304347826087e-05,
      "loss": 0.098,
      "step": 300
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.8913043478260866e-05,
      "loss": 0.0695,
      "step": 301
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.869565217391305e-05,
      "loss": 0.1073,
      "step": 302
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.847826086956522e-05,
      "loss": 0.1902,
      "step": 303
    },
    {
      "epoch": 1.9,
      "learning_rate": 3.8260869565217395e-05,
      "loss": 0.0692,
      "step": 304
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.0914,
      "step": 305
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.7826086956521736e-05,
      "loss": 0.0945,
      "step": 306
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.7608695652173917e-05,
      "loss": 0.3069,
      "step": 307
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.739130434782609e-05,
      "loss": 0.0858,
      "step": 308
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.7173913043478264e-05,
      "loss": 0.0798,
      "step": 309
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.695652173913043e-05,
      "loss": 0.0423,
      "step": 310
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.673913043478261e-05,
      "loss": 0.0677,
      "step": 311
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.6521739130434786e-05,
      "loss": 0.0816,
      "step": 312
    },
    {
      "epoch": 1.96,
      "learning_rate": 3.630434782608696e-05,
      "loss": 0.0573,
      "step": 313
    },
    {
      "epoch": 1.96,
      "learning_rate": 3.6086956521739134e-05,
      "loss": 0.059,
      "step": 314
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.58695652173913e-05,
      "loss": 0.0966,
      "step": 315
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.565217391304348e-05,
      "loss": 0.1021,
      "step": 316
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.5434782608695656e-05,
      "loss": 0.0716,
      "step": 317
    },
    {
      "epoch": 1.99,
      "learning_rate": 3.521739130434783e-05,
      "loss": 0.1406,
      "step": 318
    },
    {
      "epoch": 1.99,
      "learning_rate": 3.5e-05,
      "loss": 0.1697,
      "step": 319
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.478260869565218e-05,
      "loss": 0.074,
      "step": 320
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.456521739130435e-05,
      "loss": 0.0922,
      "step": 321
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.4347826086956526e-05,
      "loss": 0.0864,
      "step": 322
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.413043478260869e-05,
      "loss": 0.186,
      "step": 323
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.3913043478260867e-05,
      "loss": 0.0498,
      "step": 324
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.369565217391305e-05,
      "loss": 0.0901,
      "step": 325
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.347826086956522e-05,
      "loss": 0.0907,
      "step": 326
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.3260869565217395e-05,
      "loss": 0.0481,
      "step": 327
    },
    {
      "epoch": 2.05,
      "learning_rate": 3.304347826086956e-05,
      "loss": 0.0889,
      "step": 328
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.282608695652174e-05,
      "loss": 0.1153,
      "step": 329
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.0999,
      "step": 330
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.239130434782609e-05,
      "loss": 0.1522,
      "step": 331
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.217391304347826e-05,
      "loss": 0.0708,
      "step": 332
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.195652173913043e-05,
      "loss": 0.0612,
      "step": 333
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.173913043478261e-05,
      "loss": 0.0888,
      "step": 334
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.152173913043479e-05,
      "loss": 0.1127,
      "step": 335
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.130434782608696e-05,
      "loss": 0.0957,
      "step": 336
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.108695652173913e-05,
      "loss": 0.0678,
      "step": 337
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.086956521739131e-05,
      "loss": 0.0593,
      "step": 338
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.065217391304348e-05,
      "loss": 0.0426,
      "step": 339
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.0434782608695656e-05,
      "loss": 0.0921,
      "step": 340
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.0217391304347827e-05,
      "loss": 0.0893,
      "step": 341
    },
    {
      "epoch": 2.14,
      "learning_rate": 3e-05,
      "loss": 0.0812,
      "step": 342
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.9782608695652175e-05,
      "loss": 0.0792,
      "step": 343
    },
    {
      "epoch": 2.15,
      "learning_rate": 2.9565217391304352e-05,
      "loss": 0.0879,
      "step": 344
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.9347826086956526e-05,
      "loss": 0.1001,
      "step": 345
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.9130434782608696e-05,
      "loss": 0.0404,
      "step": 346
    },
    {
      "epoch": 2.17,
      "learning_rate": 2.891304347826087e-05,
      "loss": 0.085,
      "step": 347
    },
    {
      "epoch": 2.17,
      "learning_rate": 2.8695652173913044e-05,
      "loss": 0.0907,
      "step": 348
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.847826086956522e-05,
      "loss": 0.0475,
      "step": 349
    },
    {
      "epoch": 2.19,
      "learning_rate": 2.826086956521739e-05,
      "loss": 0.0555,
      "step": 350
    },
    {
      "epoch": 2.19,
      "learning_rate": 2.8043478260869566e-05,
      "loss": 0.058,
      "step": 351
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.782608695652174e-05,
      "loss": 0.0339,
      "step": 352
    },
    {
      "epoch": 2.21,
      "learning_rate": 2.7608695652173917e-05,
      "loss": 0.0455,
      "step": 353
    },
    {
      "epoch": 2.21,
      "learning_rate": 2.7391304347826085e-05,
      "loss": 0.1721,
      "step": 354
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.1153,
      "step": 355
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.6956521739130436e-05,
      "loss": 0.0864,
      "step": 356
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.673913043478261e-05,
      "loss": 0.052,
      "step": 357
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.6521739130434787e-05,
      "loss": 0.1067,
      "step": 358
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.6304347826086954e-05,
      "loss": 0.1457,
      "step": 359
    },
    {
      "epoch": 2.25,
      "learning_rate": 2.608695652173913e-05,
      "loss": 0.0143,
      "step": 360
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.5869565217391305e-05,
      "loss": 0.0317,
      "step": 361
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.5652173913043483e-05,
      "loss": 0.1009,
      "step": 362
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.543478260869565e-05,
      "loss": 0.0647,
      "step": 363
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.5217391304347827e-05,
      "loss": 0.0487,
      "step": 364
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.5e-05,
      "loss": 0.027,
      "step": 365
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.4782608695652175e-05,
      "loss": 0.0654,
      "step": 366
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.456521739130435e-05,
      "loss": 0.0511,
      "step": 367
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.4347826086956523e-05,
      "loss": 0.0678,
      "step": 368
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.4130434782608697e-05,
      "loss": 0.058,
      "step": 369
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.391304347826087e-05,
      "loss": 0.0787,
      "step": 370
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.3695652173913045e-05,
      "loss": 0.056,
      "step": 371
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.347826086956522e-05,
      "loss": 0.1519,
      "step": 372
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.3260869565217393e-05,
      "loss": 0.0411,
      "step": 373
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.3043478260869567e-05,
      "loss": 0.0525,
      "step": 374
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.282608695652174e-05,
      "loss": 0.0942,
      "step": 375
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.2608695652173914e-05,
      "loss": 0.0333,
      "step": 376
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.239130434782609e-05,
      "loss": 0.0497,
      "step": 377
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.2173913043478262e-05,
      "loss": 0.0733,
      "step": 378
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.1956521739130436e-05,
      "loss": 0.1511,
      "step": 379
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.1551,
      "step": 380
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.1521739130434784e-05,
      "loss": 0.0532,
      "step": 381
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.1304347826086958e-05,
      "loss": 0.0677,
      "step": 382
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.1086956521739132e-05,
      "loss": 0.042,
      "step": 383
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.0869565217391303e-05,
      "loss": 0.0878,
      "step": 384
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.065217391304348e-05,
      "loss": 0.1585,
      "step": 385
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.0434782608695654e-05,
      "loss": 0.0931,
      "step": 386
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.0217391304347828e-05,
      "loss": 0.1007,
      "step": 387
    },
    {
      "epoch": 2.42,
      "learning_rate": 2e-05,
      "loss": 0.0597,
      "step": 388
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.9782608695652176e-05,
      "loss": 0.0789,
      "step": 389
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.956521739130435e-05,
      "loss": 0.0721,
      "step": 390
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.9347826086956523e-05,
      "loss": 0.1328,
      "step": 391
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.9130434782608697e-05,
      "loss": 0.0585,
      "step": 392
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.8913043478260868e-05,
      "loss": 0.1127,
      "step": 393
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.8695652173913045e-05,
      "loss": 0.0667,
      "step": 394
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.8478260869565216e-05,
      "loss": 0.1232,
      "step": 395
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.8260869565217393e-05,
      "loss": 0.0839,
      "step": 396
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.8043478260869567e-05,
      "loss": 0.0348,
      "step": 397
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.782608695652174e-05,
      "loss": 0.0441,
      "step": 398
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.7608695652173915e-05,
      "loss": 0.0862,
      "step": 399
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.739130434782609e-05,
      "loss": 0.0758,
      "step": 400
    }
  ],
  "logging_steps": 1,
  "max_steps": 480,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 8.459864193775534e+17,
  "trial_name": null,
  "trial_params": null
}