{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 262,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.5e-06,
      "loss": 0.3281,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 5e-06,
      "loss": 0.3522,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.2372,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 1e-05,
      "loss": 0.2043,
      "step": 4
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.25e-05,
      "loss": 0.3516,
      "step": 5
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.2391,
      "step": 6
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.1406,
      "step": 7
    },
    {
      "epoch": 0.06,
      "learning_rate": 2e-05,
      "loss": 0.1592,
      "step": 8
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.994805194805195e-05,
      "loss": 0.2057,
      "step": 9
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.98961038961039e-05,
      "loss": 0.2209,
      "step": 10
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9844155844155846e-05,
      "loss": 0.2098,
      "step": 11
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9792207792207794e-05,
      "loss": 0.2233,
      "step": 12
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.974025974025974e-05,
      "loss": 0.1877,
      "step": 13
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.968831168831169e-05,
      "loss": 0.2094,
      "step": 14
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.963636363636364e-05,
      "loss": 0.1608,
      "step": 15
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9584415584415586e-05,
      "loss": 0.1873,
      "step": 16
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9532467532467533e-05,
      "loss": 0.2251,
      "step": 17
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9480519480519483e-05,
      "loss": 0.2143,
      "step": 18
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.942857142857143e-05,
      "loss": 0.179,
      "step": 19
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9376623376623377e-05,
      "loss": 0.2063,
      "step": 20
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9324675324675325e-05,
      "loss": 0.1789,
      "step": 21
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9272727272727275e-05,
      "loss": 0.1918,
      "step": 22
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9220779220779222e-05,
      "loss": 0.1587,
      "step": 23
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.916883116883117e-05,
      "loss": 0.1268,
      "step": 24
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9116883116883117e-05,
      "loss": 0.1884,
      "step": 25
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9064935064935067e-05,
      "loss": 0.2015,
      "step": 26
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9012987012987014e-05,
      "loss": 0.1141,
      "step": 27
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.896103896103896e-05,
      "loss": 0.2242,
      "step": 28
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.8909090909090912e-05,
      "loss": 0.1462,
      "step": 29
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.885714285714286e-05,
      "loss": 0.1986,
      "step": 30
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.8805194805194806e-05,
      "loss": 0.1237,
      "step": 31
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.8753246753246753e-05,
      "loss": 0.1657,
      "step": 32
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.8701298701298704e-05,
      "loss": 0.1336,
      "step": 33
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.864935064935065e-05,
      "loss": 0.1469,
      "step": 34
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8597402597402598e-05,
      "loss": 0.1352,
      "step": 35
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8545454545454545e-05,
      "loss": 0.1617,
      "step": 36
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8493506493506496e-05,
      "loss": 0.1403,
      "step": 37
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8441558441558443e-05,
      "loss": 0.1255,
      "step": 38
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.838961038961039e-05,
      "loss": 0.102,
      "step": 39
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8337662337662337e-05,
      "loss": 0.1318,
      "step": 40
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8285714285714288e-05,
      "loss": 0.117,
      "step": 41
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8233766233766235e-05,
      "loss": 0.0931,
      "step": 42
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.0899,
      "step": 43
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.812987012987013e-05,
      "loss": 0.1517,
      "step": 44
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.807792207792208e-05,
      "loss": 0.1423,
      "step": 45
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8025974025974027e-05,
      "loss": 0.1418,
      "step": 46
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.7974025974025974e-05,
      "loss": 0.1723,
      "step": 47
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.792207792207792e-05,
      "loss": 0.1797,
      "step": 48
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.7870129870129872e-05,
      "loss": 0.1365,
      "step": 49
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.781818181818182e-05,
      "loss": 0.0738,
      "step": 50
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.7766233766233766e-05,
      "loss": 0.0668,
      "step": 51
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.7714285714285717e-05,
      "loss": 0.1373,
      "step": 52
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.7662337662337664e-05,
      "loss": 0.0799,
      "step": 53
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.761038961038961e-05,
      "loss": 0.1018,
      "step": 54
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.7558441558441558e-05,
      "loss": 0.0802,
      "step": 55
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.750649350649351e-05,
      "loss": 0.1481,
      "step": 56
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7454545454545456e-05,
      "loss": 0.0631,
      "step": 57
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7402597402597403e-05,
      "loss": 0.1118,
      "step": 58
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.735064935064935e-05,
      "loss": 0.1086,
      "step": 59
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.72987012987013e-05,
      "loss": 0.1337,
      "step": 60
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7246753246753248e-05,
      "loss": 0.1397,
      "step": 61
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7194805194805195e-05,
      "loss": 0.0519,
      "step": 62
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.1578,
      "step": 63
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7090909090909092e-05,
      "loss": 0.079,
      "step": 64
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.703896103896104e-05,
      "loss": 0.1342,
      "step": 65
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.6987012987012987e-05,
      "loss": 0.1528,
      "step": 66
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6935064935064934e-05,
      "loss": 0.1065,
      "step": 67
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6883116883116884e-05,
      "loss": 0.1988,
      "step": 68
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.683116883116883e-05,
      "loss": 0.2357,
      "step": 69
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.677922077922078e-05,
      "loss": 0.1789,
      "step": 70
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.672727272727273e-05,
      "loss": 0.0914,
      "step": 71
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.6675324675324676e-05,
      "loss": 0.1567,
      "step": 72
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.6623376623376627e-05,
      "loss": 0.124,
      "step": 73
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.6571428571428574e-05,
      "loss": 0.097,
      "step": 74
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.651948051948052e-05,
      "loss": 0.087,
      "step": 75
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.646753246753247e-05,
      "loss": 0.1105,
      "step": 76
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.641558441558442e-05,
      "loss": 0.0719,
      "step": 77
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.0637,
      "step": 78
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.6311688311688313e-05,
      "loss": 0.0794,
      "step": 79
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6259740259740264e-05,
      "loss": 0.125,
      "step": 80
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.620779220779221e-05,
      "loss": 0.1411,
      "step": 81
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.6155844155844158e-05,
      "loss": 0.0768,
      "step": 82
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.6103896103896105e-05,
      "loss": 0.0986,
      "step": 83
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.6051948051948056e-05,
      "loss": 0.0743,
      "step": 84
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.0504,
      "step": 85
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.594805194805195e-05,
      "loss": 0.0588,
      "step": 86
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.5896103896103897e-05,
      "loss": 0.0699,
      "step": 87
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.5844155844155847e-05,
      "loss": 0.1118,
      "step": 88
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.5792207792207795e-05,
      "loss": 0.0397,
      "step": 89
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5740259740259742e-05,
      "loss": 0.0709,
      "step": 90
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.568831168831169e-05,
      "loss": 0.0757,
      "step": 91
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.563636363636364e-05,
      "loss": 0.0669,
      "step": 92
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5584415584415587e-05,
      "loss": 0.1155,
      "step": 93
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.5532467532467534e-05,
      "loss": 0.0716,
      "step": 94
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.548051948051948e-05,
      "loss": 0.0836,
      "step": 95
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.542857142857143e-05,
      "loss": 0.0805,
      "step": 96
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.537662337662338e-05,
      "loss": 0.0519,
      "step": 97
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.5324675324675326e-05,
      "loss": 0.0903,
      "step": 98
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.5272727272727276e-05,
      "loss": 0.1128,
      "step": 99
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.5220779220779223e-05,
      "loss": 0.0448,
      "step": 100
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.516883116883117e-05,
      "loss": 0.0941,
      "step": 101
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.511688311688312e-05,
      "loss": 0.0782,
      "step": 102
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.5064935064935066e-05,
      "loss": 0.1422,
      "step": 103
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.5012987012987015e-05,
      "loss": 0.1379,
      "step": 104
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.4961038961038962e-05,
      "loss": 0.1574,
      "step": 105
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.4909090909090911e-05,
      "loss": 0.0878,
      "step": 106
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.4857142857142858e-05,
      "loss": 0.0585,
      "step": 107
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.4805194805194807e-05,
      "loss": 0.0342,
      "step": 108
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.4753246753246754e-05,
      "loss": 0.1312,
      "step": 109
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.4701298701298703e-05,
      "loss": 0.0919,
      "step": 110
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.464935064935065e-05,
      "loss": 0.0693,
      "step": 111
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.45974025974026e-05,
      "loss": 0.0661,
      "step": 112
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.0699,
      "step": 113
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.4493506493506495e-05,
      "loss": 0.0784,
      "step": 114
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.4441558441558442e-05,
      "loss": 0.0854,
      "step": 115
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.4389610389610391e-05,
      "loss": 0.1409,
      "step": 116
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.433766233766234e-05,
      "loss": 0.0833,
      "step": 117
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.068,
      "step": 118
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.4233766233766236e-05,
      "loss": 0.0484,
      "step": 119
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.4181818181818183e-05,
      "loss": 0.1664,
      "step": 120
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.4129870129870132e-05,
      "loss": 0.1496,
      "step": 121
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.4077922077922079e-05,
      "loss": 0.0956,
      "step": 122
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.4025974025974028e-05,
      "loss": 0.0733,
      "step": 123
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.3974025974025975e-05,
      "loss": 0.0785,
      "step": 124
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.3922077922077924e-05,
      "loss": 0.1201,
      "step": 125
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.3870129870129871e-05,
      "loss": 0.0393,
      "step": 126
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.381818181818182e-05,
      "loss": 0.1055,
      "step": 127
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.3766233766233767e-05,
      "loss": 0.0835,
      "step": 128
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.3714285714285716e-05,
      "loss": 0.043,
      "step": 129
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.3662337662337663e-05,
      "loss": 0.2135,
      "step": 130
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.3610389610389612e-05,
      "loss": 0.0353,
      "step": 131
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.0589701272547245,
      "eval_pearson": 0.8326156177710631,
      "eval_runtime": 3.859,
      "eval_samples_per_second": 60.119,
      "eval_spearmanr": 0.8224932979448152,
      "eval_steps_per_second": 3.887,
      "step": 131
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.3558441558441559e-05,
      "loss": 0.0296,
      "step": 132
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.3506493506493508e-05,
      "loss": 0.0956,
      "step": 133
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.3454545454545455e-05,
      "loss": 0.0395,
      "step": 134
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.3402597402597404e-05,
      "loss": 0.0647,
      "step": 135
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.3350649350649351e-05,
      "loss": 0.0594,
      "step": 136
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.32987012987013e-05,
      "loss": 0.0355,
      "step": 137
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.3246753246753249e-05,
      "loss": 0.1092,
      "step": 138
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.3194805194805196e-05,
      "loss": 0.0696,
      "step": 139
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.3142857142857145e-05,
      "loss": 0.0688,
      "step": 140
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.3090909090909092e-05,
      "loss": 0.0557,
      "step": 141
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.303896103896104e-05,
      "loss": 0.0349,
      "step": 142
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.2987012987012988e-05,
      "loss": 0.0834,
      "step": 143
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.2935064935064937e-05,
      "loss": 0.0609,
      "step": 144
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.2883116883116884e-05,
      "loss": 0.0893,
      "step": 145
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.2831168831168832e-05,
      "loss": 0.0558,
      "step": 146
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.277922077922078e-05,
      "loss": 0.026,
      "step": 147
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.0391,
      "step": 148
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.2675324675324676e-05,
      "loss": 0.0347,
      "step": 149
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.2623376623376624e-05,
      "loss": 0.0587,
      "step": 150
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.2571428571428572e-05,
      "loss": 0.0304,
      "step": 151
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.251948051948052e-05,
      "loss": 0.0639,
      "step": 152
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.2467532467532468e-05,
      "loss": 0.0583,
      "step": 153
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.2415584415584416e-05,
      "loss": 0.123,
      "step": 154
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.2363636363636364e-05,
      "loss": 0.0623,
      "step": 155
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.2311688311688312e-05,
      "loss": 0.0276,
      "step": 156
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.2259740259740261e-05,
      "loss": 0.032,
      "step": 157
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.2207792207792208e-05,
      "loss": 0.0449,
      "step": 158
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.2155844155844157e-05,
      "loss": 0.0714,
      "step": 159
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.2103896103896104e-05,
      "loss": 0.0722,
      "step": 160
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.2051948051948053e-05,
      "loss": 0.0407,
      "step": 161
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.2e-05,
      "loss": 0.0308,
      "step": 162
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.1948051948051949e-05,
      "loss": 0.0235,
      "step": 163
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.1896103896103896e-05,
      "loss": 0.0852,
      "step": 164
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.1844155844155845e-05,
      "loss": 0.0386,
      "step": 165
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.1792207792207792e-05,
      "loss": 0.0358,
      "step": 166
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.1740259740259741e-05,
      "loss": 0.1125,
      "step": 167
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.1688311688311688e-05,
      "loss": 0.052,
      "step": 168
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.1636363636363637e-05,
      "loss": 0.0377,
      "step": 169
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.1584415584415584e-05,
      "loss": 0.0505,
      "step": 170
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.1532467532467533e-05,
      "loss": 0.044,
      "step": 171
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.148051948051948e-05,
      "loss": 0.0426,
      "step": 172
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.0364,
      "step": 173
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.1376623376623376e-05,
      "loss": 0.0389,
      "step": 174
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.1324675324675325e-05,
      "loss": 0.0569,
      "step": 175
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.1272727272727272e-05,
      "loss": 0.0392,
      "step": 176
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.1220779220779221e-05,
      "loss": 0.0828,
      "step": 177
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.116883116883117e-05,
      "loss": 0.0564,
      "step": 178
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.1116883116883117e-05,
      "loss": 0.0628,
      "step": 179
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.1064935064935066e-05,
      "loss": 0.0632,
      "step": 180
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1012987012987013e-05,
      "loss": 0.074,
      "step": 181
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.0961038961038962e-05,
      "loss": 0.0342,
      "step": 182
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.1477,
      "step": 183
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.0857142857142858e-05,
      "loss": 0.0523,
      "step": 184
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.0805194805194805e-05,
      "loss": 0.0695,
      "step": 185
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.0753246753246754e-05,
      "loss": 0.0299,
      "step": 186
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.0701298701298701e-05,
      "loss": 0.0528,
      "step": 187
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.064935064935065e-05,
      "loss": 0.0393,
      "step": 188
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.0597402597402597e-05,
      "loss": 0.0349,
      "step": 189
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.0545454545454546e-05,
      "loss": 0.0496,
      "step": 190
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0493506493506493e-05,
      "loss": 0.0342,
      "step": 191
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0441558441558442e-05,
      "loss": 0.0352,
      "step": 192
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0389610389610389e-05,
      "loss": 0.1237,
      "step": 193
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0337662337662338e-05,
      "loss": 0.026,
      "step": 194
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0285714285714285e-05,
      "loss": 0.0519,
      "step": 195
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0233766233766234e-05,
      "loss": 0.0343,
      "step": 196
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0181818181818182e-05,
      "loss": 0.1232,
      "step": 197
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.012987012987013e-05,
      "loss": 0.0244,
      "step": 198
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0077922077922078e-05,
      "loss": 0.0435,
      "step": 199
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0025974025974026e-05,
      "loss": 0.0394,
      "step": 200
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.974025974025974e-06,
      "loss": 0.0504,
      "step": 201
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.922077922077923e-06,
      "loss": 0.0303,
      "step": 202
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.87012987012987e-06,
      "loss": 0.035,
      "step": 203
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.81818181818182e-06,
      "loss": 0.0338,
      "step": 204
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.766233766233766e-06,
      "loss": 0.0388,
      "step": 205
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.714285714285715e-06,
      "loss": 0.0204,
      "step": 206
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.662337662337662e-06,
      "loss": 0.0424,
      "step": 207
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.610389610389611e-06,
      "loss": 0.1209,
      "step": 208
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.558441558441558e-06,
      "loss": 0.039,
      "step": 209
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.506493506493507e-06,
      "loss": 0.0532,
      "step": 210
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.454545454545456e-06,
      "loss": 0.0427,
      "step": 211
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.402597402597403e-06,
      "loss": 0.0528,
      "step": 212
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.350649350649352e-06,
      "loss": 0.063,
      "step": 213
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.298701298701299e-06,
      "loss": 0.0272,
      "step": 214
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.246753246753248e-06,
      "loss": 0.0436,
      "step": 215
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.194805194805195e-06,
      "loss": 0.0317,
      "step": 216
    },
    {
      "epoch": 1.66,
      "learning_rate": 9.142857142857144e-06,
      "loss": 0.0459,
      "step": 217
    },
    {
      "epoch": 1.66,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.0465,
      "step": 218
    },
    {
      "epoch": 1.67,
      "learning_rate": 9.03896103896104e-06,
      "loss": 0.0398,
      "step": 219
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.987012987012987e-06,
      "loss": 0.0361,
      "step": 220
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.935064935064936e-06,
      "loss": 0.0452,
      "step": 221
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.883116883116883e-06,
      "loss": 0.0308,
      "step": 222
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.831168831168832e-06,
      "loss": 0.0434,
      "step": 223
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.779220779220779e-06,
      "loss": 0.0484,
      "step": 224
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.727272727272728e-06,
      "loss": 0.0371,
      "step": 225
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.675324675324675e-06,
      "loss": 0.028,
      "step": 226
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.623376623376624e-06,
      "loss": 0.0625,
      "step": 227
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.0986,
      "step": 228
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.51948051948052e-06,
      "loss": 0.0389,
      "step": 229
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.467532467532467e-06,
      "loss": 0.0575,
      "step": 230
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.415584415584416e-06,
      "loss": 0.0304,
      "step": 231
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.363636363636365e-06,
      "loss": 0.0548,
      "step": 232
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.311688311688313e-06,
      "loss": 0.0157,
      "step": 233
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.25974025974026e-06,
      "loss": 0.0265,
      "step": 234
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.20779220779221e-06,
      "loss": 0.042,
      "step": 235
    },
    {
      "epoch": 1.8,
      "learning_rate": 8.155844155844157e-06,
      "loss": 0.0515,
      "step": 236
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.103896103896105e-06,
      "loss": 0.0408,
      "step": 237
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.051948051948052e-06,
      "loss": 0.0163,
      "step": 238
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.0286,
      "step": 239
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.948051948051948e-06,
      "loss": 0.0437,
      "step": 240
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.896103896103897e-06,
      "loss": 0.0407,
      "step": 241
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.844155844155844e-06,
      "loss": 0.046,
      "step": 242
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.792207792207793e-06,
      "loss": 0.0162,
      "step": 243
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.74025974025974e-06,
      "loss": 0.0498,
      "step": 244
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.68831168831169e-06,
      "loss": 0.0766,
      "step": 245
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.636363636363638e-06,
      "loss": 0.0149,
      "step": 246
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.584415584415585e-06,
      "loss": 0.0431,
      "step": 247
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.532467532467533e-06,
      "loss": 0.0107,
      "step": 248
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.480519480519481e-06,
      "loss": 0.0189,
      "step": 249
    },
    {
      "epoch": 1.91,
      "learning_rate": 7.428571428571429e-06,
      "loss": 0.0355,
      "step": 250
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.376623376623377e-06,
      "loss": 0.0275,
      "step": 251
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.324675324675325e-06,
      "loss": 0.0337,
      "step": 252
    },
    {
      "epoch": 1.93,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.0262,
      "step": 253
    },
    {
      "epoch": 1.94,
      "learning_rate": 7.220779220779221e-06,
      "loss": 0.012,
      "step": 254
    },
    {
      "epoch": 1.95,
      "learning_rate": 7.16883116883117e-06,
      "loss": 0.0682,
      "step": 255
    },
    {
      "epoch": 1.95,
      "learning_rate": 7.116883116883118e-06,
      "loss": 0.0455,
      "step": 256
    },
    {
      "epoch": 1.96,
      "learning_rate": 7.064935064935066e-06,
      "loss": 0.0277,
      "step": 257
    },
    {
      "epoch": 1.97,
      "learning_rate": 7.012987012987014e-06,
      "loss": 0.059,
      "step": 258
    },
    {
      "epoch": 1.98,
      "learning_rate": 6.961038961038962e-06,
      "loss": 0.0511,
      "step": 259
    },
    {
      "epoch": 1.98,
      "learning_rate": 6.90909090909091e-06,
      "loss": 0.0355,
      "step": 260
    },
    {
      "epoch": 1.99,
      "learning_rate": 6.857142857142858e-06,
      "loss": 0.0559,
      "step": 261
    },
    {
      "epoch": 2.0,
      "learning_rate": 6.805194805194806e-06,
      "loss": 0.0478,
      "step": 262
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.036842476576566696,
      "eval_pearson": 0.9234436553897939,
      "eval_runtime": 3.2412,
      "eval_samples_per_second": 71.578,
      "eval_spearmanr": 0.889384223771582,
      "eval_steps_per_second": 4.628,
      "step": 262
    }
  ],
  "max_steps": 393,
  "num_train_epochs": 3,
  "total_flos": 551530597182240.0,
  "trial_name": null,
  "trial_params": null
}