{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 705,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 9.090909090909091e-06, "loss": 1.574, "step": 1 },
    { "epoch": 0.01, "learning_rate": 1.8181818181818182e-05, "loss": 1.618, "step": 2 },
    { "epoch": 0.01, "learning_rate": 2.7272727272727273e-05, "loss": 1.5453, "step": 3 },
    { "epoch": 0.02, "learning_rate": 3.6363636363636364e-05, "loss": 1.4047, "step": 4 },
    { "epoch": 0.02, "learning_rate": 4.545454545454546e-05, "loss": 1.4909, "step": 5 },
    { "epoch": 0.03, "learning_rate": 5.4545454545454546e-05, "loss": 1.4884, "step": 6 },
    { "epoch": 0.03, "learning_rate": 6.363636363636364e-05, "loss": 1.449, "step": 7 },
    { "epoch": 0.03, "learning_rate": 7.272727272727273e-05, "loss": 1.4063, "step": 8 },
    { "epoch": 0.04, "learning_rate": 8.181818181818183e-05, "loss": 1.4595, "step": 9 },
    { "epoch": 0.04, "learning_rate": 9.090909090909092e-05, "loss": 1.3918, "step": 10 },
    { "epoch": 0.05, "learning_rate": 0.0001, "loss": 1.3847, "step": 11 },
    { "epoch": 0.05, "learning_rate": 0.00010909090909090909, "loss": 1.4178, "step": 12 },
    { "epoch": 0.06, "learning_rate": 0.0001181818181818182, "loss": 1.3751, "step": 13 },
    { "epoch": 0.06, "learning_rate": 0.00012727272727272728, "loss": 1.3377, "step": 14 },
    { "epoch": 0.06, "learning_rate": 0.00013636363636363637, "loss": 1.3761, "step": 15 },
    { "epoch": 0.07, "learning_rate": 0.00014545454545454546, "loss": 1.3358, "step": 16 },
    { "epoch": 0.07, "learning_rate": 0.00015454545454545454, "loss": 1.3477, "step": 17 },
    { "epoch": 0.08, "learning_rate": 0.00016363636363636366, "loss": 1.3174, "step": 18 },
    { "epoch": 0.08, "learning_rate": 0.00017272727272727275, "loss": 1.3381, "step": 19 },
    { "epoch": 0.09, "learning_rate": 0.00018181818181818183, "loss": 1.3524, "step": 20 },
    { "epoch": 0.09, "learning_rate": 0.00019090909090909092, "loss": 1.3306, "step": 21 },
    { "epoch": 0.09, "learning_rate": 0.0002, "loss": 1.3218, "step": 22 },
    { "epoch": 0.1, "learning_rate": 0.00019999894214150818, "loss": 1.3003, "step": 23 },
    { "epoch": 0.1, "learning_rate": 0.00019999576858841395, "loss": 1.3422, "step": 24 },
    { "epoch": 0.11, "learning_rate": 0.00019999047940786073, "loss": 1.3054, "step": 25 },
    { "epoch": 0.11, "learning_rate": 0.00019998307471175264, "loss": 1.3046, "step": 26 },
    { "epoch": 0.11, "learning_rate": 0.00019997355465675205, "loss": 1.3248, "step": 27 },
    { "epoch": 0.12, "learning_rate": 0.00019996191944427638, "loss": 1.3328, "step": 28 },
    { "epoch": 0.12, "learning_rate": 0.00019994816932049383, "loss": 1.3285, "step": 29 },
    { "epoch": 0.13, "learning_rate": 0.0001999323045763181, "loss": 1.3223, "step": 30 },
    { "epoch": 0.13, "learning_rate": 0.00019991432554740225, "loss": 1.3395, "step": 31 },
    { "epoch": 0.14, "learning_rate": 0.0001998942326141317, "loss": 1.3085, "step": 32 },
    { "epoch": 0.14, "learning_rate": 0.000199872026201616, "loss": 1.329, "step": 33 },
    { "epoch": 0.14, "learning_rate": 0.00019984770677968, "loss": 1.2921, "step": 34 },
    { "epoch": 0.15, "learning_rate": 0.00019982127486285384, "loss": 1.3047, "step": 35 },
    { "epoch": 0.15, "learning_rate": 0.0001997927310103621, "loss": 1.2923, "step": 36 },
    { "epoch": 0.16, "learning_rate": 0.00019976207582611189, "loss": 1.2481, "step": 37 },
    { "epoch": 0.16, "learning_rate": 0.00019972930995868014, "loss": 1.2574, "step": 38 },
    { "epoch": 0.17, "learning_rate": 0.0001996944341012999, "loss": 1.3083, "step": 39 },
    { "epoch": 0.17, "learning_rate": 0.0001996574489918456, "loss": 1.2613, "step": 40 },
    { "epoch": 0.17, "learning_rate": 0.00019961835541281746, "loss": 1.2648, "step": 41 },
    { "epoch": 0.18, "learning_rate": 0.00019957715419132498, "loss": 1.2976, "step": 42 },
    { "epoch": 0.18, "learning_rate": 0.00019953384619906945, "loss": 1.3284, "step": 43 },
    { "epoch": 0.19, "learning_rate": 0.00019948843235232535, "loss": 1.2759, "step": 44 },
    { "epoch": 0.19, "learning_rate": 0.0001994409136119212, "loss": 1.2619, "step": 45 },
    { "epoch": 0.2, "learning_rate": 0.00019939129098321904, "loss": 1.2225, "step": 46 },
    { "epoch": 0.2, "learning_rate": 0.00019933956551609322, "loss": 1.3009, "step": 47 },
    { "epoch": 0.2, "learning_rate": 0.00019928573830490826, "loss": 1.3523, "step": 48 },
    { "epoch": 0.21, "learning_rate": 0.00019922981048849564, "loss": 1.2688, "step": 49 },
    { "epoch": 0.21, "learning_rate": 0.00019917178325012963, "loss": 1.2939, "step": 50 },
    { "epoch": 0.22, "learning_rate": 0.00019911165781750237, "loss": 1.2932, "step": 51 },
    { "epoch": 0.22, "learning_rate": 0.00019904943546269785, "loss": 1.2886, "step": 52 },
    { "epoch": 0.23, "learning_rate": 0.00019898511750216505, "loss": 1.2656, "step": 53 },
    { "epoch": 0.23, "learning_rate": 0.0001989187052966899, "loss": 1.2864, "step": 54 },
    { "epoch": 0.23, "learning_rate": 0.00019885020025136677, "loss": 1.287, "step": 55 },
    { "epoch": 0.24, "learning_rate": 0.0001987796038155685, "loss": 1.2534, "step": 56 },
    { "epoch": 0.24, "learning_rate": 0.0001987069174829159, "loss": 1.3241, "step": 57 },
    { "epoch": 0.25, "learning_rate": 0.00019863214279124608, "loss": 1.2915, "step": 58 },
    { "epoch": 0.25, "learning_rate": 0.00019855528132257984, "loss": 1.2197, "step": 59 },
    { "epoch": 0.26, "learning_rate": 0.00019847633470308833, "loss": 1.2212, "step": 60 },
    { "epoch": 0.26, "learning_rate": 0.00019839530460305862, "loss": 1.2421, "step": 61 },
    { "epoch": 0.26, "learning_rate": 0.00019831219273685826, "loss": 1.2741, "step": 62 },
    { "epoch": 0.27, "learning_rate": 0.00019822700086289915, "loss": 1.2633, "step": 63 },
    { "epoch": 0.27, "learning_rate": 0.00019813973078360025, "loss": 1.2529, "step": 64 },
    { "epoch": 0.28, "learning_rate": 0.0001980503843453494, "loss": 1.2905, "step": 65 },
    { "epoch": 0.28, "learning_rate": 0.00019795896343846437, "loss": 1.2651, "step": 66 },
    { "epoch": 0.29, "learning_rate": 0.00019786546999715285, "loss": 1.2507, "step": 67 },
    { "epoch": 0.29, "learning_rate": 0.00019776990599947147, "loss": 1.2113, "step": 68 },
    { "epoch": 0.29, "learning_rate": 0.00019767227346728392, "loss": 1.262, "step": 69 },
    { "epoch": 0.3, "learning_rate": 0.00019757257446621827, "loss": 1.2706, "step": 70 },
    { "epoch": 0.3, "learning_rate": 0.00019747081110562322, "loss": 1.2491, "step": 71 },
    { "epoch": 0.31, "learning_rate": 0.0001973669855385235, "loss": 1.3059, "step": 72 },
    { "epoch": 0.31, "learning_rate": 0.00019726109996157424, "loss": 1.2589, "step": 73 },
    { "epoch": 0.31, "learning_rate": 0.0001971531566150145, "loss": 1.2999, "step": 74 },
    { "epoch": 0.32, "learning_rate": 0.00019704315778262016, "loss": 1.2421, "step": 75 },
    { "epoch": 0.32, "learning_rate": 0.00019693110579165513, "loss": 1.3066, "step": 76 },
    { "epoch": 0.33, "learning_rate": 0.00019681700301282234, "loss": 1.2259, "step": 77 },
    { "epoch": 0.33, "learning_rate": 0.00019670085186021375, "loss": 1.2563, "step": 78 },
    { "epoch": 0.34, "learning_rate": 0.000196582654791259, "loss": 1.2243, "step": 79 },
    { "epoch": 0.34, "learning_rate": 0.00019646241430667353, "loss": 1.2172, "step": 80 },
    { "epoch": 0.34, "learning_rate": 0.0001963401329504057, "loss": 1.2653, "step": 81 },
    { "epoch": 0.35, "learning_rate": 0.00019621581330958295, "loss": 1.3283, "step": 82 },
    { "epoch": 0.35, "learning_rate": 0.000196089458014457, "loss": 1.2118, "step": 83 },
    { "epoch": 0.36, "learning_rate": 0.00019596106973834835, "loss": 1.2633, "step": 84 },
    { "epoch": 0.36, "learning_rate": 0.0001958306511975895, "loss": 1.2357, "step": 85 },
    { "epoch": 0.37, "learning_rate": 0.00019569820515146768, "loss": 1.2624, "step": 86 },
    { "epoch": 0.37, "learning_rate": 0.0001955637344021664, "loss": 1.2221, "step": 87 },
    { "epoch": 0.37, "learning_rate": 0.00019542724179470616, "loss": 1.2304, "step": 88 },
    { "epoch": 0.38, "learning_rate": 0.0001952887302168842, "loss": 1.235, "step": 89 },
    { "epoch": 0.38, "learning_rate": 0.00019514820259921352, "loss": 1.2106, "step": 90 },
    { "epoch": 0.39, "learning_rate": 0.00019500566191486075, "loss": 1.2013, "step": 91 },
    { "epoch": 0.39, "learning_rate": 0.00019486111117958342, "loss": 1.2796, "step": 92 },
    { "epoch": 0.4, "learning_rate": 0.00019471455345166595, "loss": 1.195, "step": 93 },
    { "epoch": 0.4, "learning_rate": 0.00019456599183185507, "loss": 1.2813, "step": 94 },
    { "epoch": 0.4, "learning_rate": 0.00019441542946329422, "loss": 1.2356, "step": 95 },
    { "epoch": 0.41, "learning_rate": 0.00019426286953145704, "loss": 1.2815, "step": 96 },
    { "epoch": 0.41, "learning_rate": 0.00019410831526407984, "loss": 1.2286, "step": 97 },
    { "epoch": 0.42, "learning_rate": 0.00019395176993109356, "loss": 1.2427, "step": 98 },
    { "epoch": 0.42, "learning_rate": 0.0001937932368445544, "loss": 1.2076, "step": 99 },
    { "epoch": 0.43, "learning_rate": 0.00019363271935857372, "loss": 1.2544, "step": 100 },
    { "epoch": 0.43, "learning_rate": 0.00019347022086924732, "loss": 1.2632, "step": 101 },
    { "epoch": 0.43, "learning_rate": 0.00019330574481458333, "loss": 1.2088, "step": 102 },
    { "epoch": 0.44, "learning_rate": 0.00019313929467442952, "loss": 1.2348, "step": 103 },
    { "epoch": 0.44, "learning_rate": 0.00019297087397039984, "loss": 1.2186, "step": 104 },
    { "epoch": 0.45, "learning_rate": 0.00019280048626579962, "loss": 1.2039, "step": 105 },
    { "epoch": 0.45, "learning_rate": 0.0001926281351655506, "loss": 1.2665, "step": 106 },
    { "epoch": 0.46, "learning_rate": 0.0001924538243161142, "loss": 1.2293, "step": 107 },
    { "epoch": 0.46, "learning_rate": 0.0001922775574054147, "loss": 1.2712, "step": 108 },
    { "epoch": 0.46, "learning_rate": 0.00019209933816276102, "loss": 1.2019, "step": 109 },
    { "epoch": 0.47, "learning_rate": 0.00019191917035876798, "loss": 1.2248, "step": 110 },
    { "epoch": 0.47, "learning_rate": 0.00019173705780527642, "loss": 1.2718, "step": 111 },
    { "epoch": 0.48, "learning_rate": 0.00019155300435527256, "loss": 1.1894, "step": 112 },
    { "epoch": 0.48, "learning_rate": 0.00019136701390280644, "loss": 1.2603, "step": 113 },
    { "epoch": 0.49, "learning_rate": 0.00019117909038290974, "loss": 1.2292, "step": 114 },
    { "epoch": 0.49, "learning_rate": 0.00019098923777151222, "loss": 1.2264, "step": 115 },
    { "epoch": 0.49, "learning_rate": 0.00019079746008535784, "loss": 1.2553, "step": 116 },
    { "epoch": 0.5, "learning_rate": 0.0001906037613819197, "loss": 1.287, "step": 117 },
    { "epoch": 0.5, "learning_rate": 0.00019040814575931413, "loss": 1.1868, "step": 118 },
    { "epoch": 0.51, "learning_rate": 0.00019021061735621412, "loss": 1.2465, "step": 119 },
    { "epoch": 0.51, "learning_rate": 0.00019001118035176162, "loss": 1.2324, "step": 120 },
    { "epoch": 0.51, "learning_rate": 0.0001898098389654792, "loss": 1.242, "step": 121 },
    { "epoch": 0.52, "learning_rate": 0.0001896065974571808, "loss": 1.2202, "step": 122 },
    { "epoch": 0.52, "learning_rate": 0.00018940146012688146, "loss": 1.2269, "step": 123 },
    { "epoch": 0.53, "learning_rate": 0.00018919443131470658, "loss": 1.2456, "step": 124 },
    { "epoch": 0.53, "learning_rate": 0.00018898551540079989, "loss": 1.1917, "step": 125 },
    { "epoch": 0.54, "learning_rate": 0.00018877471680523082, "loss": 1.2336, "step": 126 },
    { "epoch": 0.54, "learning_rate": 0.00018856203998790112, "loss": 1.2126, "step": 127 },
    { "epoch": 0.54, "learning_rate": 0.00018834748944845028, "loss": 1.2487, "step": 128 },
    { "epoch": 0.55, "learning_rate": 0.00018813106972616055, "loss": 1.2732, "step": 129 },
    { "epoch": 0.55, "learning_rate": 0.0001879127853998607, "loss": 1.2385, "step": 130 },
    { "epoch": 0.56, "learning_rate": 0.00018769264108782933, "loss": 1.2294, "step": 131 },
    { "epoch": 0.56, "learning_rate": 0.00018747064144769703, "loss": 1.2875, "step": 132 },
    { "epoch": 0.57, "learning_rate": 0.0001872467911763479, "loss": 1.266, "step": 133 },
    { "epoch": 0.57, "learning_rate": 0.0001870210950098201, "loss": 1.1919, "step": 134 },
    { "epoch": 0.57, "learning_rate": 0.00018679355772320585, "loss": 1.2646, "step": 135 },
    { "epoch": 0.58, "learning_rate": 0.00018656418413055007, "loss": 1.2151, "step": 136 },
    { "epoch": 0.58, "learning_rate": 0.0001863329790847488, "loss": 1.224, "step": 137 },
    { "epoch": 0.59, "learning_rate": 0.0001860999474774466, "loss": 1.2197, "step": 138 },
    { "epoch": 0.59, "learning_rate": 0.00018586509423893267, "loss": 1.2487, "step": 139 },
    { "epoch": 0.6, "learning_rate": 0.00018562842433803687, "loss": 1.2164, "step": 140 },
    { "epoch": 0.6, "learning_rate": 0.00018538994278202448, "loss": 1.2395, "step": 141 },
    { "epoch": 0.6, "learning_rate": 0.0001851496546164903, "loss": 1.2454, "step": 142 },
    { "epoch": 0.61, "learning_rate": 0.00018490756492525187, "loss": 1.2096, "step": 143 },
    { "epoch": 0.61, "learning_rate": 0.00018466367883024186, "loss": 1.1975, "step": 144 },
    { "epoch": 0.62, "learning_rate": 0.00018441800149139988, "loss": 1.19, "step": 145 },
    { "epoch": 0.62, "learning_rate": 0.00018417053810656302, "loss": 1.2228, "step": 146 },
    { "epoch": 0.63, "learning_rate": 0.0001839212939113562, "loss": 1.2044, "step": 147 },
    { "epoch": 0.63, "learning_rate": 0.00018367027417908117, "loss": 1.2381, "step": 148 },
    { "epoch": 0.63, "learning_rate": 0.00018341748422060503, "loss": 1.225, "step": 149 },
    { "epoch": 0.64, "learning_rate": 0.00018316292938424787, "loss": 1.2104, "step": 150 },
    { "epoch": 0.64, "learning_rate": 0.00018290661505566963, "loss": 1.229, "step": 151 },
    { "epoch": 0.65, "learning_rate": 0.00018264854665775605, "loss": 1.2396, "step": 152 },
    { "epoch": 0.65, "learning_rate": 0.0001823887296505041, "loss": 1.2132, "step": 153 },
    { "epoch": 0.66, "learning_rate": 0.00018212716953090624, "loss": 1.2255, "step": 154 },
    { "epoch": 0.66, "learning_rate": 0.00018186387183283443, "loss": 1.2393, "step": 155 },
    { "epoch": 0.66, "learning_rate": 0.00018159884212692274, "loss": 1.273, "step": 156 },
    { "epoch": 0.67, "learning_rate": 0.00018133208602044972, "loss": 1.2314, "step": 157 },
    { "epoch": 0.67, "learning_rate": 0.00018106360915721956, "loss": 1.1983, "step": 158 },
    { "epoch": 0.68, "learning_rate": 0.0001807934172174429, "loss": 1.1759, "step": 159 },
    { "epoch": 0.68, "learning_rate": 0.00018052151591761644, "loss": 1.2081, "step": 160 },
    { "epoch": 0.69, "learning_rate": 0.0001802479110104022, "loss": 1.1594, "step": 161 },
    { "epoch": 0.69, "learning_rate": 0.0001799726082845057, "loss": 1.2388, "step": 162 },
    { "epoch": 0.69, "learning_rate": 0.00017969561356455336, "loss": 1.2052, "step": 163 },
    { "epoch": 0.7, "learning_rate": 0.00017941693271096966, "loss": 1.2385, "step": 164 },
    { "epoch": 0.7, "learning_rate": 0.00017913657161985268, "loss": 1.225, "step": 165 },
    { "epoch": 0.71, "learning_rate": 0.0001788545362228496, "loss": 1.2173, "step": 166 },
    { "epoch": 0.71, "learning_rate": 0.00017857083248703126, "loss": 1.1866, "step": 167 },
    { "epoch": 0.71, "learning_rate": 0.00017828546641476578, "loss": 1.2196, "step": 168 },
    { "epoch": 0.72, "learning_rate": 0.0001779984440435916, "loss": 1.2535, "step": 169 },
    { "epoch": 0.72, "learning_rate": 0.00017770977144608978, "loss": 1.2124, "step": 170 },
    { "epoch": 0.73, "learning_rate": 0.0001774194547297555, "loss": 1.2213, "step": 171 },
    { "epoch": 0.73, "learning_rate": 0.00017712750003686883, "loss": 1.165, "step": 172 },
    { "epoch": 0.74, "learning_rate": 0.0001768339135443648, "loss": 1.2336, "step": 173 },
    { "epoch": 0.74, "learning_rate": 0.00017653870146370267, "loss": 1.2574, "step": 174 },
    { "epoch": 0.74, "learning_rate": 0.00017624187004073463, "loss": 1.201, "step": 175 },
    { "epoch": 0.75, "learning_rate": 0.0001759434255555734, "loss": 1.2127, "step": 176 },
    { "epoch": 0.75, "learning_rate": 0.00017564337432245976, "loss": 1.1924, "step": 177 },
    { "epoch": 0.76, "learning_rate": 0.00017534172268962852, "loss": 1.293, "step": 178 },
    { "epoch": 0.76, "learning_rate": 0.00017503847703917455, "loss": 1.3086, "step": 179 },
    { "epoch": 0.77, "learning_rate": 0.0001747336437869176, "loss": 1.2584, "step": 180 },
    { "epoch": 0.77, "learning_rate": 0.00017442722938226647, "loss": 1.242, "step": 181 },
    { "epoch": 0.77, "learning_rate": 0.00017411924030808284, "loss": 1.1669, "step": 182 },
    { "epoch": 0.78, "learning_rate": 0.00017380968308054385, "loss": 1.2288, "step": 183 },
    { "epoch": 0.78, "learning_rate": 0.0001734985642490043, "loss": 1.2583, "step": 184 },
    { "epoch": 0.79, "learning_rate": 0.00017318589039585816, "loss": 1.2701, "step": 185 },
    { "epoch": 0.79, "learning_rate": 0.0001728716681363993, "loss": 1.2294, "step": 186 },
    { "epoch": 0.8, "learning_rate": 0.00017255590411868136, "loss": 1.1843, "step": 187 },
    { "epoch": 0.8, "learning_rate": 0.00017223860502337733, "loss": 1.2367, "step": 188 },
    { "epoch": 0.8, "learning_rate": 0.00017191977756363808, "loss": 1.2128, "step": 189 },
    { "epoch": 0.81, "learning_rate": 0.00017159942848495025, "loss": 1.1754, "step": 190 },
    { "epoch": 0.81, "learning_rate": 0.00017127756456499372, "loss": 1.1917, "step": 191 },
    { "epoch": 0.82, "learning_rate": 0.0001709541926134982, "loss": 1.2122, "step": 192 },
    { "epoch": 0.82, "learning_rate": 0.0001706293194720989, "loss": 1.2336, "step": 193 },
    { "epoch": 0.83, "learning_rate": 0.00017030295201419206, "loss": 1.2285, "step": 194 },
    { "epoch": 0.83, "learning_rate": 0.00016997509714478944, "loss": 1.2509, "step": 195 },
    { "epoch": 0.83, "learning_rate": 0.00016964576180037217, "loss": 1.217, "step": 196 },
    { "epoch": 0.84, "learning_rate": 0.00016931495294874408, "loss": 1.2189, "step": 197 },
    { "epoch": 0.84, "learning_rate": 0.00016898267758888423, "loss": 1.1784, "step": 198 },
    { "epoch": 0.85, "learning_rate": 0.00016864894275079882, "loss": 1.2197, "step": 199 },
    { "epoch": 0.85, "learning_rate": 0.00016831375549537252, "loss": 1.2552, "step": 200 },
    { "epoch": 0.86, "learning_rate": 0.00016797712291421904, "loss": 1.1743, "step": 201 },
    { "epoch": 0.86, "learning_rate": 0.00016763905212953102, "loss": 1.2396, "step": 202 },
    { "epoch": 0.86, "learning_rate": 0.0001672995502939295, "loss": 1.2283, "step": 203 },
    { "epoch": 0.87, "learning_rate": 0.00016695862459031248, "loss": 1.1892, "step": 204 },
    { "epoch": 0.87, "learning_rate": 0.00016661628223170295, "loss": 1.1739, "step": 205 },
    { "epoch": 0.88, "learning_rate": 0.00016627253046109638, "loss": 1.1211, "step": 206 },
    { "epoch": 0.88, "learning_rate": 0.0001659273765513073, "loss": 1.1466, "step": 207 },
    { "epoch": 0.89, "learning_rate": 0.00016558082780481563, "loss": 1.1761, "step": 208 },
    { "epoch": 0.89, "learning_rate": 0.00016523289155361204, "loss": 1.2146, "step": 209 },
    { "epoch": 0.89, "learning_rate": 0.00016488357515904295, "loss": 1.2032, "step": 210 },
    { "epoch": 0.9, "learning_rate": 0.0001645328860116546, "loss": 1.2129, "step": 211 },
    { "epoch": 0.9, "learning_rate": 0.00016418083153103683, "loss": 1.2211, "step": 212 },
    { "epoch": 0.91, "learning_rate": 0.0001638274191656661, "loss": 1.2022, "step": 213 },
    { "epoch": 0.91, "learning_rate": 0.00016347265639274778, "loss": 1.2057, "step": 214 },
    { "epoch": 0.91, "learning_rate": 0.00016311655071805822, "loss": 1.229, "step": 215 },
    { "epoch": 0.92, "learning_rate": 0.00016275910967578558, "loss": 1.1658, "step": 216 },
    { "epoch": 0.92, "learning_rate": 0.00016240034082837078, "loss": 1.2102, "step": 217 },
    { "epoch": 0.93, "learning_rate": 0.00016204025176634712, "loss": 1.2057, "step": 218 },
    { "epoch": 0.93, "learning_rate": 0.00016167885010818017, "loss": 1.222, "step": 219 },
    { "epoch": 0.94, "learning_rate": 0.00016131614350010614, "loss": 1.1824, "step": 220 },
    { "epoch": 0.94, "learning_rate": 0.00016095213961597033, "loss": 1.2212, "step": 221 },
    { "epoch": 0.94, "learning_rate": 0.00016058684615706477, "loss": 1.2101, "step": 222 },
    { "epoch": 0.95, "learning_rate": 0.00016022027085196516, "loss": 1.153, "step": 223 },
    { "epoch": 0.95, "learning_rate": 0.0001598524214563675, "loss": 1.1471, "step": 224 },
    { "epoch": 0.96, "learning_rate": 0.00015948330575292401, "loss": 1.1859, "step": 225 },
    { "epoch": 0.96, "learning_rate": 0.0001591129315510782, "loss": 1.1612, "step": 226 },
    { "epoch": 0.97, "learning_rate": 0.00015874130668690003, "loss": 1.1691, "step": 227 },
    { "epoch": 0.97, "learning_rate": 0.00015836843902291984, "loss": 1.1859, "step": 228 },
    { "epoch": 0.97, "learning_rate": 0.00015799433644796216, "loss": 1.1831, "step": 229 },
    { "epoch": 0.98, "learning_rate": 0.00015761900687697865, "loss": 1.226, "step": 230 },
    { "epoch": 0.98, "learning_rate": 0.00015724245825088086, "loss": 1.233, "step": 231 },
    { "epoch": 0.99, "learning_rate": 0.00015686469853637192, "loss": 1.2146, "step": 232 },
    { "epoch": 0.99, "learning_rate": 0.00015648573572577839, "loss": 1.1979, "step": 233 },
    { "epoch": 1.0, "learning_rate": 0.0001561055778368807, "loss": 1.1544, "step": 234 },
    { "epoch": 1.0, "learning_rate": 0.00015572423291274393, "loss": 1.1036, "step": 235 },
    { "epoch": 1.0, "learning_rate": 0.00015534170902154742, "loss": 0.9431, "step": 236 },
    { "epoch": 1.01, "learning_rate": 0.00015495801425641407, "loss": 0.9798, "step": 237 },
    { "epoch": 1.01, "learning_rate": 0.0001545731567352392, "loss": 0.9381, "step": 238 },
    { "epoch": 1.02, "learning_rate": 0.00015418714460051875, "loss": 0.9564, "step": 239 },
    { "epoch": 1.02, "learning_rate": 0.00015379998601917704, "loss": 0.9039, "step": 240 },
    { "epoch": 1.03, "learning_rate": 0.0001534116891823939, "loss": 0.8984, "step": 241 },
    { "epoch": 1.03, "learning_rate": 0.00015302226230543147, "loss": 0.952, "step": 242 },
    { "epoch": 1.03, "learning_rate": 0.00015263171362746026, "loss": 0.94, "step": 243 },
    { "epoch": 1.04, "learning_rate": 0.0001522400514113851, "loss": 0.9551, "step": 244 },
    { "epoch": 1.04, "learning_rate": 0.00015184728394366988, "loss": 1.0139, "step": 245 },
    { "epoch": 1.05, "learning_rate": 0.00015145341953416271, "loss": 0.9337, "step": 246 },
    { "epoch": 1.05, "learning_rate": 0.0001510584665159198, "loss": 0.9504, "step": 247 },
    { "epoch": 1.06, "learning_rate": 0.00015066243324502918, "loss": 0.9361, "step": 248 },
    { "epoch": 1.06, "learning_rate": 0.00015026532810043407, "loss": 0.9395, "step": 249 },
    { "epoch": 1.06, "learning_rate": 0.00014986715948375542, "loss": 0.9442, "step": 250 },
    { "epoch": 1.07, "learning_rate": 0.00014946793581911428, "loss": 0.9664, "step": 251 },
    { "epoch": 1.07, "learning_rate": 0.00014906766555295358, "loss": 0.8896, "step": 252 },
    { "epoch": 1.08, "learning_rate": 0.00014866635715385927, "loss": 0.9328, "step": 253 },
    { "epoch": 1.08, "learning_rate": 0.0001482640191123813, "loss": 0.8727, "step": 254 },
    { "epoch": 1.09, "learning_rate": 0.00014786065994085396, "loss": 0.8948, "step": 255 },
    { "epoch": 1.09, "learning_rate": 0.00014745628817321578, "loss": 0.9564, "step": 256 },
    { "epoch": 1.09, "learning_rate": 0.00014705091236482887, "loss": 0.9485, "step": 257 },
    { "epoch": 1.1, "learning_rate": 0.00014664454109229808, "loss": 0.931, "step": 258 },
    { "epoch": 1.1, "learning_rate": 0.00014623718295328944, "loss": 0.9614, "step": 259 },
    { "epoch": 1.11, "learning_rate": 0.00014582884656634827, "loss": 0.9347, "step": 260 },
    { "epoch": 1.11, "learning_rate": 0.00014541954057071692, "loss": 0.8861, "step": 261 },
    { "epoch": 1.11, "learning_rate": 0.00014500927362615177, "loss": 0.9045, "step": 262 },
    { "epoch": 1.12, "learning_rate": 0.00014459805441274028, "loss": 0.9271, "step": 263 },
    { "epoch": 1.12, "learning_rate": 0.00014418589163071722, "loss": 0.8829, "step": 264 },
    { "epoch": 1.13, "learning_rate": 0.00014377279400028053, "loss": 0.9252, "step": 265 },
    { "epoch": 1.13, "learning_rate": 0.00014335877026140688, "loss": 0.9299, "step": 266 },
    { "epoch": 1.14, "learning_rate": 0.000142943829173667, "loss": 0.8597, "step": 267 },
    { "epoch": 1.14, "learning_rate": 0.00014252797951603977, "loss": 0.8919, "step": 268 },
    { "epoch": 1.14, "learning_rate": 0.00014211123008672712, "loss": 0.9356, "step": 269 },
    { "epoch": 1.15, "learning_rate": 0.0001416935897029675, "loss": 0.8959, "step": 270 },
    { "epoch": 1.15, "learning_rate": 0.0001412750672008494, "loss": 0.8936, "step": 271 },
    { "epoch": 1.16, "learning_rate": 0.00014085567143512457, "loss": 0.858, "step": 272 },
    { "epoch": 1.16, "learning_rate": 0.00014043541127902037, "loss": 0.8618, "step": 273 },
    { "epoch": 1.17, "learning_rate": 0.00014001429562405225, "loss": 0.89, "step": 274 },
    { "epoch": 1.17, "learning_rate": 0.00013959233337983582, "loss": 0.9272, "step": 275 },
    { "epoch": 1.17, "learning_rate": 0.00013916953347389776, "loss": 0.8889, "step": 276 },
    { "epoch": 1.18, "learning_rate": 0.0001387459048514876, "loss": 0.8864, "step": 277 },
    { "epoch": 1.18, "learning_rate": 0.00013832145647538799, "loss": 0.9299, "step": 278 },
    { "epoch": 1.19, "learning_rate": 0.00013789619732572538, "loss": 0.9379, "step": 279 },
    { "epoch": 1.19, "learning_rate": 0.00013747013639977973, "loss": 0.8988, "step": 280 },
    { "epoch": 1.2, "learning_rate": 0.0001370432827117945, "loss": 0.8828, "step": 281 },
    { "epoch": 1.2, "learning_rate": 0.0001366156452927856, "loss": 0.8929, "step": 282 },
    { "epoch": 1.2, "learning_rate": 0.00013618723319035056, "loss": 0.903, "step": 283 },
    { "epoch": 1.21, "learning_rate": 0.000135758055468477, "loss": 0.946, "step": 284 },
    { "epoch": 1.21, "learning_rate": 0.00013532812120735087, "loss": 0.8985, "step": 285 },
    { "epoch": 1.22, "learning_rate": 0.0001348974395031643, "loss": 0.9617, "step": 286 },
    { "epoch": 1.22, "learning_rate": 0.00013446601946792334, "loss": 0.8937, "step": 287 },
    { "epoch": 1.23, "learning_rate": 0.00013403387022925488, "loss": 0.9223, "step": 288 },
    { "epoch": 1.23, "learning_rate": 0.00013360100093021376, "loss": 0.8876, "step": 289 },
    { "epoch": 1.23, "learning_rate": 0.00013316742072908927, "loss": 0.8735, "step": 290 },
    { "epoch": 1.24, "learning_rate": 0.0001327331387992114, "loss": 0.8766, "step": 291 },
    { "epoch": 1.24, "learning_rate": 0.00013229816432875664, "loss": 0.8885, "step": 292 },
    { "epoch": 1.25, "learning_rate": 0.00013186250652055378, "loss": 0.8816, "step": 293 },
    { "epoch": 1.25, "learning_rate": 0.00013142617459188899, "loss": 0.8838, "step": 294 },
    { "epoch": 1.26, "learning_rate": 0.0001309891777743111, "loss": 0.8981, "step": 295 },
    { "epoch": 1.26, "learning_rate": 0.00013055152531343592, "loss": 0.8672, "step": 296 },
    { "epoch": 1.26, "learning_rate": 0.00013011322646875088, "loss": 0.9303, "step": 297 },
    { "epoch": 1.27, "learning_rate": 0.00012967429051341913, "loss": 0.8727, "step": 298 },
    { "epoch": 1.27, "learning_rate": 0.00012923472673408322, "loss": 0.896, "step": 299 },
    { "epoch": 1.28, "learning_rate": 0.0001287945444306686, "loss": 0.9047, "step": 300 },
    { "epoch": 1.28, "learning_rate": 0.00012835375291618716, "loss": 0.8641, "step": 301 },
    { "epoch": 1.29, "learning_rate": 0.00012791236151653973, "loss": 0.8577, "step": 302 },
    { "epoch": 1.29, "learning_rate": 0.00012747037957031916, "loss": 0.8815, "step": 303 },
    { "epoch": 1.29, "learning_rate": 0.00012702781642861253, "loss": 0.9196, "step": 304 },
    { "epoch": 1.3, "learning_rate": 0.00012658468145480337, "loss": 0.8782, "step": 305 },
    { "epoch": 1.3, "learning_rate": 0.00012614098402437366, "loss": 0.953, "step": 306 },
    { "epoch": 1.31, "learning_rate": 0.00012569673352470523, "loss": 0.9381, "step": 307 },
    { "epoch": 1.31, "learning_rate": 0.00012525193935488137, "loss": 0.8818, "step": 308 },
    { "epoch": 1.31, "learning_rate": 0.00012480661092548786, "loss": 0.9057, "step": 309 },
    { "epoch": 1.32, "learning_rate": 0.00012436075765841396, "loss": 0.8975, "step": 310 },
    { "epoch": 1.32, "learning_rate": 0.00012391438898665287, "loss": 0.918, "step": 311 },
    { "epoch": 1.33, "learning_rate": 0.00012346751435410248, "loss": 0.9211, "step": 312 },
    { "epoch": 1.33, "learning_rate": 0.0001230201432153653, "loss": 0.9608, "step": 313 },
    { "epoch": 1.34, "learning_rate": 0.00012257228503554835, "loss": 0.8869, "step": 314 },
    { "epoch": 1.34, "learning_rate": 0.00012212394929006336, "loss": 0.897, "step": 315 },
    { "epoch": 1.34, "learning_rate": 0.00012167514546442576, "loss": 0.8858, "step": 316 },
    { "epoch": 1.35, "learning_rate": 0.00012122588305405434, "loss": 0.8305, "step": 317 },
    { "epoch": 1.35, "learning_rate": 0.0001207761715640702, "loss": 0.9205, "step": 318 },
    { "epoch": 1.36, "learning_rate": 0.00012032602050909574, "loss": 0.922, "step": 319 },
    { "epoch": 1.36, "learning_rate": 0.00011987543941305321, "loss": 0.9019, "step": 320 },
    { "epoch": 1.37, "learning_rate": 0.00011942443780896351, "loss": 0.9028, "step": 321 },
    { "epoch": 1.37, "learning_rate": 0.00011897302523874405, "loss": 0.9307, "step": 322 },
    { "epoch": 1.37, "learning_rate": 0.0001185212112530073, "loss": 0.9475, "step": 323 },
    { "epoch": 1.38, "learning_rate": 0.0001180690054108585, "loss": 0.9168, "step": 324 },
    { "epoch": 1.38, "learning_rate": 0.00011761641727969343, "loss": 0.8799, "step": 325 },
    { "epoch": 1.39, "learning_rate": 0.00011716345643499608, "loss": 0.9363, "step": 326 },
    { "epoch": 1.39, "learning_rate": 0.00011671013246013596, "loss": 0.852, "step": 327 },
    { "epoch": 1.4, "learning_rate": 0.00011625645494616535, "loss": 0.9343, "step": 328 },
    { "epoch": 1.4, "learning_rate": 0.0001158024334916165, "loss": 0.916, "step": 329 },
    { "epoch": 1.4, "learning_rate": 0.00011534807770229845, "loss": 0.9824, "step": 330 },
    { "epoch": 1.41, "learning_rate": 0.00011489339719109378, "loss": 0.9317, "step": 331 },
    { "epoch": 1.41, "learning_rate": 0.00011443840157775527, "loss": 0.9233, "step": 332 },
    { "epoch": 1.42, "learning_rate": 0.00011398310048870247, "loss": 0.9104, "step": 333 },
    { "epoch": 1.42, "learning_rate": 0.00011352750355681772, "loss": 0.9006, "step": 334 },
    { "epoch": 1.43, "learning_rate": 0.00011307162042124277, "loss": 0.926, "step": 335 },
    { "epoch": 1.43, "learning_rate": 0.00011261546072717454, "loss": 0.9303, "step": 336 },
    { "epoch": 1.43, "learning_rate": 0.00011215903412566111, "loss": 0.8762, "step": 337 },
    { "epoch": 1.44, "learning_rate": 0.00011170235027339766, "loss": 0.8965, "step": 338 },
    { "epoch": 1.44, "learning_rate": 0.00011124541883252198, "loss": 0.9352, "step": 339 },
    { "epoch": 1.45, "learning_rate": 0.00011078824947041016, "loss": 0.9081, "step": 340 },
    { "epoch": 1.45, "learning_rate": 0.00011033085185947208, "loss": 0.9391, "step": 341 },
    { "epoch": 1.46, "learning_rate": 0.00010987323567694661, "loss": 0.8827, "step": 342 },
    { "epoch": 1.46, "learning_rate": 0.00010941541060469712, "loss": 0.9139, "step": 343 },
    { "epoch": 1.46, "learning_rate": 0.00010895738632900636, "loss": 0.8791, "step": 344 },
    { "epoch": 1.47, "learning_rate": 0.00010849917254037174, "loss": 0.8964, "step": 345 },
    { "epoch": 1.47, "learning_rate": 0.00010804077893330023, "loss": 0.924, "step": 346 },
    { "epoch": 1.48, "learning_rate": 0.00010758221520610321, "loss": 0.8887, "step": 347 },
    { "epoch": 1.48, "learning_rate": 0.00010712349106069131, "loss": 0.9306, "step": 348 },
    { "epoch": 1.49, "learning_rate": 0.00010666461620236922, "loss": 0.8802, "step": 349 },
    { "epoch": 1.49, "learning_rate": 0.00010620560033963025, "loss": 0.897, "step": 350 },
    { "epoch": 1.49, "learning_rate": 0.00010574645318395095, "loss": 0.9017, "step": 351 },
    { "epoch": 1.5, "learning_rate": 0.00010528718444958567, "loss": 0.8665, "step": 352 },
    { "epoch": 1.5, "learning_rate": 0.00010482780385336106, "loss": 0.9159, "step": 353 },
    { "epoch": 1.51, "learning_rate": 0.00010436832111447034, "loss": 0.8568, "step": 354 },
    { "epoch": 1.51, "learning_rate": 0.00010390874595426794, "loss": 0.9258, "step": 355 },
    { "epoch": 1.51, "learning_rate": 0.00010344908809606353, "loss": 0.8684, "step": 356 },
    { "epoch": 1.52, "learning_rate": 0.00010298935726491648, "loss": 0.8991, "step": 357 },
    { "epoch": 1.52, "learning_rate": 0.00010252956318743006, "loss": 0.8716, "step": 358 },
    { "epoch": 1.53, "learning_rate": 0.0001020697155915457, "loss": 0.9192, "step": 359 },
    { "epoch": 1.53, "learning_rate": 0.000101609824206337, "loss": 0.8241, "step": 360 },
    { "epoch": 1.54, "learning_rate": 0.00010114989876180423, "loss": 0.8991, "step": 361 },
    { "epoch": 1.54, "learning_rate": 0.00010068994898866804, "loss": 0.8729, "step": 362 },
    { "epoch": 1.54, "learning_rate": 0.00010022998461816389, "loss": 0.9218, "step": 363 },
    { "epoch": 1.55, "learning_rate": 9.977001538183616e-05, "loss": 0.8121, "step": 364 },
    { "epoch": 1.55, "learning_rate": 9.9310051011332e-05, "loss": 0.8794, "step": 365 },
    { "epoch": 1.56, "learning_rate": 9.88501012381958e-05, "loss": 0.9085, "step": 366 },
    { "epoch": 1.56, "learning_rate": 9.839017579366299e-05, "loss": 0.9281, "step": 367 },
    { "epoch": 1.57, "learning_rate": 9.793028440845434e-05, "loss": 0.8583, "step": 368 },
    { "epoch": 1.57, "learning_rate": 9.747043681256996e-05, "loss": 0.8277, "step": 369 },
    { "epoch": 1.57, "learning_rate": 9.701064273508356e-05, "loss": 0.8954, "step": 370 },
    { "epoch": 1.58, "learning_rate": 9.65509119039365e-05, "loss": 0.9306, "step": 371 },
    { "epoch": 1.58, "learning_rate": 9.609125404573211e-05, "loss": 0.9431, "step": 372 },
    { "epoch": 1.59, "learning_rate": 9.563167888552968e-05, "loss": 0.9396, "step": 373 },
    { "epoch": 1.59, "learning_rate": 9.517219614663896e-05, "loss": 0.8953, "step": 374 },
    { "epoch": 1.6, "learning_rate": 9.471281555041432e-05, "loss": 0.8922, "step": 375 },
    { "epoch": 1.6, "learning_rate": 9.425354681604907e-05, "loss": 0.8737, "step": 376 },
    { "epoch": 1.6, "learning_rate": 9.379439966036977e-05, "loss": 0.8988, "step": 377 },
    { "epoch": 1.61, "learning_rate": 9.333538379763079e-05, "loss": 0.8868, "step": 378 },
    { "epoch": 1.61, "learning_rate": 9.28765089393087e-05, "loss": 0.9303, "step": 379 },
    { "epoch": 1.62, "learning_rate": 9.241778479389683e-05, "loss": 0.934, "step": 380 },
    { "epoch": 1.62, "learning_rate": 9.195922106669981e-05, "loss": 0.8812, "step": 381 },
    { "epoch": 1.63, "learning_rate": 9.150082745962828e-05, "loss": 0.867, "step": 382 },
    { "epoch": 1.63, "learning_rate": 9.104261367099365e-05, "loss": 0.8649, "step": 383 },
    { "epoch": 1.63, "learning_rate": 9.058458939530295e-05, "loss": 0.9221, "step": 384 },
    { "epoch": 1.64, "learning_rate": 9.01267643230534e-05, "loss": 0.859, "step": 385 },
    { "epoch": 1.64, "learning_rate": 8.966914814052796e-05, "loss": 0.9446, "step": 386 },
    { "epoch": 1.65, "learning_rate": 8.921175052958985e-05, "loss": 0.885, "step": 387 },
    { "epoch": 1.65, "learning_rate": 8.875458116747806e-05, "loss": 0.8868, "step": 388 },
    { "epoch": 1.66, "learning_rate": 8.829764972660237e-05, "loss": 0.9057, "step": 389 },
    { "epoch": 1.66, "learning_rate": 8.78409658743389e-05, "loss": 0.8473, "step": 390 },
    { "epoch": 1.66, "learning_rate": 8.738453927282548e-05, "loss": 0.9346, "step": 391 },
    { "epoch": 1.67, "learning_rate": 8.692837957875725e-05, "loss": 0.9158, "step": 392 },
    { "epoch": 1.67, "learning_rate": 8.647249644318232e-05, "loss": 0.8827, "step": 393 },
    { "epoch": 1.68, "learning_rate": 8.601689951129757e-05, "loss": 0.8713, "step": 394 },
    { "epoch": 1.68, "learning_rate": 8.556159842224472e-05, "loss": 0.923, "step": 395 },
    { "epoch": 1.69, "learning_rate": 8.510660280890625e-05, "loss": 0.8329, "step": 396 },
    { "epoch": 1.69, "learning_rate": 8.465192229770156e-05, "loss": 0.8489, "step": 397 },
    { "epoch": 1.69, "learning_rate": 8.41975665083835e-05, "loss": 0.8915, "step": 398 },
    { "epoch": 1.7, "learning_rate": 8.374354505383467e-05, "loss": 0.8903, "step": 399 },
    { "epoch": 1.7, "learning_rate": 8.328986753986409e-05, "loss": 0.913, "step": 400 },
    { "epoch": 1.71, "learning_rate": 8.283654356500394e-05, "loss": 0.8553, "step": 401 },
    { "epoch": 1.71, "learning_rate": 8.238358272030658e-05, "loss": 0.905, "step": 402 },
    { "epoch": 1.71, "learning_rate": 8.193099458914148e-05, "loss": 0.8946, "step": 403 },
    { "epoch": 1.72, "learning_rate": 8.147878874699274e-05, "loss": 0.9124, "step": 404 },
    { "epoch": 1.72, "learning_rate": 8.102697476125597e-05, "loss": 0.8419, "step": 405 },
    { "epoch": 1.73, "learning_rate": 8.057556219103653e-05, "loss": 0.905, "step": 406 },
    { "epoch": 1.73, "learning_rate": 8.012456058694678e-05, "loss": 0.8736, "step": 407 },
    { "epoch": 1.74, "learning_rate": 7.967397949090431e-05, "loss": 0.8963, "step": 408 },
    { "epoch": 1.74, "learning_rate": 7.922382843592984e-05, "loss": 0.9139, "step": 409 },
    { "epoch": 1.74, "learning_rate": 7.87741169459457e-05, "loss": 0.8537, "step": 410 },
    { "epoch": 1.75, "learning_rate": 7.832485453557424e-05, "loss": 0.8808, "step": 411 },
    { "epoch": 1.75, "learning_rate": 7.787605070993668e-05, "loss": 0.867, "step": 412 },
    { "epoch": 1.76, "learning_rate": 7.742771496445167e-05, "loss": 0.8718, "step": 413 },
    { "epoch": 1.76, "learning_rate": 7.697985678463476e-05, "loss": 0.8623, "step": 414 },
    { "epoch": 1.77, "learning_rate": 7.653248564589751e-05, "loss": 0.8879, "step": 415 },
    { "epoch": 1.77, "learning_rate": 7.608561101334714e-05, "loss": 0.854, "step": 416 },
    { "epoch": 1.77, "learning_rate": 7.563924234158607e-05, "loss": 0.9452, "step": 417 },
    { "epoch": 1.78, "learning_rate": 7.519338907451215e-05, "loss": 0.8661, "step": 418 },
    { "epoch": 1.78, "learning_rate": 7.474806064511863e-05, "loss": 0.8852, "step": 419 },
    { "epoch": 1.79, "learning_rate": 7.43032664752948e-05, "loss": 0.8795, "step": 420 },
    { "epoch": 1.79, "learning_rate": 7.385901597562637e-05, "loss": 0.8973, "step": 421 },
    { "epoch": 1.8, "learning_rate": 7.341531854519664e-05, "loss": 0.8834, "step": 422 },
    { "epoch": 1.8, "learning_rate": 7.29721835713875e-05, "loss": 0.8979, "step": 423 },
    { "epoch": 1.8, "learning_rate": 7.25296204296809e-05, "loss": 0.8892, "step": 424 },
    { "epoch": 1.81, "learning_rate": 7.208763848346029e-05, "loss": 0.8541, "step": 425 },
    { "epoch": 1.81, "learning_rate": 7.164624708381285e-05, "loss": 0.8822, "step": 426 },
    { "epoch": 1.82, "learning_rate": 7.120545556933138e-05, "loss": 0.854, "step": 427 },
    { "epoch": 1.82, "learning_rate": 7.076527326591682e-05, "loss": 0.8699, "step": 428 },
    { "epoch": 1.83, "learning_rate": 7.03257094865809e-05, "loss": 0.9127, "step": 429 },
    { "epoch": 1.83, "learning_rate": 6.988677353124913e-05, "loss": 0.894, "step": 430 },
    { "epoch": 1.83, "learning_rate": 6.94484746865641e-05, "loss": 0.9312, "step": 431 },
    { "epoch": 1.84, "learning_rate": 6.901082222568895e-05, "loss": 0.9396, "step": 432 },
    { "epoch": 1.84, "learning_rate": 6.857382540811101e-05, "loss": 0.8763, "step": 433 },
    { "epoch": 1.85, "learning_rate": 6.813749347944625e-05, "loss": 0.8556, "step": 434 },
    { "epoch": 1.85, "learning_rate": 6.770183567124337e-05, "loss": 0.8683, "step": 435 },
    { "epoch": 1.86, "learning_rate": 6.726686120078862e-05, "loss": 0.8735, "step": 436 },
    { "epoch": 1.86, "learning_rate": 6.683257927091074e-05, "loss": 0.9203, "step": 437 },
    { "epoch": 1.86, "learning_rate": 6.639899906978626e-05, "loss": 0.9083, "step": 438 },
    { "epoch": 1.87, "learning_rate": 6.596612977074515e-05, "loss": 0.9017, "step": 439 },
    { "epoch": 1.87, "learning_rate": 6.553398053207671e-05, "loss": 0.8662, "step": 440 },
    { "epoch": 1.88, "learning_rate": 6.510256049683571e-05, "loss": 0.8486, "step": 441 },
    { "epoch": 1.88, "learning_rate": 6.467187879264916e-05, "loss": 0.8897, "step": 442 },
    { "epoch": 1.89, "learning_rate": 6.4241944531523e-05, "loss": 0.9247, "step": 443 },
    { "epoch": 1.89, "learning_rate": 6.381276680964947e-05, "loss": 0.922, "step": 444 },
    { "epoch": 1.89, "learning_rate": 6.338435470721442e-05, "loss": 0.897, "step": 445 },
    { "epoch": 1.9, "learning_rate": 6.295671728820553e-05, "loss": 0.898, "step": 446 },
    { "epoch": 1.9, "learning_rate": 6.252986360022029e-05, "loss": 0.8948, "step": 447 },
    { "epoch": 1.91, "learning_rate": 6.210380267427467e-05, "loss": 0.8923, "step": 448 },
    { "epoch": 1.91, "learning_rate": 6.167854352461202e-05, "loss": 0.8976, "step": 449 },
    { "epoch": 1.91, "learning_rate": 6.125409514851243e-05, "loss": 0.9255, "step": 450 },
    { "epoch": 1.92, "learning_rate": 6.083046652610224e-05, "loss": 0.8501, "step": 451 },
    { "epoch": 1.92, "learning_rate": 6.040766662016424e-05, "loss": 0.9446, "step": 452 },
    { "epoch": 1.93, "learning_rate": 5.998570437594775e-05, "loss": 0.8504, "step": 453 },
    { "epoch": 1.93, "learning_rate": 5.9564588720979655e-05, "loss": 0.8629, "step": 454 },
    { "epoch": 1.94, "learning_rate": 5.914432856487544e-05, "loss": 0.826, "step": 455 },
    { "epoch": 1.94, "learning_rate": 5.8724932799150586e-05, "loss": 0.883, "step": 456 },
    { "epoch": 1.94, "learning_rate": 5.830641029703254e-05, "loss": 0.8657, "step": 457 },
    { "epoch": 1.95, "learning_rate": 5.788876991327288e-05, "loss": 0.893, "step": 458 },
    { "epoch": 1.95, "learning_rate": 5.747202048396023e-05, "loss": 0.8951, "step": 459 },
    { "epoch": 1.96, "learning_rate": 5.705617082633306e-05, "loss": 0.8968, "step": 460 },
    { "epoch": 1.96, "learning_rate": 5.664122973859313e-05, "loss": 0.849, "step": 461 },
    { "epoch": 1.97, "learning_rate": 5.622720599971952e-05, "loss": 0.8614, "step": 462 },
    { "epoch": 1.97, "learning_rate": 5.5814108369282824e-05, "loss": 0.8236, "step": 463 },
    { "epoch": 1.97, "learning_rate": 5.540194558725973e-05, "loss": 0.8196, "step": 464 },
    { "epoch": 1.98, "learning_rate": 5.4990726373848243e-05, "loss": 0.8639, "step": 465 },
    { "epoch": 1.98, "learning_rate": 5.458045942928309e-05, "loss": 0.8689, "step": 466 },
    { "epoch": 1.99, "learning_rate": 5.417115343365171e-05, "loss": 0.8379, "step": 467 },
    { "epoch": 1.99, "learning_rate": 5.37628170467106e-05, "loss": 0.8694, "step": 468 },
    { "epoch": 2.0, "learning_rate": 5.3355458907701925e-05, "loss": 0.8661, "step": 469 },
    { "epoch": 2.0, "learning_rate": 5.2949087635171144e-05, "loss": 0.6885, "step": 470 },
    { "epoch": 2.0, "learning_rate": 5.254371182678424e-05, "loss": 0.6118, "step": 471 },
    { "epoch": 2.01, "learning_rate": 5.213934005914607e-05, "loss": 0.6482, "step": 472 },
    { "epoch": 2.01, "learning_rate": 5.173598088761874e-05, "loss": 0.5909, "step": 473 },
    { "epoch": 2.02, "learning_rate": 5.133364284614077e-05, "loss": 0.6283, "step": 474 },
    { "epoch": 2.02, "learning_rate": 5.093233444704641e-05, "loss": 0.5609, "step": 475 },
    { "epoch": 2.03, "learning_rate": 5.053206418088572e-05, "loss": 0.5663, "step": 476 },
    { "epoch": 2.03, "learning_rate": 5.0132840516244604e-05, "loss": 0.5876, "step": 477 },
    { "epoch": 2.03, "learning_rate": 4.9734671899565955e-05, "loss": 0.6185, "step": 478 },
    { "epoch": 2.04, "learning_rate": 4.933756675497082e-05, "loss": 0.6254, "step": 479 },
    { "epoch": 2.04, "learning_rate": 4.894153348408021e-05, "loss": 0.5577, "step": 480 },
    { "epoch": 2.05, "learning_rate": 4.8546580465837274e-05, "loss": 0.6386, "step": 481 },
    { "epoch": 2.05, "learning_rate": 4.815271605633012e-05, "loss": 0.5778, "step": 482 },
    { "epoch": 2.06, "learning_rate": 4.775994858861492e-05, "loss": 0.6156, "step": 483 },
    { "epoch": 2.06, "learning_rate": 4.7368286372539775e-05, "loss": 0.6072, "step": 484 },
    { "epoch": 2.06, "learning_rate": 4.697773769456859e-05, "loss": 0.6396, "step": 485 },
    { "epoch": 2.07, "learning_rate": 4.658831081760614e-05, "loss": 0.585, "step": 486 },
    { "epoch": 2.07, "learning_rate": 4.6200013980822954e-05, "loss": 0.5711, "step": 487 },
    { "epoch": 2.08, "learning_rate": 4.5812855399481256e-05, "loss": 0.6314, "step": 488 },
    { "epoch": 2.08, "learning_rate": 4.542684326476082e-05, "loss": 0.6257, "step": 489 },
    { "epoch": 2.09, "learning_rate": 4.504198574358596e-05, "loss": 0.6016, "step": 490 },
    { "epoch": 2.09, "learning_rate": 4.465829097845261e-05, "loss": 0.5966, "step": 491 },
    { "epoch": 2.09, "learning_rate": 4.427576708725609e-05, "loss": 0.5442, "step": 492 },
    { "epoch": 2.1, "learning_rate": 4.389442216311933e-05, "loss": 0.6366, "step": 493 },
    { "epoch": 2.1, "learning_rate": 4.351426427422165e-05, "loss": 0.5575, "step": 494 },
    { "epoch": 2.11, "learning_rate": 4.313530146362809e-05, "loss": 0.6181, "step": 495 },
    { "epoch": 2.11, "learning_rate": 4.275754174911921e-05, "loss": 0.6203, "step": 496 },
    { "epoch": 2.11, "learning_rate": 4.2380993123021385e-05, "loss": 0.5802, "step": 497 },
    { "epoch": 2.12, "learning_rate": 4.200566355203784e-05, "loss": 0.5784, "step": 498 },
    { "epoch": 2.12, "learning_rate": 4.163156097708014e-05, "loss": 0.6171, "step": 499 },
    { "epoch": 2.13, "learning_rate": 4.1258693313099996e-05, "loss": 0.5918, "step": 500 },
    { "epoch": 2.13, "learning_rate": 4.088706844892182e-05, "loss": 0.5546, "step": 501 },
    { "epoch": 2.14, "learning_rate": 4.051669424707602e-05, "loss": 0.6072, "step": 502 },
    { "epoch": 2.14, "learning_rate": 4.014757854363249e-05, "loss": 0.618, "step": 503 },
    { "epoch": 2.14, "learning_rate": 3.977972914803486e-05, "loss": 0.5796, "step": 504 },
    { "epoch": 2.15, "learning_rate": 3.9413153842935255e-05, "loss": 0.5466, "step": 505 },
    { "epoch": 2.15, "learning_rate": 3.9047860384029675e-05, "loss": 0.5826, "step": 506 },
    { "epoch": 2.16, "learning_rate": 3.868385649989388e-05, "loss": 0.5873, "step": 507 },
    { "epoch": 2.16, "learning_rate": 3.832114989181988e-05, "loss": 0.5975, "step": 508 },
    { "epoch": 2.17, "learning_rate": 3.795974823365287e-05, "loss": 0.6079, "step": 509 },
    { "epoch": 2.17, "learning_rate": 3.759965917162925e-05, "loss": 0.5623, "step": 510 },
    { "epoch": 2.17, "learning_rate": 3.724089032421441e-05, "loss": 0.5978, "step": 511 },
    { "epoch": 2.18, "learning_rate": 3.688344928194181e-05, "loss": 0.5926, "step": 512 },
    { "epoch": 2.18, "learning_rate": 3.652734360725224e-05, "loss": 0.5724, "step": 513 },
    { "epoch": 2.19, "learning_rate": 3.617258083433396e-05, "loss": 0.5538, "step": 514 },
    { "epoch": 2.19, "learning_rate": 3.581916846896318e-05, "loss": 0.5624, "step": 515 },
    { "epoch": 2.2, "learning_rate": 3.546711398834543e-05, "loss": 0.5711, "step": 516 },
    { "epoch": 2.2, "learning_rate": 3.5116424840957065e-05, "loss": 0.5629, "step": 517 },
    { "epoch": 2.2, "learning_rate": 3.476710844638795e-05, "loss": 0.5501, "step": 518 },
    { "epoch": 2.21, "learning_rate": 3.441917219518438e-05, "loss": 0.6033, "step": 519 },
    { "epoch": 2.21, "learning_rate": 3.407262344869272e-05, "loss": 0.5593, "step": 520 },
    { "epoch": 2.22, "learning_rate": 3.3727469538903646e-05, "loss": 0.5355, "step": 521 },
    { "epoch": 2.22, "learning_rate": 3.338371776829705e-05, "loss": 0.5959, "step": 522 },
    { "epoch": 2.23, "learning_rate": 3.3041375409687526e-05, "loss": 0.5828, "step": 523 },
    { "epoch": 2.23, "learning_rate": 3.2700449706070534e-05, "loss": 0.5922, "step": 524 },
    { "epoch": 2.23, "learning_rate": 3.236094787046901e-05, "loss": 0.6114, "step": 525 },
    { "epoch": 2.24, "learning_rate": 3.202287708578097e-05, "loss": 0.605, "step": 526 },
    { "epoch": 2.24, "learning_rate": 3.168624450462746e-05, "loss": 0.6079, "step": 527 },
    { "epoch": 2.25, "learning_rate": 3.13510572492012e-05, "loss": 0.5634, "step": 528 },
    { "epoch": 2.25, "learning_rate": 3.10173224111158e-05, "loss": 0.4974, "step": 529 },
    { "epoch": 2.26, "learning_rate": 3.0685047051255946e-05, "loss": 0.551, "step": 530 },
    { "epoch": 2.26, "learning_rate": 3.035423819962785e-05, "loss": 0.6034, "step": 531 },
    { "epoch": 2.26, "learning_rate": 3.002490285521059e-05, "loss": 0.5759, "step": 532 },
    { "epoch": 2.27, "learning_rate": 2.9697047985807958e-05, "loss": 0.5502, "step": 533 },
    { "epoch": 2.27, "learning_rate": 2.9370680527901116e-05, "loss": 0.5722, "step": 534 },
    { "epoch": 2.28, "learning_rate": 2.904580738650181e-05, "loss": 0.559, "step": 535 },
    { "epoch": 2.28, "learning_rate": 2.872243543500629e-05, "loss": 0.5352, "step": 536 },
    { "epoch": 2.29, "learning_rate": 2.840057151504979e-05, "loss": 0.5857, "step": 537 },
| { |
| "epoch": 2.29, |
| "learning_rate": 2.8080222436361934e-05, |
| "loss": 0.5819, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7761394976622658e-05, |
| "loss": 0.5741, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.7444095881318656e-05, |
| "loss": 0.5694, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.712833186360072e-05, |
| "loss": 0.5797, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.6814109604141848e-05, |
| "loss": 0.5583, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.6501435750995727e-05, |
| "loss": 0.5771, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.619031691945618e-05, |
| "loss": 0.5822, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.588075969191718e-05, |
| "loss": 0.5831, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5572770617733544e-05, |
| "loss": 0.5801, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.5266356213082433e-05, |
| "loss": 0.5578, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.496152296082548e-05, |
| "loss": 0.6124, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.465827731037147e-05, |
| "loss": 0.6306, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.4356625677540233e-05, |
| "loss": 0.5585, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.405657444442657e-05, |
| "loss": 0.5284, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.3758129959265407e-05, |
| "loss": 0.6024, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.3461298536297328e-05, |
| "loss": 0.5512, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.3166086455635218e-05, |
| "loss": 0.5346, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2872499963131155e-05, |
| "loss": 0.5329, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.258054527024451e-05, |
| "loss": 0.5496, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.2290228553910242e-05, |
| "loss": 0.5877, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.2001555956408428e-05, |
| "loss": 0.5671, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.1714533585234244e-05, |
| "loss": 0.5922, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.142916751296876e-05, |
| "loss": 0.6145, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.114546377715042e-05, |
| "loss": 0.5264, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.0863428380147344e-05, |
| "loss": 0.5404, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0583067289030335e-05, |
| "loss": 0.6048, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.030438643544663e-05, |
| "loss": 0.5456, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0027391715494347e-05, |
| "loss": 0.5801, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.9752088989597795e-05, |
| "loss": 0.5813, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.9478484082383562e-05, |
| "loss": 0.5641, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.9206582782557136e-05, |
| "loss": 0.5711, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.893639084278046e-05, |
| "loss": 0.5707, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.86679139795503e-05, |
| "loss": 0.5731, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8401157873077257e-05, |
| "loss": 0.5849, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8136128167165578e-05, |
| "loss": 0.581, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.787283046909376e-05, |
| "loss": 0.613, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7611270349495924e-05, |
| "loss": 0.59, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.735145334224394e-05, |
| "loss": 0.5961, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.7093384944330393e-05, |
| "loss": 0.5323, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6837070615752115e-05, |
| "loss": 0.5652, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6582515779394968e-05, |
| "loss": 0.5533, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.632972582091884e-05, |
| "loss": 0.5579, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.6078706088643836e-05, |
| "loss": 0.5813, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5829461893437015e-05, |
| "loss": 0.5158, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.558199850860016e-05, |
| "loss": 0.5997, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.533632116975814e-05, |
| "loss": 0.5398, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.5092435074748146e-05, |
| "loss": 0.599, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.48503453835097e-05, |
| "loss": 0.6448, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4610057217975526e-05, |
| "loss": 0.589, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4371575661963143e-05, |
| "loss": 0.5743, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4134905761067329e-05, |
| "loss": 0.5822, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3900052522553397e-05, |
| "loss": 0.5397, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3667020915251173e-05, |
| "loss": 0.5798, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3435815869449964e-05, |
| "loss": 0.5432, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3206442276794207e-05, |
| "loss": 0.6011, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.297890499017992e-05, |
| "loss": 0.5629, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2753208823652141e-05, |
| "loss": 0.5176, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2529358552302972e-05, |
| "loss": 0.5281, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.2307358912170686e-05, |
| "loss": 0.5945, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.2087214600139308e-05, |
| "loss": 0.551, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.1868930273839473e-05, |
| "loss": 0.5338, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1652510551549723e-05, |
| "loss": 0.5745, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1437960012098892e-05, |
| "loss": 0.5542, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.1225283194769176e-05, |
| "loss": 0.5808, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.1014484599200125e-05, |
| "loss": 0.5374, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0805568685293422e-05, |
| "loss": 0.5708, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0598539873118552e-05, |
| "loss": 0.5936, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0393402542819231e-05, |
| "loss": 0.5621, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 1.0190161034520795e-05, |
| "loss": 0.5412, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.988819648238379e-06, |
| "loss": 0.5525, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.789382643785895e-06, |
| "loss": 0.5622, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.591854240685882e-06, |
| "loss": 0.5662, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.396238618080322e-06, |
| "loss": 0.5728, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.202539914642182e-06, |
| "loss": 0.5531, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.010762228487813e-06, |
| "loss": 0.5631, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.820909617090289e-06, |
| "loss": 0.5265, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.632986097193573e-06, |
| "loss": 0.5755, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.446995644727473e-06, |
| "loss": 0.5626, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.2629421947236e-06, |
| "loss": 0.618, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 8.080829641232013e-06, |
| "loss": 0.5434, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.900661837238977e-06, |
| "loss": 0.5201, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.72244259458531e-06, |
| "loss": 0.5886, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.546175683885814e-06, |
| "loss": 0.6115, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.371864834449405e-06, |
| "loss": 0.5531, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 7.199513734200369e-06, |
| "loss": 0.5619, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 7.029126029600197e-06, |
| "loss": 0.5434, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.860705325570494e-06, |
| "loss": 0.5542, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.694255185416687e-06, |
| "loss": 0.5529, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.529779130752678e-06, |
| "loss": 0.539, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.3672806414262765e-06, |
| "loss": 0.5577, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.206763155445627e-06, |
| "loss": 0.5307, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 6.0482300689064466e-06, |
| "loss": 0.5418, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.891684735920167e-06, |
| "loss": 0.5613, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.737130468542972e-06, |
| "loss": 0.5926, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.58457053670578e-06, |
| "loss": 0.5289, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.434008168144944e-06, |
| "loss": 0.5981, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.2854465483340725e-06, |
| "loss": 0.5581, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.1388888204165875e-06, |
| "loss": 0.5844, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.9943380851392604e-06, |
| "loss": 0.5871, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.851797400786506e-06, |
| "loss": 0.5835, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.7112697831158126e-06, |
| "loss": 0.5522, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.572758205293848e-06, |
| "loss": 0.6108, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.4362655978336e-06, |
| "loss": 0.5668, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.3017948485323255e-06, |
| "loss": 0.5755, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.169348802410522e-06, |
| "loss": 0.5658, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 4.038930261651674e-06, |
| "loss": 0.5555, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.910541985543014e-06, |
| "loss": 0.5277, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.7841866904170798e-06, |
| "loss": 0.5733, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.6598670495943123e-06, |
| "loss": 0.6289, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.537585693326484e-06, |
| "loss": 0.5355, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.4173452087410187e-06, |
| "loss": 0.6279, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.2991481397862568e-06, |
| "loss": 0.5598, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.1829969871776555e-06, |
| "loss": 0.5882, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.0688942083448967e-06, |
| "loss": 0.5646, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.9568422173798294e-06, |
| "loss": 0.6285, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.846843384985476e-06, |
| "loss": 0.5294, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.7389000384257955e-06, |
| "loss": 0.5308, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.633014461476524e-06, |
| "loss": 0.5364, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.5291888943767992e-06, |
| "loss": 0.5462, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.427425533781746e-06, |
| "loss": 0.6125, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.3277265327160904e-06, |
| "loss": 0.5888, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.2300940005285374e-06, |
| "loss": 0.5855, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.134530002847146e-06, |
| "loss": 0.5628, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.0410365615356365e-06, |
| "loss": 0.5872, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.9496156546506274e-06, |
| "loss": 0.5642, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.8602692163997681e-06, |
| "loss": 0.5872, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.7729991371008502e-06, |
| "loss": 0.6044, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.6878072631417386e-06, |
| "loss": 0.5491, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.6046953969413915e-06, |
| "loss": 0.5291, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.5236652969116804e-06, |
| "loss": 0.539, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.444718677420176e-06, |
| "loss": 0.5518, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.367857208753931e-06, |
| "loss": 0.605, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.2930825170840877e-06, |
| "loss": 0.5938, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.2203961844315048e-06, |
| "loss": 0.5656, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.1497997486332513e-06, |
| "loss": 0.5663, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0812947033101207e-06, |
| "loss": 0.5731, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 1.0148824978349792e-06, |
| "loss": 0.5909, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.505645373021455e-07, |
| "loss": 0.5732, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.883421824976479e-07, |
| "loss": 0.5457, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.282167498703918e-07, |
| "loss": 0.559, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.701895115043822e-07, |
| "loss": 0.5706, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.142616950917446e-07, |
| "loss": 0.5509, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.604344839068021e-07, |
| "loss": 0.524, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 6.087090167809839e-07, |
| "loss": 0.5933, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.590863880788111e-07, |
| "loss": 0.5565, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 5.115676476746489e-07, |
| "loss": 0.5286, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.661538009305577e-07, |
| "loss": 0.629, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.2284580867500976e-07, |
| "loss": 0.5606, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.8164458718255025e-07, |
| "loss": 0.5308, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.4255100815442365e-07, |
| "loss": 0.5966, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 3.0556589870012196e-07, |
| "loss": 0.5551, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.7069004131987653e-07, |
| "loss": 0.5597, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 2.379241738881377e-07, |
| "loss": 0.5856, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 2.0726898963793205e-07, |
| "loss": 0.5698, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.78725137146174e-07, |
| "loss": 0.5685, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.5229322032002115e-07, |
| "loss": 0.5562, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.27973798384029e-07, |
| "loss": 0.5893, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 1.0576738586831614e-07, |
| "loss": 0.5793, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.567445259775042e-08, |
| "loss": 0.5354, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 6.769542368190162e-08, |
| "loss": 0.5889, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 5.183067950617071e-08, |
| "loss": 0.5501, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.808055572362967e-08, |
| "loss": 0.5596, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 2.6445343247982755e-08, |
| "loss": 0.5942, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 1.6925288247393588e-08, |
| "loss": 0.5747, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 9.52059213927825e-09, |
| "loss": 0.5802, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 4.231411586064216e-09, |
| "loss": 0.5792, |
| "step": 703 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 1.0578584918374823e-09, |
| "loss": 0.5585, |
| "step": 704 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.0, |
| "loss": 0.4152, |
| "step": 705 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 705, |
| "total_flos": 59407669002240.0, |
| "train_loss": 0.9080784771036594, |
| "train_runtime": 4849.5227, |
| "train_samples_per_second": 18.568, |
| "train_steps_per_second": 0.145 |
| } |
| ], |
| "max_steps": 705, |
| "num_train_epochs": 3, |
| "total_flos": 59407669002240.0, |
| "trial_name": null, |
| "trial_params": null |
| } |
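
What precedes is the tail of a Hugging Face `transformers` `Trainer` state log (`trainer_state.json`): per-step records of `epoch`, `learning_rate`, and `loss`, followed by a run-summary record (`train_loss`, `train_runtime`, throughput) and the top-level fields. As a minimal sketch of how such a state file can be consumed, assuming it is saved as `trainer_state.json` and that matplotlib is available (both assumptions, not part of the log itself):

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state; the filename is an assumption for illustration.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep per-step records only; the last log_history entry is a run summary
# ("train_loss", "train_runtime", ...) with no per-step "loss"/"learning_rate".
logs = [r for r in state["log_history"] if "loss" in r and "learning_rate" in r]
steps = [r["step"] for r in logs]

fig, loss_ax = plt.subplots()
loss_ax.plot(steps, [r["loss"] for r in logs], label="train loss")
loss_ax.set_xlabel("step")
loss_ax.set_ylabel("loss")

# Plot the warmup + cosine-decay schedule visible in the log on a twin axis.
lr_ax = loss_ax.twinx()
lr_ax.plot(steps, [r["learning_rate"] for r in logs], color="tab:orange")
lr_ax.set_ylabel("learning_rate")

fig.suptitle(f"{state['global_step']} steps / {state['num_train_epochs']} epochs")
fig.savefig("training_curves.png")
```

Note that the summary record's `train_loss` (≈0.908) is the mean over all 705 steps, which is why it sits well above the ~0.55–0.6 per-step losses logged near the end of epoch 3.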