Training in progress, step 3500, checkpoint
last-checkpoint/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cb53436017de402cbb3e140080f626c219565b34350f5e9e7ce03a50b8632f27
 size 737580392
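The binary files in this commit are stored as Git LFS pointers: each pointer records the spec version, the sha256 oid of the stored object, and its size in bytes, and the commit only swaps the oid to point at the new checkpoint contents. A minimal sketch for listing the tensors in the updated weights file, assuming the checkpoint directory has been pulled locally (e.g. with git lfs pull); the relative path is illustrative:

# Not part of this commit: inspect the tensors stored in model.safetensors.
from safetensors import safe_open

with safe_open("last-checkpoint/model.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        # get_slice reads only the header metadata, so the full tensor is not loaded
        print(name, f.get_slice(name).get_shape())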
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b5cf04a4be9db3b89f43af102704d1b2bcf9d56231564ef6cbf77066898e38fe
 size 1475248442
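Because the pointer's oid is the sha256 digest of the object it references, a downloaded copy can be checked against the value shown in the hunk above. A minimal sketch, assuming a local copy of optimizer.pt; the expected digest is the new oid from this diff:

# Not part of this commit: verify a pulled LFS object against its pointer oid.
import hashlib

def file_sha256(path, chunk_size=1 << 20):
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "b5cf04a4be9db3b89f43af102704d1b2bcf9d56231564ef6cbf77066898e38fe"
print(file_sha256("last-checkpoint/optimizer.pt") == expected)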
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6381e104397d82ea368bc2021f98181e9d693a8a280be6f239801ae904b1ab83
 size 1000
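Together, model.safetensors, optimizer.pt, scheduler.pt, and trainer_state.json are the pieces a Trainer-style resume reloads at step 3500. A minimal sketch for peeking at the saved optimizer and scheduler states, assuming local copies of the files; the exact keys depend on the optimizer and scheduler classes used in the run:

# Not part of this commit: inspect the optimizer/scheduler state dicts.
import torch

scheduler_state = torch.load("last-checkpoint/scheduler.pt", map_location="cpu")
optimizer_state = torch.load("last-checkpoint/optimizer.pt", map_location="cpu")
print(sorted(scheduler_state.keys()))  # e.g. last_epoch / _last_lr for LambdaLR-style schedulers
print(list(optimizer_state.keys()))    # typically 'state' and 'param_groups'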
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 1.
+  "epoch": 1.640112464854733,
   "eval_steps": 2.0,
-  "global_step":
+  "global_step": 3500,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2107,6 +2107,356 @@
       "learning_rate": 2.82427366447985e-05,
       "loss": 0.2173,
       "step": 3000
+    },
+    {
+      "epoch": 1.4104967197750704,
+      "grad_norm": 1.8364263772964478,
+      "learning_rate": 2.8236879100281162e-05,
+      "loss": 0.2239,
+      "step": 3010
+    },
+    {
+      "epoch": 1.415182755388941,
+      "grad_norm": 1.565895676612854,
+      "learning_rate": 2.8231021555763825e-05,
+      "loss": 0.2123,
+      "step": 3020
+    },
+    {
+      "epoch": 1.4198687910028116,
+      "grad_norm": 1.5840765237808228,
+      "learning_rate": 2.8225164011246487e-05,
+      "loss": 0.2115,
+      "step": 3030
+    },
+    {
+      "epoch": 1.4245548266166823,
+      "grad_norm": 1.595318078994751,
+      "learning_rate": 2.821930646672915e-05,
+      "loss": 0.1864,
+      "step": 3040
+    },
+    {
+      "epoch": 1.429240862230553,
+      "grad_norm": 1.6979650259017944,
+      "learning_rate": 2.821344892221181e-05,
+      "loss": 0.212,
+      "step": 3050
+    },
+    {
+      "epoch": 1.4339268978444237,
+      "grad_norm": 1.9207825660705566,
+      "learning_rate": 2.8207591377694474e-05,
+      "loss": 0.2162,
+      "step": 3060
+    },
+    {
+      "epoch": 1.4386129334582942,
+      "grad_norm": 1.597359538078308,
+      "learning_rate": 2.8201733833177133e-05,
+      "loss": 0.2067,
+      "step": 3070
+    },
+    {
+      "epoch": 1.443298969072165,
+      "grad_norm": 1.8102576732635498,
+      "learning_rate": 2.8195876288659796e-05,
+      "loss": 0.2056,
+      "step": 3080
+    },
+    {
+      "epoch": 1.4479850046860356,
+      "grad_norm": 1.665094256401062,
+      "learning_rate": 2.8190018744142455e-05,
+      "loss": 0.1952,
+      "step": 3090
+    },
+    {
+      "epoch": 1.4526710402999063,
+      "grad_norm": 1.6013555526733398,
+      "learning_rate": 2.818416119962512e-05,
+      "loss": 0.1906,
+      "step": 3100
+    },
+    {
+      "epoch": 1.457357075913777,
+      "grad_norm": 1.7371183633804321,
+      "learning_rate": 2.817830365510778e-05,
+      "loss": 0.2575,
+      "step": 3110
+    },
+    {
+      "epoch": 1.4620431115276475,
+      "grad_norm": 1.4190640449523926,
+      "learning_rate": 2.817244611059044e-05,
+      "loss": 0.203,
+      "step": 3120
+    },
+    {
+      "epoch": 1.4667291471415183,
+      "grad_norm": 1.9308714866638184,
+      "learning_rate": 2.81665885660731e-05,
+      "loss": 0.232,
+      "step": 3130
+    },
+    {
+      "epoch": 1.471415182755389,
+      "grad_norm": 1.9426283836364746,
+      "learning_rate": 2.8160731021555766e-05,
+      "loss": 0.1885,
+      "step": 3140
+    },
+    {
+      "epoch": 1.4761012183692597,
+      "grad_norm": 1.6833382844924927,
+      "learning_rate": 2.8154873477038425e-05,
+      "loss": 0.1841,
+      "step": 3150
+    },
+    {
+      "epoch": 1.4807872539831304,
+      "grad_norm": 1.7456724643707275,
+      "learning_rate": 2.8149015932521088e-05,
+      "loss": 0.2141,
+      "step": 3160
+    },
+    {
+      "epoch": 1.4854732895970009,
+      "grad_norm": 1.4256864786148071,
+      "learning_rate": 2.8143158388003747e-05,
+      "loss": 0.218,
+      "step": 3170
+    },
+    {
+      "epoch": 1.4901593252108716,
+      "grad_norm": 2.305438280105591,
+      "learning_rate": 2.8137300843486413e-05,
+      "loss": 0.234,
+      "step": 3180
+    },
+    {
+      "epoch": 1.4948453608247423,
+      "grad_norm": 1.771371841430664,
+      "learning_rate": 2.813144329896907e-05,
+      "loss": 0.2251,
+      "step": 3190
+    },
+    {
+      "epoch": 1.499531396438613,
+      "grad_norm": 1.5220532417297363,
+      "learning_rate": 2.8125585754451734e-05,
+      "loss": 0.2034,
+      "step": 3200
+    },
+    {
+      "epoch": 1.5042174320524837,
+      "grad_norm": 1.782240390777588,
+      "learning_rate": 2.8119728209934396e-05,
+      "loss": 0.2212,
+      "step": 3210
+    },
+    {
+      "epoch": 1.5089034676663542,
+      "grad_norm": 1.8846886157989502,
+      "learning_rate": 2.811387066541706e-05,
+      "loss": 0.1933,
+      "step": 3220
+    },
+    {
+      "epoch": 1.513589503280225,
+      "grad_norm": 1.5082029104232788,
+      "learning_rate": 2.8108013120899718e-05,
+      "loss": 0.1816,
+      "step": 3230
+    },
+    {
+      "epoch": 1.5182755388940956,
+      "grad_norm": 1.4220314025878906,
+      "learning_rate": 2.810215557638238e-05,
+      "loss": 0.2041,
+      "step": 3240
+    },
+    {
+      "epoch": 1.522961574507966,
+      "grad_norm": 2.5237534046173096,
+      "learning_rate": 2.8096298031865043e-05,
+      "loss": 0.2104,
+      "step": 3250
+    },
+    {
+      "epoch": 1.527647610121837,
+      "grad_norm": 1.8641676902770996,
+      "learning_rate": 2.8090440487347705e-05,
+      "loss": 0.2253,
+      "step": 3260
+    },
+    {
+      "epoch": 1.5323336457357075,
+      "grad_norm": 1.6378123760223389,
+      "learning_rate": 2.8084582942830367e-05,
+      "loss": 0.1995,
+      "step": 3270
+    },
+    {
+      "epoch": 1.5370196813495782,
+      "grad_norm": 1.7604594230651855,
+      "learning_rate": 2.8078725398313026e-05,
+      "loss": 0.2078,
+      "step": 3280
+    },
+    {
+      "epoch": 1.541705716963449,
+      "grad_norm": 2.0301594734191895,
+      "learning_rate": 2.8072867853795692e-05,
+      "loss": 0.2207,
+      "step": 3290
+    },
+    {
+      "epoch": 1.5463917525773194,
+      "grad_norm": 1.7639228105545044,
+      "learning_rate": 2.806701030927835e-05,
+      "loss": 0.2543,
+      "step": 3300
+    },
+    {
+      "epoch": 1.5510777881911904,
+      "grad_norm": 1.3346034288406372,
+      "learning_rate": 2.8061152764761013e-05,
+      "loss": 0.206,
+      "step": 3310
+    },
+    {
+      "epoch": 1.5557638238050608,
+      "grad_norm": 1.0351877212524414,
+      "learning_rate": 2.8055295220243672e-05,
+      "loss": 0.2225,
+      "step": 3320
+    },
+    {
+      "epoch": 1.5604498594189316,
+      "grad_norm": 1.6889092922210693,
+      "learning_rate": 2.8049437675726338e-05,
+      "loss": 0.2162,
+      "step": 3330
+    },
+    {
+      "epoch": 1.5651358950328023,
+      "grad_norm": 1.7433794736862183,
+      "learning_rate": 2.8043580131208997e-05,
+      "loss": 0.2479,
+      "step": 3340
+    },
+    {
+      "epoch": 1.569821930646673,
+      "grad_norm": 1.883331060409546,
+      "learning_rate": 2.803772258669166e-05,
+      "loss": 0.2169,
+      "step": 3350
+    },
+    {
+      "epoch": 1.5745079662605437,
+      "grad_norm": 1.9640493392944336,
+      "learning_rate": 2.8031865042174322e-05,
+      "loss": 0.2186,
+      "step": 3360
+    },
+    {
+      "epoch": 1.5791940018744142,
+      "grad_norm": 1.5412628650665283,
+      "learning_rate": 2.8026007497656984e-05,
+      "loss": 0.2374,
+      "step": 3370
+    },
+    {
+      "epoch": 1.5838800374882849,
+      "grad_norm": 1.5945382118225098,
+      "learning_rate": 2.8020149953139643e-05,
+      "loss": 0.2022,
+      "step": 3380
+    },
+    {
+      "epoch": 1.5885660731021556,
+      "grad_norm": 1.8626048564910889,
+      "learning_rate": 2.8014292408622306e-05,
+      "loss": 0.2264,
+      "step": 3390
+    },
+    {
+      "epoch": 1.5932521087160263,
+      "grad_norm": 1.4371192455291748,
+      "learning_rate": 2.8008434864104968e-05,
+      "loss": 0.2032,
+      "step": 3400
+    },
+    {
+      "epoch": 1.597938144329897,
+      "grad_norm": 1.1432685852050781,
+      "learning_rate": 2.800257731958763e-05,
+      "loss": 0.1795,
+      "step": 3410
+    },
+    {
+      "epoch": 1.6026241799437675,
+      "grad_norm": 1.5578374862670898,
+      "learning_rate": 2.799671977507029e-05,
+      "loss": 0.2066,
+      "step": 3420
+    },
+    {
+      "epoch": 1.6073102155576382,
+      "grad_norm": 1.7294032573699951,
+      "learning_rate": 2.7990862230552952e-05,
+      "loss": 0.2319,
+      "step": 3430
+    },
+    {
+      "epoch": 1.611996251171509,
+      "grad_norm": 1.1976913213729858,
+      "learning_rate": 2.7985004686035614e-05,
+      "loss": 0.1805,
+      "step": 3440
+    },
+    {
+      "epoch": 1.6166822867853796,
+      "grad_norm": 1.4271414279937744,
+      "learning_rate": 2.7979147141518277e-05,
+      "loss": 0.2059,
+      "step": 3450
+    },
+    {
+      "epoch": 1.6213683223992503,
+      "grad_norm": 2.3763184547424316,
+      "learning_rate": 2.7973289597000936e-05,
+      "loss": 0.2138,
+      "step": 3460
+    },
+    {
+      "epoch": 1.6260543580131208,
+      "grad_norm": 2.302598476409912,
+      "learning_rate": 2.7967432052483598e-05,
+      "loss": 0.2158,
+      "step": 3470
+    },
+    {
+      "epoch": 1.6307403936269915,
+      "grad_norm": 1.8099658489227295,
+      "learning_rate": 2.796157450796626e-05,
+      "loss": 0.1839,
+      "step": 3480
+    },
+    {
+      "epoch": 1.6354264292408622,
+      "grad_norm": 1.8809782266616821,
+      "learning_rate": 2.7955716963448923e-05,
+      "loss": 0.1924,
+      "step": 3490
+    },
+    {
+      "epoch": 1.640112464854733,
+      "grad_norm": 1.3889508247375488,
+      "learning_rate": 2.7949859418931585e-05,
+      "loss": 0.1801,
+      "step": 3500
     }
   ],
   "logging_steps": 10,
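The appended log_history entries (steps 3010 through 3500) can be read back from the checkpoint to trace the loss and learning-rate curve. A minimal sketch, assuming a local copy of trainer_state.json:

# Not part of this commit: read the logged losses for the newly added steps.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print("global_step:", state["global_step"], "epoch:", state["epoch"])
for entry in state["log_history"]:
    if entry.get("step", 0) > 3000 and "loss" in entry:
        print(entry["step"], entry["loss"], entry["learning_rate"])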