Training in progress, step 350, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:552e477a98c0768adcd4647d633d95db645032bd3f24e1fbfae67c6bab7019be
 size 1279323952
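The 1.28 GB adapter_model.safetensors appears, from its name, to follow the PEFT (LoRA-style) adapter convention. A minimal sketch of loading it back onto a base model, assuming the run used the peft library; "base-model-id" is a hypothetical placeholder, since the commit does not name the base model:

```python
from peft import PeftModel
from transformers import AutoModelForCausalLM

# "base-model-id" is a placeholder; the commit does not identify the base model.
base = AutoModelForCausalLM.from_pretrained("base-model-id")
model = PeftModel.from_pretrained(base, "last-checkpoint")
```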
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:398600ed212b3e574407dad7720c69a44d8b3f066be917bd7839a180244a2e8b
 size 650153044
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4c974a18173c5b253003de5cec0751303a41dda5f56e281ac6e5b3f9dfe4eec8
 size 14244
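rng_state.pth captures the random-number-generator states at save time so a resumed run continues with the same shuffling and dropout draws. A minimal sketch of inspecting it, assuming it is an ordinary pickled dict as the Trainer saves:

```python
import torch

# weights_only=False because the file is a pickled dict of RNG states,
# not a plain tensor checkpoint.
rng_state = torch.load("last-checkpoint/rng_state.pth", weights_only=False)
print(sorted(rng_state))  # typically cpu / cuda / numpy / python states
```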
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ef796d1416c59d81a908d0d533ec75f7cbfbe54b55c3fcdeb18a69501c216c1e
 size 1064
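All four files above live in Git LFS, so each diff touches only the three-line pointer: the spec version, the sha256 oid of the blob, and its byte size. A minimal sketch of checking a downloaded blob against such a pointer; verify_lfs_pointer is a hypothetical helper, not part of any LFS tooling:

```python
import hashlib


def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against its three-line Git LFS pointer."""
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    actual_size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            actual_size += len(chunk)
    return digest.hexdigest() == expected_oid and actual_size == expected_size
```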
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": 1.4393945932388306,
   "best_model_checkpoint": "miner_id_24/checkpoint-200",
-  "epoch": 0.
+  "epoch": 0.0764515556526369,
   "eval_steps": 50,
-  "global_step":
+  "global_step": 350,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
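trainer_state.json is plain JSON, so the bookkeeping updated in this hunk can be read back directly; a minimal sketch against the checkpoint layout above:

```python
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"])            # 350
print(state["epoch"])                  # 0.0764515556526369
print(state["best_metric"])            # 1.4393945932388306 (best eval loss, step 200)
print(state["best_model_checkpoint"])  # miner_id_24/checkpoint-200
```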
@@ -2163,6 +2163,364 @@
       "eval_samples_per_second": 4.645,
       "eval_steps_per_second": 4.645,
       "step": 300
+    },
+    {
+      "epoch": 0.06574833786126773,
+      "grad_norm": 1.63740873336792,
+      "learning_rate": 0.0002996673213206589,
+      "loss": 1.9815,
+      "step": 301
+    },
+    {
+      "epoch": 0.06596677087741812,
+      "grad_norm": 1.046769380569458,
+      "learning_rate": 0.000299665031793473,
+      "loss": 1.6878,
+      "step": 302
+    },
+    {
+      "epoch": 0.06618520389356851,
+      "grad_norm": 0.513561487197876,
+      "learning_rate": 0.0002996627344237122,
+      "loss": 1.5141,
+      "step": 303
+    },
+    {
+      "epoch": 0.0664036369097189,
+      "grad_norm": 0.38992562890052795,
+      "learning_rate": 0.0002996604292114971,
+      "loss": 1.1818,
+      "step": 304
+    },
+    {
+      "epoch": 0.06662206992586929,
+      "grad_norm": 0.4074482023715973,
+      "learning_rate": 0.00029965811615694844,
+      "loss": 1.4627,
+      "step": 305
+    },
+    {
+      "epoch": 0.06684050294201968,
+      "grad_norm": 0.41403788328170776,
+      "learning_rate": 0.0002996557952601874,
+      "loss": 1.5682,
+      "step": 306
+    },
+    {
+      "epoch": 0.06705893595817007,
+      "grad_norm": 0.4047700762748718,
+      "learning_rate": 0.00029965346652133565,
+      "loss": 1.0902,
+      "step": 307
+    },
+    {
+      "epoch": 0.06727736897432046,
+      "grad_norm": 0.4367327094078064,
+      "learning_rate": 0.0002996511299405151,
+      "loss": 1.3241,
+      "step": 308
+    },
+    {
+      "epoch": 0.06749580199047087,
+      "grad_norm": 0.40244677662849426,
+      "learning_rate": 0.0002996487855178483,
+      "loss": 1.2218,
+      "step": 309
+    },
+    {
+      "epoch": 0.06771423500662126,
+      "grad_norm": 0.42241427302360535,
+      "learning_rate": 0.0002996464332534581,
+      "loss": 1.0742,
+      "step": 310
+    },
+    {
+      "epoch": 0.06793266802277165,
+      "grad_norm": 0.4748789966106415,
+      "learning_rate": 0.00029964407314746764,
+      "loss": 1.3064,
+      "step": 311
+    },
+    {
+      "epoch": 0.06815110103892204,
+      "grad_norm": 0.4514020085334778,
+      "learning_rate": 0.00029964170520000073,
+      "loss": 1.1954,
+      "step": 312
+    },
+    {
+      "epoch": 0.06836953405507243,
+      "grad_norm": 0.5442508459091187,
+      "learning_rate": 0.0002996393294111814,
+      "loss": 1.2674,
+      "step": 313
+    },
+    {
+      "epoch": 0.06858796707122282,
+      "grad_norm": 0.4211152195930481,
+      "learning_rate": 0.00029963694578113417,
+      "loss": 0.9404,
+      "step": 314
+    },
+    {
+      "epoch": 0.06880640008737321,
+      "grad_norm": 0.44201675057411194,
+      "learning_rate": 0.0002996345543099839,
+      "loss": 1.363,
+      "step": 315
+    },
+    {
+      "epoch": 0.0690248331035236,
+      "grad_norm": 0.4818100333213806,
+      "learning_rate": 0.0002996321549978559,
+      "loss": 1.1752,
+      "step": 316
+    },
+    {
+      "epoch": 0.06924326611967399,
+      "grad_norm": 0.5382769107818604,
+      "learning_rate": 0.0002996297478448759,
+      "loss": 1.3113,
+      "step": 317
+    },
+    {
+      "epoch": 0.06946169913582438,
+      "grad_norm": 0.44174817204475403,
+      "learning_rate": 0.0002996273328511701,
+      "loss": 1.4191,
+      "step": 318
+    },
+    {
+      "epoch": 0.06968013215197477,
+      "grad_norm": 0.5009336471557617,
+      "learning_rate": 0.000299624910016865,
+      "loss": 1.1101,
+      "step": 319
+    },
+    {
+      "epoch": 0.06989856516812516,
+      "grad_norm": 0.4469683766365051,
+      "learning_rate": 0.0002996224793420875,
+      "loss": 1.2823,
+      "step": 320
+    },
+    {
+      "epoch": 0.07011699818427555,
+      "grad_norm": 0.4948316216468811,
+      "learning_rate": 0.0002996200408269651,
+      "loss": 1.3302,
+      "step": 321
+    },
+    {
+      "epoch": 0.07033543120042594,
+      "grad_norm": 0.5020711421966553,
+      "learning_rate": 0.0002996175944716255,
+      "loss": 1.0552,
+      "step": 322
+    },
+    {
+      "epoch": 0.07055386421657633,
+      "grad_norm": 0.528529703617096,
+      "learning_rate": 0.0002996151402761969,
+      "loss": 1.2313,
+      "step": 323
+    },
+    {
+      "epoch": 0.07077229723272672,
+      "grad_norm": 0.48190081119537354,
+      "learning_rate": 0.00029961267824080785,
+      "loss": 1.2774,
+      "step": 324
+    },
+    {
+      "epoch": 0.07099073024887712,
+      "grad_norm": 0.43585675954818726,
+      "learning_rate": 0.0002996102083655875,
+      "loss": 0.8383,
+      "step": 325
+    },
+    {
+      "epoch": 0.0712091632650275,
+      "grad_norm": 0.4349531829357147,
+      "learning_rate": 0.00029960773065066515,
+      "loss": 1.0135,
+      "step": 326
+    },
+    {
+      "epoch": 0.0714275962811779,
+      "grad_norm": 0.45253872871398926,
+      "learning_rate": 0.00029960524509617067,
+      "loss": 1.0758,
+      "step": 327
+    },
+    {
+      "epoch": 0.07164602929732829,
+      "grad_norm": 0.48265528678894043,
+      "learning_rate": 0.0002996027517022343,
+      "loss": 1.0895,
+      "step": 328
+    },
+    {
+      "epoch": 0.07186446231347868,
+      "grad_norm": 0.4599095582962036,
+      "learning_rate": 0.0002996002504689867,
+      "loss": 0.9751,
+      "step": 329
+    },
+    {
+      "epoch": 0.07208289532962907,
+      "grad_norm": 0.4035511016845703,
+      "learning_rate": 0.000299597741396559,
+      "loss": 0.9046,
+      "step": 330
+    },
+    {
+      "epoch": 0.07230132834577946,
+      "grad_norm": 0.44477182626724243,
+      "learning_rate": 0.0002995952244850826,
+      "loss": 0.8351,
+      "step": 331
+    },
+    {
+      "epoch": 0.07251976136192985,
+      "grad_norm": 0.49605095386505127,
+      "learning_rate": 0.00029959269973468935,
+      "loss": 1.1617,
+      "step": 332
+    },
+    {
+      "epoch": 0.07273819437808025,
+      "grad_norm": 0.4683946967124939,
+      "learning_rate": 0.00029959016714551165,
+      "loss": 0.815,
+      "step": 333
+    },
+    {
+      "epoch": 0.07295662739423064,
+      "grad_norm": 0.5074111223220825,
+      "learning_rate": 0.00029958762671768223,
+      "loss": 1.08,
+      "step": 334
+    },
+    {
+      "epoch": 0.07317506041038103,
+      "grad_norm": 0.4522974491119385,
+      "learning_rate": 0.000299585078451334,
+      "loss": 1.0445,
+      "step": 335
+    },
+    {
+      "epoch": 0.07339349342653143,
+      "grad_norm": 0.49050214886665344,
+      "learning_rate": 0.00029958252234660077,
+      "loss": 1.023,
+      "step": 336
+    },
+    {
+      "epoch": 0.07361192644268182,
+      "grad_norm": 0.5193130373954773,
+      "learning_rate": 0.0002995799584036163,
+      "loss": 0.9875,
+      "step": 337
+    },
+    {
+      "epoch": 0.0738303594588322,
+      "grad_norm": 0.4777398109436035,
+      "learning_rate": 0.000299577386622515,
+      "loss": 1.0041,
+      "step": 338
+    },
+    {
+      "epoch": 0.0740487924749826,
+      "grad_norm": 0.5704786777496338,
+      "learning_rate": 0.0002995748070034317,
+      "loss": 1.2806,
+      "step": 339
+    },
+    {
+      "epoch": 0.07426722549113299,
+      "grad_norm": 0.5965543985366821,
+      "learning_rate": 0.0002995722195465013,
+      "loss": 1.2449,
+      "step": 340
+    },
+    {
+      "epoch": 0.07448565850728338,
+      "grad_norm": 0.5655717849731445,
+      "learning_rate": 0.0002995696242518598,
+      "loss": 1.492,
+      "step": 341
+    },
+    {
+      "epoch": 0.07470409152343377,
+      "grad_norm": 0.8123418092727661,
+      "learning_rate": 0.0002995670211196429,
+      "loss": 1.2705,
+      "step": 342
+    },
+    {
+      "epoch": 0.07492252453958416,
+      "grad_norm": 0.6008387804031372,
+      "learning_rate": 0.0002995644101499871,
+      "loss": 1.2037,
+      "step": 343
+    },
+    {
+      "epoch": 0.07514095755573455,
+      "grad_norm": 0.601810872554779,
+      "learning_rate": 0.0002995617913430292,
+      "loss": 1.2099,
+      "step": 344
+    },
+    {
+      "epoch": 0.07535939057188494,
+      "grad_norm": 0.589198887348175,
+      "learning_rate": 0.0002995591646989064,
+      "loss": 1.4846,
+      "step": 345
+    },
+    {
+      "epoch": 0.07557782358803533,
+      "grad_norm": 0.6595737338066101,
+      "learning_rate": 0.00029955653021775634,
+      "loss": 1.356,
+      "step": 346
+    },
+    {
+      "epoch": 0.07579625660418572,
+      "grad_norm": 0.5962746739387512,
+      "learning_rate": 0.00029955388789971717,
+      "loss": 1.1548,
+      "step": 347
+    },
+    {
+      "epoch": 0.07601468962033611,
+      "grad_norm": 0.6426790952682495,
+      "learning_rate": 0.0002995512377449273,
+      "loss": 1.16,
+      "step": 348
+    },
+    {
+      "epoch": 0.0762331226364865,
+      "grad_norm": 0.6003267765045166,
+      "learning_rate": 0.00029954857975352553,
+      "loss": 1.1259,
+      "step": 349
+    },
+    {
+      "epoch": 0.0764515556526369,
+      "grad_norm": 0.8509035110473633,
+      "learning_rate": 0.0002995459139256512,
+      "loss": 1.0266,
+      "step": 350
+    },
+    {
+      "epoch": 0.0764515556526369,
+      "eval_loss": 1.4707661867141724,
+      "eval_runtime": 79.3549,
+      "eval_samples_per_second": 4.65,
+      "eval_steps_per_second": 4.65,
+      "step": 350
     }
   ],
   "logging_steps": 1,
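The 358 added lines are fifty per-step training records (steps 301 to 350) plus one evaluation record at step 350. Assuming they sit under the Trainer's standard log_history key, the loss curves can be pulled out in a few lines:

```python
import json

with open("last-checkpoint/trainer_state.json") as f:
    history = json.load(f)["log_history"]

# Training records carry "loss"; evaluation records carry "eval_loss".
train = [(r["step"], r["loss"]) for r in history if "loss" in r]
evals = [(r["step"], r["eval_loss"]) for r in history if "eval_loss" in r]
print(train[-1])  # (350, 1.0266)
print(evals[-1])  # (350, 1.4707661867141724)
```

Note that the eval loss at step 350 (1.4708) is still above the best metric from step 200 (1.4394), which is what drives the early-stopping state in the next hunk.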
@@ -2177,7 +2535,7 @@
         "early_stopping_threshold": 0.0
       },
       "attributes": {
-        "early_stopping_patience_counter":
+        "early_stopping_patience_counter": 3
       }
     },
     "TrainerControl": {
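The patience counter reaching 3 here, together with "should_training_stop": true in the next hunk, indicates three consecutive evaluations (steps 250, 300 and 350) without improvement on the step-200 best metric. A sketch of a configuration that would produce this state, assuming the run used transformers' EarlyStoppingCallback:

```python
from transformers import EarlyStoppingCallback

# A patience of 3 with threshold 0.0 stops training once three successive
# evaluations fail to improve on best_metric, matching the counter above.
early_stop = EarlyStoppingCallback(
    early_stopping_patience=3,
    early_stopping_threshold=0.0,
)
# Passed to the run's Trainer via `callbacks=[early_stop]`.
```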
@@ -2186,12 +2544,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
-  "total_flos": 1.
+  "total_flos": 1.1919058703430451e+17,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
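Together, the adapter weights, optimizer moments, scheduler state, and RNG state committed above are exactly what is needed to continue the run deterministically. A minimal sketch, assuming a Trainer instance built with the same model, data, and arguments as the original run:

```python
from transformers import Trainer


def resume(trainer: Trainer) -> None:
    # Continues from global_step 350, restoring optimizer, scheduler and
    # RNG states from the checkpoint files instead of starting fresh.
    trainer.train(resume_from_checkpoint="last-checkpoint")
```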