Training in progress, step 485, checkpoint

last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:76f2c4430b5b6a36e6fd64130c460fd6ac9bca3f537fe77cdcd2ce79edadeda6
 size 17425352

last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ef36ee3a6466132a10f7dd39b6df85f79be0008b0dac346fb45ca6dcc51b2d46
 size 10252116

last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:eb3a55c0d51d72207d8e4f9d755cd1d052529fce2f6f77001df64ad34434ec04
 size 14244

last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d182cc7d860fac568756f7927e2c97705e4fae5142bc06f9dfb2821437b3c9f9
 size 1064

last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
+  "epoch": 1.000515729757607,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 485,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2569,6 +2569,847 @@
       "learning_rate": 1.4414019692241437e-05,
       "loss": 1.983,
       "step": 366
+    },
+    {
+      "epoch": 0.7570912841670965,
+      "grad_norm": 0.32588624954223633,
+      "learning_rate": 1.4184902828767287e-05,
+      "loss": 1.9556,
+      "step": 367
+    },
+    {
+      "epoch": 0.7591542031975245,
+      "grad_norm": 0.3390493392944336,
+      "learning_rate": 1.3957320164854059e-05,
+      "loss": 2.0438,
+      "step": 368
+    },
+    {
+      "epoch": 0.7612171222279526,
+      "grad_norm": 0.3364558815956116,
+      "learning_rate": 1.373128144938563e-05,
+      "loss": 1.9517,
+      "step": 369
+    },
+    {
+      "epoch": 0.7632800412583806,
+      "grad_norm": 0.35238152742385864,
+      "learning_rate": 1.3506796365108232e-05,
+      "loss": 1.8902,
+      "step": 370
+    },
+    {
+      "epoch": 0.7653429602888087,
+      "grad_norm": 0.4001123607158661,
+      "learning_rate": 1.3283874528215733e-05,
+      "loss": 1.9648,
+      "step": 371
+    },
+    {
+      "epoch": 0.7674058793192368,
+      "grad_norm": 0.3558734953403473,
+      "learning_rate": 1.3062525487937699e-05,
+      "loss": 1.9309,
+      "step": 372
+    },
+    {
+      "epoch": 0.7694687983496648,
+      "grad_norm": 0.3952629268169403,
+      "learning_rate": 1.2842758726130283e-05,
+      "loss": 1.903,
+      "step": 373
+    },
+    {
+      "epoch": 0.7715317173800929,
+      "grad_norm": 0.39679232239723206,
+      "learning_rate": 1.2624583656870154e-05,
+      "loss": 1.9308,
+      "step": 374
+    },
+    {
+      "epoch": 0.7735946364105208,
+      "grad_norm": 0.4120374023914337,
+      "learning_rate": 1.2408009626051137e-05,
+      "loss": 1.9435,
+      "step": 375
+    },
+    {
+      "epoch": 0.7756575554409489,
+      "grad_norm": 0.4087306261062622,
+      "learning_rate": 1.2193045910983863e-05,
+      "loss": 2.0166,
+      "step": 376
+    },
+    {
+      "epoch": 0.777720474471377,
+      "grad_norm": 0.3904735743999481,
+      "learning_rate": 1.1979701719998453e-05,
+      "loss": 1.9106,
+      "step": 377
+    },
+    {
+      "epoch": 0.779783393501805,
+      "grad_norm": 0.4384593665599823,
+      "learning_rate": 1.1767986192049984e-05,
+      "loss": 1.8016,
+      "step": 378
+    },
+    {
+      "epoch": 0.7818463125322331,
+      "grad_norm": 0.45144447684288025,
+      "learning_rate": 1.1557908396327028e-05,
+      "loss": 2.0395,
+      "step": 379
+    },
+    {
+      "epoch": 0.7839092315626611,
+      "grad_norm": 0.46523964405059814,
+      "learning_rate": 1.134947733186315e-05,
+      "loss": 1.8943,
+      "step": 380
+    },
+    {
+      "epoch": 0.7859721505930892,
+      "grad_norm": 0.4391476809978485,
+      "learning_rate": 1.1142701927151456e-05,
+      "loss": 1.9817,
+      "step": 381
+    },
+    {
+      "epoch": 0.7880350696235173,
+      "grad_norm": 0.47244301438331604,
+      "learning_rate": 1.0937591039762085e-05,
+      "loss": 1.9667,
+      "step": 382
+    },
+    {
+      "epoch": 0.7900979886539453,
+      "grad_norm": 0.5111503601074219,
+      "learning_rate": 1.0734153455962765e-05,
+      "loss": 1.7975,
+      "step": 383
+    },
+    {
+      "epoch": 0.7921609076843734,
+      "grad_norm": 0.49108216166496277,
+      "learning_rate": 1.0532397890342505e-05,
+      "loss": 1.9077,
+      "step": 384
+    },
+    {
+      "epoch": 0.7942238267148014,
+      "grad_norm": 0.493367463350296,
+      "learning_rate": 1.0332332985438248e-05,
+      "loss": 1.818,
+      "step": 385
+    },
+    {
+      "epoch": 0.7962867457452295,
+      "grad_norm": 0.5333409309387207,
+      "learning_rate": 1.013396731136465e-05,
+      "loss": 1.9223,
+      "step": 386
+    },
+    {
+      "epoch": 0.7983496647756576,
+      "grad_norm": 0.559504508972168,
+      "learning_rate": 9.937309365446973e-06,
+      "loss": 1.961,
+      "step": 387
+    },
+    {
+      "epoch": 0.8004125838060856,
+      "grad_norm": 0.588911771774292,
+      "learning_rate": 9.742367571857091e-06,
+      "loss": 1.9333,
+      "step": 388
+    },
+    {
+      "epoch": 0.8024755028365137,
+      "grad_norm": 0.6506195068359375,
+      "learning_rate": 9.549150281252633e-06,
+      "loss": 1.9279,
+      "step": 389
+    },
+    {
+      "epoch": 0.8045384218669417,
+      "grad_norm": 0.6028062105178833,
+      "learning_rate": 9.357665770419244e-06,
+      "loss": 1.7466,
+      "step": 390
+    },
+    {
+      "epoch": 0.8066013408973698,
+      "grad_norm": 0.6397696137428284,
+      "learning_rate": 9.167922241916055e-06,
+      "loss": 2.044,
+      "step": 391
+    },
+    {
+      "epoch": 0.8086642599277978,
+      "grad_norm": 0.6506878733634949,
+      "learning_rate": 8.97992782372432e-06,
+      "loss": 1.7423,
+      "step": 392
+    },
+    {
+      "epoch": 0.8107271789582259,
+      "grad_norm": 0.7176264524459839,
+      "learning_rate": 8.793690568899216e-06,
+      "loss": 1.9563,
+      "step": 393
+    },
+    {
+      "epoch": 0.812790097988654,
+      "grad_norm": 0.6943826079368591,
+      "learning_rate": 8.609218455224893e-06,
+      "loss": 1.8613,
+      "step": 394
+    },
+    {
+      "epoch": 0.814853017019082,
+      "grad_norm": 0.7640383243560791,
+      "learning_rate": 8.426519384872733e-06,
+      "loss": 1.8667,
+      "step": 395
+    },
+    {
+      "epoch": 0.8169159360495101,
+      "grad_norm": 0.8125397562980652,
+      "learning_rate": 8.245601184062852e-06,
+      "loss": 1.7427,
+      "step": 396
+    },
+    {
+      "epoch": 0.8189788550799381,
+      "grad_norm": 0.9590041041374207,
+      "learning_rate": 8.066471602728803e-06,
+      "loss": 2.1324,
+      "step": 397
+    },
+    {
+      "epoch": 0.8210417741103662,
+      "grad_norm": 1.1071752309799194,
+      "learning_rate": 7.889138314185678e-06,
+      "loss": 1.7715,
+      "step": 398
+    },
+    {
+      "epoch": 0.8231046931407943,
+      "grad_norm": 1.3562276363372803,
+      "learning_rate": 7.71360891480134e-06,
+      "loss": 2.0792,
+      "step": 399
+    },
+    {
+      "epoch": 0.8251676121712223,
+      "grad_norm": 1.4422872066497803,
+      "learning_rate": 7.539890923671062e-06,
+      "loss": 2.1979,
+      "step": 400
+    },
+    {
+      "epoch": 0.8272305312016504,
+      "grad_norm": 0.21497759222984314,
+      "learning_rate": 7.367991782295391e-06,
+      "loss": 1.8834,
+      "step": 401
+    },
+    {
+      "epoch": 0.8292934502320783,
+      "grad_norm": 0.2113157957792282,
+      "learning_rate": 7.197918854261432e-06,
+      "loss": 1.9432,
+      "step": 402
+    },
+    {
+      "epoch": 0.8313563692625064,
+      "grad_norm": 0.2200651615858078,
+      "learning_rate": 7.029679424927365e-06,
+      "loss": 1.9745,
+      "step": 403
+    },
+    {
+      "epoch": 0.8334192882929345,
+      "grad_norm": 0.2158503383398056,
+      "learning_rate": 6.863280701110408e-06,
+      "loss": 1.9833,
+      "step": 404
+    },
+    {
+      "epoch": 0.8354822073233625,
+      "grad_norm": 0.218078151345253,
+      "learning_rate": 6.698729810778065e-06,
+      "loss": 1.9317,
+      "step": 405
+    },
+    {
+      "epoch": 0.8375451263537906,
+      "grad_norm": 0.2388862818479538,
+      "learning_rate": 6.536033802742813e-06,
+      "loss": 1.9091,
+      "step": 406
+    },
+    {
+      "epoch": 0.8396080453842186,
+      "grad_norm": 0.25134262442588806,
+      "learning_rate": 6.375199646360142e-06,
+      "loss": 1.9376,
+      "step": 407
+    },
+    {
+      "epoch": 0.8416709644146467,
+      "grad_norm": 0.2656765580177307,
+      "learning_rate": 6.216234231230012e-06,
+      "loss": 1.87,
+      "step": 408
+    },
+    {
+      "epoch": 0.8437338834450748,
+      "grad_norm": 0.29165396094322205,
+      "learning_rate": 6.059144366901736e-06,
+      "loss": 1.9119,
+      "step": 409
+    },
+    {
+      "epoch": 0.8457968024755028,
+      "grad_norm": 0.2790237367153168,
+      "learning_rate": 5.903936782582253e-06,
+      "loss": 1.9169,
+      "step": 410
+    },
+    {
+      "epoch": 0.8478597215059309,
+      "grad_norm": 0.2730090022087097,
+      "learning_rate": 5.750618126847912e-06,
+      "loss": 1.8922,
+      "step": 411
+    },
+    {
+      "epoch": 0.8499226405363589,
+      "grad_norm": 0.2939763069152832,
+      "learning_rate": 5.599194967359639e-06,
+      "loss": 1.944,
+      "step": 412
+    },
+    {
+      "epoch": 0.851985559566787,
+      "grad_norm": 0.3193075656890869,
+      "learning_rate": 5.449673790581611e-06,
+      "loss": 1.9055,
+      "step": 413
+    },
+    {
+      "epoch": 0.8540484785972151,
+      "grad_norm": 0.31024137139320374,
+      "learning_rate": 5.302061001503394e-06,
+      "loss": 1.9672,
+      "step": 414
+    },
+    {
+      "epoch": 0.8561113976276431,
+      "grad_norm": 0.31645095348358154,
+      "learning_rate": 5.156362923365588e-06,
+      "loss": 1.9433,
+      "step": 415
+    },
+    {
+      "epoch": 0.8581743166580712,
+      "grad_norm": 0.30905312299728394,
+      "learning_rate": 5.012585797388936e-06,
+      "loss": 1.8743,
+      "step": 416
+    },
+    {
+      "epoch": 0.8602372356884992,
+      "grad_norm": 0.3501185476779938,
+      "learning_rate": 4.87073578250698e-06,
+      "loss": 1.907,
+      "step": 417
+    },
+    {
+      "epoch": 0.8623001547189273,
+      "grad_norm": 0.34194281697273254,
+      "learning_rate": 4.730818955102234e-06,
+      "loss": 1.9042,
+      "step": 418
+    },
+    {
+      "epoch": 0.8643630737493554,
+      "grad_norm": 0.32330402731895447,
+      "learning_rate": 4.592841308745932e-06,
+      "loss": 1.8961,
+      "step": 419
+    },
+    {
+      "epoch": 0.8664259927797834,
+      "grad_norm": 0.35270601511001587,
+      "learning_rate": 4.456808753941205e-06,
+      "loss": 1.8992,
+      "step": 420
+    },
+    {
+      "epoch": 0.8684889118102115,
+      "grad_norm": 0.3494870662689209,
+      "learning_rate": 4.322727117869951e-06,
+      "loss": 1.7888,
+      "step": 421
+    },
+    {
+      "epoch": 0.8705518308406395,
+      "grad_norm": 0.36365413665771484,
+      "learning_rate": 4.190602144143207e-06,
+      "loss": 1.8911,
+      "step": 422
+    },
+    {
+      "epoch": 0.8726147498710676,
+      "grad_norm": 0.37552961707115173,
+      "learning_rate": 4.06043949255509e-06,
+      "loss": 1.7402,
+      "step": 423
+    },
+    {
+      "epoch": 0.8746776689014956,
+      "grad_norm": 0.359493225812912,
+      "learning_rate": 3.932244738840379e-06,
+      "loss": 1.8655,
+      "step": 424
+    },
+    {
+      "epoch": 0.8767405879319237,
+      "grad_norm": 0.40204378962516785,
+      "learning_rate": 3.8060233744356633e-06,
+      "loss": 1.9102,
+      "step": 425
+    },
+    {
+      "epoch": 0.8788035069623518,
+      "grad_norm": 0.3866499364376068,
+      "learning_rate": 3.681780806244095e-06,
+      "loss": 2.0126,
+      "step": 426
+    },
+    {
+      "epoch": 0.8808664259927798,
+      "grad_norm": 0.41565510630607605,
+      "learning_rate": 3.5595223564037884e-06,
+      "loss": 1.848,
+      "step": 427
+    },
+    {
+      "epoch": 0.8829293450232079,
+      "grad_norm": 0.42392176389694214,
+      "learning_rate": 3.4392532620598216e-06,
+      "loss": 1.9023,
+      "step": 428
+    },
+    {
+      "epoch": 0.8849922640536358,
+      "grad_norm": 0.43753212690353394,
+      "learning_rate": 3.3209786751399187e-06,
+      "loss": 1.8755,
+      "step": 429
+    },
+    {
+      "epoch": 0.887055183084064,
+      "grad_norm": 0.41941091418266296,
+      "learning_rate": 3.2047036621337236e-06,
+      "loss": 1.845,
+      "step": 430
+    },
+    {
+      "epoch": 0.889118102114492,
+      "grad_norm": 0.47605544328689575,
+      "learning_rate": 3.0904332038757977e-06,
+      "loss": 1.9215,
+      "step": 431
+    },
+    {
+      "epoch": 0.89118102114492,
+      "grad_norm": 0.4815257489681244,
+      "learning_rate": 2.978172195332263e-06,
+      "loss": 1.8724,
+      "step": 432
+    },
+    {
+      "epoch": 0.8932439401753481,
+      "grad_norm": 0.5292302966117859,
+      "learning_rate": 2.8679254453910785e-06,
+      "loss": 1.9883,
+      "step": 433
+    },
+    {
+      "epoch": 0.8953068592057761,
+      "grad_norm": 0.5305167436599731,
+      "learning_rate": 2.759697676656098e-06,
+      "loss": 1.9377,
+      "step": 434
+    },
+    {
+      "epoch": 0.8973697782362042,
+      "grad_norm": 0.5222244262695312,
+      "learning_rate": 2.653493525244721e-06,
+      "loss": 2.0211,
+      "step": 435
+    },
+    {
+      "epoch": 0.8994326972666323,
+      "grad_norm": 0.5317448377609253,
+      "learning_rate": 2.549317540589308e-06,
+      "loss": 1.7099,
+      "step": 436
+    },
+    {
+      "epoch": 0.9014956162970603,
+      "grad_norm": 0.5942634344100952,
+      "learning_rate": 2.4471741852423237e-06,
+      "loss": 1.8725,
+      "step": 437
+    },
+    {
+      "epoch": 0.9035585353274884,
+      "grad_norm": 0.5700388550758362,
+      "learning_rate": 2.3470678346851518e-06,
+      "loss": 1.81,
+      "step": 438
+    },
+    {
+      "epoch": 0.9056214543579164,
+      "grad_norm": 0.6428921818733215,
+      "learning_rate": 2.2490027771406687e-06,
+      "loss": 1.9537,
+      "step": 439
+    },
+    {
+      "epoch": 0.9076843733883445,
+      "grad_norm": 0.6690090894699097,
+      "learning_rate": 2.152983213389559e-06,
+      "loss": 1.9448,
+      "step": 440
+    },
+    {
+      "epoch": 0.9097472924187726,
+      "grad_norm": 0.7563278675079346,
+      "learning_rate": 2.0590132565903476e-06,
+      "loss": 1.924,
+      "step": 441
+    },
+    {
+      "epoch": 0.9118102114492006,
+      "grad_norm": 0.7350179553031921,
+      "learning_rate": 1.9670969321032407e-06,
+      "loss": 1.9793,
+      "step": 442
+    },
+    {
+      "epoch": 0.9138731304796287,
+      "grad_norm": 0.7105688452720642,
+      "learning_rate": 1.8772381773176417e-06,
+      "loss": 1.8525,
+      "step": 443
+    },
+    {
+      "epoch": 0.9159360495100567,
+      "grad_norm": 0.6866640448570251,
+      "learning_rate": 1.7894408414835362e-06,
+      "loss": 1.8795,
+      "step": 444
+    },
+    {
+      "epoch": 0.9179989685404848,
+      "grad_norm": 0.846955418586731,
+      "learning_rate": 1.70370868554659e-06,
+      "loss": 2.0021,
+      "step": 445
+    },
+    {
+      "epoch": 0.9200618875709129,
+      "grad_norm": 0.9408524036407471,
+      "learning_rate": 1.620045381987012e-06,
+      "loss": 1.9355,
+      "step": 446
+    },
+    {
+      "epoch": 0.9221248066013409,
+      "grad_norm": 0.9612482786178589,
+      "learning_rate": 1.5384545146622852e-06,
+      "loss": 1.8513,
+      "step": 447
+    },
+    {
+      "epoch": 0.924187725631769,
+      "grad_norm": 1.189302682876587,
+      "learning_rate": 1.4589395786535953e-06,
+      "loss": 2.2855,
+      "step": 448
+    },
+    {
+      "epoch": 0.926250644662197,
+      "grad_norm": 1.494503378868103,
+      "learning_rate": 1.3815039801161721e-06,
+      "loss": 2.0451,
+      "step": 449
+    },
+    {
+      "epoch": 0.9283135636926251,
+      "grad_norm": 2.0391123294830322,
+      "learning_rate": 1.3061510361333185e-06,
+      "loss": 2.315,
+      "step": 450
+    },
+    {
+      "epoch": 0.9303764827230531,
+      "grad_norm": 0.1975540965795517,
+      "learning_rate": 1.232883974574367e-06,
+      "loss": 1.8567,
+      "step": 451
+    },
+    {
+      "epoch": 0.9324394017534812,
+      "grad_norm": 0.2123524248600006,
+      "learning_rate": 1.1617059339563807e-06,
+      "loss": 1.9558,
+      "step": 452
+    },
+    {
+      "epoch": 0.9345023207839093,
+      "grad_norm": 0.23298895359039307,
+      "learning_rate": 1.0926199633097157e-06,
+      "loss": 1.9029,
+      "step": 453
+    },
+    {
+      "epoch": 0.9365652398143373,
+      "grad_norm": 0.24727781116962433,
+      "learning_rate": 1.0256290220474307e-06,
+      "loss": 1.9507,
+      "step": 454
+    },
+    {
+      "epoch": 0.9386281588447654,
+      "grad_norm": 0.24942223727703094,
+      "learning_rate": 9.607359798384785e-07,
+      "loss": 1.9074,
+      "step": 455
+    },
+    {
+      "epoch": 0.9406910778751933,
+      "grad_norm": 0.28238189220428467,
+      "learning_rate": 8.979436164848088e-07,
+      "loss": 1.932,
+      "step": 456
+    },
+    {
+      "epoch": 0.9427539969056214,
+      "grad_norm": 0.2833389341831207,
+      "learning_rate": 8.372546218022747e-07,
+      "loss": 1.912,
+      "step": 457
+    },
+    {
+      "epoch": 0.9448169159360496,
+      "grad_norm": 0.29291990399360657,
+      "learning_rate": 7.786715955054203e-07,
+      "loss": 1.9424,
+      "step": 458
+    },
+    {
+      "epoch": 0.9468798349664775,
+      "grad_norm": 0.2940320372581482,
+      "learning_rate": 7.221970470961125e-07,
+      "loss": 1.847,
+      "step": 459
+    },
+    {
+      "epoch": 0.9489427539969056,
+      "grad_norm": 0.32583457231521606,
+      "learning_rate": 6.678333957560512e-07,
+      "loss": 1.8331,
+      "step": 460
+    },
+    {
+      "epoch": 0.9510056730273336,
+      "grad_norm": 0.339235782623291,
+      "learning_rate": 6.15582970243117e-07,
+      "loss": 2.0272,
+      "step": 461
+    },
+    {
+      "epoch": 0.9530685920577617,
+      "grad_norm": 0.3748321533203125,
+      "learning_rate": 5.654480087916303e-07,
+      "loss": 1.9018,
+      "step": 462
+    },
+    {
+      "epoch": 0.9551315110881898,
+      "grad_norm": 0.3701133131980896,
+      "learning_rate": 5.174306590164879e-07,
+      "loss": 2.0178,
+      "step": 463
+    },
+    {
+      "epoch": 0.9571944301186178,
+      "grad_norm": 0.3596010208129883,
+      "learning_rate": 4.715329778211375e-07,
+      "loss": 1.929,
+      "step": 464
+    },
+    {
+      "epoch": 0.9592573491490459,
+      "grad_norm": 0.3654044568538666,
+      "learning_rate": 4.277569313094809e-07,
+      "loss": 1.9229,
+      "step": 465
+    },
+    {
+      "epoch": 0.9613202681794739,
+      "grad_norm": 0.3960861563682556,
+      "learning_rate": 3.8610439470164737e-07,
+      "loss": 1.9158,
+      "step": 466
+    },
+    {
+      "epoch": 0.963383187209902,
+      "grad_norm": 0.40447795391082764,
+      "learning_rate": 3.465771522536854e-07,
+      "loss": 1.8699,
+      "step": 467
+    },
+    {
+      "epoch": 0.9654461062403301,
+      "grad_norm": 0.4091316759586334,
+      "learning_rate": 3.09176897181096e-07,
+      "loss": 2.0017,
+      "step": 468
+    },
+    {
+      "epoch": 0.9675090252707581,
+      "grad_norm": 0.48065048456192017,
+      "learning_rate": 2.7390523158633554e-07,
+      "loss": 2.0616,
+      "step": 469
+    },
+    {
+      "epoch": 0.9695719443011862,
+      "grad_norm": 0.45243510603904724,
+      "learning_rate": 2.407636663901591e-07,
+      "loss": 1.8828,
+      "step": 470
+    },
+    {
+      "epoch": 0.9716348633316142,
+      "grad_norm": 0.44933196902275085,
+      "learning_rate": 2.0975362126691712e-07,
+      "loss": 1.9103,
+      "step": 471
+    },
+    {
+      "epoch": 0.9736977823620423,
+      "grad_norm": 0.4988139569759369,
+      "learning_rate": 1.8087642458373134e-07,
+      "loss": 1.9078,
+      "step": 472
+    },
+    {
+      "epoch": 0.9757607013924704,
+      "grad_norm": 0.5141667127609253,
+      "learning_rate": 1.5413331334360182e-07,
+      "loss": 2.0586,
+      "step": 473
+    },
+    {
+      "epoch": 0.9778236204228984,
+      "grad_norm": 0.5143790245056152,
+      "learning_rate": 1.2952543313240472e-07,
+      "loss": 1.8032,
+      "step": 474
+    },
+    {
+      "epoch": 0.9798865394533265,
+      "grad_norm": 0.5681686997413635,
+      "learning_rate": 1.0705383806982606e-07,
+      "loss": 1.8457,
+      "step": 475
+    },
+    {
+      "epoch": 0.9819494584837545,
+      "grad_norm": 0.5598201155662537,
+      "learning_rate": 8.671949076420882e-08,
+      "loss": 1.8757,
+      "step": 476
+    },
+    {
+      "epoch": 0.9840123775141826,
+      "grad_norm": 0.6028151512145996,
+      "learning_rate": 6.852326227130834e-08,
+      "loss": 1.8709,
+      "step": 477
+    },
+    {
+      "epoch": 0.9860752965446106,
+      "grad_norm": 0.6729274392127991,
+      "learning_rate": 5.246593205699424e-08,
+      "loss": 1.9253,
+      "step": 478
+    },
+    {
+      "epoch": 0.9881382155750387,
+      "grad_norm": 0.755814254283905,
+      "learning_rate": 3.8548187963854956e-08,
+      "loss": 2.0362,
+      "step": 479
+    },
+    {
+      "epoch": 0.9902011346054668,
+      "grad_norm": 0.8245273232460022,
+      "learning_rate": 2.6770626181715773e-08,
+      "loss": 1.9278,
+      "step": 480
+    },
+    {
+      "epoch": 0.9922640536358948,
+      "grad_norm": 0.860695481300354,
+      "learning_rate": 1.7133751222137007e-08,
+      "loss": 2.1189,
+      "step": 481
+    },
+    {
+      "epoch": 0.9943269726663229,
+      "grad_norm": 0.953133225440979,
+      "learning_rate": 9.637975896759077e-09,
+      "loss": 2.0525,
+      "step": 482
+    },
+    {
+      "epoch": 0.9963898916967509,
+      "grad_norm": 1.0958987474441528,
+      "learning_rate": 4.2836212996499865e-09,
+      "loss": 2.0937,
+      "step": 483
+    },
+    {
+      "epoch": 0.998452810727179,
+      "grad_norm": 1.2235816717147827,
+      "learning_rate": 1.0709167935385455e-09,
+      "loss": 2.1888,
+      "step": 484
+    },
+    {
+      "epoch": 0.998452810727179,
+      "eval_loss": 1.9379545450210571,
+      "eval_runtime": 7.3082,
+      "eval_samples_per_second": 55.964,
+      "eval_steps_per_second": 14.094,
+      "step": 484
+    },
+    {
+      "epoch": 1.000515729757607,
+      "grad_norm": 2.641697883605957,
+      "learning_rate": 0.0,
+      "loss": 2.8277,
+      "step": 485
     }
   ],
   "logging_steps": 1,
@@ -2583,12 +3424,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
-  "total_flos": 1.
+  "total_flos": 1.55117730103296e+16,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null