Training in progress, step 900, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4b4b90a5607c4c89779d74ed998538024d8ed9c56ed457658cbb21b7501a98b1
 size 170415112
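The three lines above are a Git LFS pointer, not the weights themselves: only the sha256 object id changes with each checkpoint, while the file size stays 170415112 bytes. Since the file is named adapter_model.safetensors, this checkpoint is a PEFT adapter rather than full model weights. A minimal sketch of how such an adapter directory is typically loaded with peft follows; the base model name and the local checkpoint path are placeholders assumed for illustration, not values taken from this repository.

# Load a LoRA/PEFT adapter checkpoint onto its base model (sketch; names are placeholders).
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model-name")        # assumption: the original base model
model = PeftModel.from_pretrained(base, "last-checkpoint")            # reads adapter_model.safetensors
                                                                      # (expects adapter_config.json alongside it)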
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f05fe41dcbf4010e2a33a61e1c9c3a9944cef5a8eb0c696fd735cba7dce7e97a
 size 86718091
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:718a0f3db00824213036a2c0441849791319b7d9cf189065873bb26a7020738e
 size 14645
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cfbae3a66128dad6557b2d73ff80d6296191d3bebac48c319a2ce570fd79e5df
 size 1465
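Together with the adapter, optimizer.pt, scheduler.pt, rng_state.pth and the trainer_state.json shown next are the state the Hugging Face Trainer saves so a run can be resumed exactly where it stopped. Below is a hedged sketch of how such a checkpoint is usually resumed; model and train_dataset are assumed placeholders (as in the loading sketch above), output_dir is illustrative, and only per_device_train_batch_size=8, logging_steps=1 and eval_steps=500 are taken from the trainer_state.json diff below.

# Resume training from the saved checkpoint directory (sketch; model/train_dataset are placeholders).
from transformers import Trainer, TrainingArguments

args = TrainingArguments(
    output_dir="outputs",           # assumption: the run directory that contains last-checkpoint/
    per_device_train_batch_size=8,  # matches "train_batch_size": 8 in trainer_state.json
    logging_steps=1,                # matches "logging_steps": 1
    eval_steps=500,                 # matches "eval_steps": 500
)
trainer = Trainer(model=model, args=args, train_dataset=train_dataset)
trainer.train(resume_from_checkpoint="outputs/last-checkpoint")  # restores optimizer, scheduler and RNG state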
last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
 "best_global_step": null,
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch":
+"epoch": 5.0,
 "eval_steps": 500,
-"global_step":
+"global_step": 900,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -4208,6 +4208,2106 @@
 "learning_rate": 6.726256983240223e-05,
 "loss": 0.9836,
 "step": 600
 }
 ],
 "logging_steps": 1,
@@ -4222,12 +6322,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 8,
 "trial_name": null,
 "trial_params": null
| 4211 |
+
},
|
| 4212 |
+
{
|
| 4213 |
+
"epoch": 3.338888888888889,
|
| 4214 |
+
"grad_norm": 0.07593747973442078,
|
| 4215 |
+
"learning_rate": 6.70391061452514e-05,
|
| 4216 |
+
"loss": 0.9246,
|
| 4217 |
+
"step": 601
|
| 4218 |
+
},
|
| 4219 |
+
{
|
| 4220 |
+
"epoch": 3.3444444444444446,
|
| 4221 |
+
"grad_norm": 0.06724842637777328,
|
| 4222 |
+
"learning_rate": 6.681564245810056e-05,
|
| 4223 |
+
"loss": 0.8799,
|
| 4224 |
+
"step": 602
|
| 4225 |
+
},
|
| 4226 |
+
{
|
| 4227 |
+
"epoch": 3.35,
|
| 4228 |
+
"grad_norm": 0.0651603415608406,
|
| 4229 |
+
"learning_rate": 6.659217877094972e-05,
|
| 4230 |
+
"loss": 0.9114,
|
| 4231 |
+
"step": 603
|
| 4232 |
+
},
|
| 4233 |
+
{
|
| 4234 |
+
"epoch": 3.3555555555555556,
|
| 4235 |
+
"grad_norm": 0.06483855843544006,
|
| 4236 |
+
"learning_rate": 6.636871508379888e-05,
|
| 4237 |
+
"loss": 0.8418,
|
| 4238 |
+
"step": 604
|
| 4239 |
+
},
|
| 4240 |
+
{
|
| 4241 |
+
"epoch": 3.361111111111111,
|
| 4242 |
+
"grad_norm": 0.07553449273109436,
|
| 4243 |
+
"learning_rate": 6.614525139664804e-05,
|
| 4244 |
+
"loss": 0.8239,
|
| 4245 |
+
"step": 605
|
| 4246 |
+
},
|
| 4247 |
+
{
|
| 4248 |
+
"epoch": 3.3666666666666667,
|
| 4249 |
+
"grad_norm": 0.06333702057600021,
|
| 4250 |
+
"learning_rate": 6.59217877094972e-05,
|
| 4251 |
+
"loss": 0.8718,
|
| 4252 |
+
"step": 606
|
| 4253 |
+
},
|
| 4254 |
+
{
|
| 4255 |
+
"epoch": 3.3722222222222222,
|
| 4256 |
+
"grad_norm": 0.06561273336410522,
|
| 4257 |
+
"learning_rate": 6.569832402234637e-05,
|
| 4258 |
+
"loss": 1.0049,
|
| 4259 |
+
"step": 607
|
| 4260 |
+
},
|
| 4261 |
+
{
|
| 4262 |
+
"epoch": 3.3777777777777778,
|
| 4263 |
+
"grad_norm": 0.05982871726155281,
|
| 4264 |
+
"learning_rate": 6.547486033519553e-05,
|
| 4265 |
+
"loss": 0.8011,
|
| 4266 |
+
"step": 608
|
| 4267 |
+
},
|
| 4268 |
+
{
|
| 4269 |
+
"epoch": 3.3833333333333333,
|
| 4270 |
+
"grad_norm": 0.06759478896856308,
|
| 4271 |
+
"learning_rate": 6.52513966480447e-05,
|
| 4272 |
+
"loss": 0.973,
|
| 4273 |
+
"step": 609
|
| 4274 |
+
},
|
| 4275 |
+
{
|
| 4276 |
+
"epoch": 3.388888888888889,
|
| 4277 |
+
"grad_norm": 0.06581491231918335,
|
| 4278 |
+
"learning_rate": 6.502793296089386e-05,
|
| 4279 |
+
"loss": 0.9308,
|
| 4280 |
+
"step": 610
|
| 4281 |
+
},
|
| 4282 |
+
{
|
| 4283 |
+
"epoch": 3.3944444444444444,
|
| 4284 |
+
"grad_norm": 0.06133756786584854,
|
| 4285 |
+
"learning_rate": 6.480446927374302e-05,
|
| 4286 |
+
"loss": 0.8157,
|
| 4287 |
+
"step": 611
|
| 4288 |
+
},
|
| 4289 |
+
{
|
| 4290 |
+
"epoch": 3.4,
|
| 4291 |
+
"grad_norm": 0.07396920770406723,
|
| 4292 |
+
"learning_rate": 6.458100558659218e-05,
|
| 4293 |
+
"loss": 0.8828,
|
| 4294 |
+
"step": 612
|
| 4295 |
+
},
|
| 4296 |
+
{
|
| 4297 |
+
"epoch": 3.4055555555555554,
|
| 4298 |
+
"grad_norm": 0.06472024321556091,
|
| 4299 |
+
"learning_rate": 6.435754189944134e-05,
|
| 4300 |
+
"loss": 0.7748,
|
| 4301 |
+
"step": 613
|
| 4302 |
+
},
|
| 4303 |
+
{
|
| 4304 |
+
"epoch": 3.411111111111111,
|
| 4305 |
+
"grad_norm": 0.06640879809856415,
|
| 4306 |
+
"learning_rate": 6.41340782122905e-05,
|
| 4307 |
+
"loss": 0.8435,
|
| 4308 |
+
"step": 614
|
| 4309 |
+
},
|
| 4310 |
+
{
|
| 4311 |
+
"epoch": 3.4166666666666665,
|
| 4312 |
+
"grad_norm": 0.07183702290058136,
|
| 4313 |
+
"learning_rate": 6.391061452513967e-05,
|
| 4314 |
+
"loss": 0.9378,
|
| 4315 |
+
"step": 615
|
| 4316 |
+
},
|
| 4317 |
+
{
|
| 4318 |
+
"epoch": 3.422222222222222,
|
| 4319 |
+
"grad_norm": 0.0727454274892807,
|
| 4320 |
+
"learning_rate": 6.368715083798883e-05,
|
| 4321 |
+
"loss": 0.9332,
|
| 4322 |
+
"step": 616
|
| 4323 |
+
},
|
| 4324 |
+
{
|
| 4325 |
+
"epoch": 3.4277777777777776,
|
| 4326 |
+
"grad_norm": 0.06644386053085327,
|
| 4327 |
+
"learning_rate": 6.346368715083799e-05,
|
| 4328 |
+
"loss": 0.9157,
|
| 4329 |
+
"step": 617
|
| 4330 |
+
},
|
| 4331 |
+
{
|
| 4332 |
+
"epoch": 3.4333333333333336,
|
| 4333 |
+
"grad_norm": 0.07607833296060562,
|
| 4334 |
+
"learning_rate": 6.324022346368715e-05,
|
| 4335 |
+
"loss": 0.7545,
|
| 4336 |
+
"step": 618
|
| 4337 |
+
},
|
| 4338 |
+
{
|
| 4339 |
+
"epoch": 3.438888888888889,
|
| 4340 |
+
"grad_norm": 0.056877944618463516,
|
| 4341 |
+
"learning_rate": 6.301675977653632e-05,
|
| 4342 |
+
"loss": 0.8431,
|
| 4343 |
+
"step": 619
|
| 4344 |
+
},
|
| 4345 |
+
{
|
| 4346 |
+
"epoch": 3.4444444444444446,
|
| 4347 |
+
"grad_norm": 0.0674533024430275,
|
| 4348 |
+
"learning_rate": 6.279329608938548e-05,
|
| 4349 |
+
"loss": 0.908,
|
| 4350 |
+
"step": 620
|
| 4351 |
+
},
|
| 4352 |
+
{
|
| 4353 |
+
"epoch": 3.45,
|
| 4354 |
+
"grad_norm": 0.06054288148880005,
|
| 4355 |
+
"learning_rate": 6.256983240223464e-05,
|
| 4356 |
+
"loss": 0.9227,
|
| 4357 |
+
"step": 621
|
| 4358 |
+
},
|
| 4359 |
+
{
|
| 4360 |
+
"epoch": 3.4555555555555557,
|
| 4361 |
+
"grad_norm": 0.06399651616811752,
|
| 4362 |
+
"learning_rate": 6.23463687150838e-05,
|
| 4363 |
+
"loss": 0.8975,
|
| 4364 |
+
"step": 622
|
| 4365 |
+
},
|
| 4366 |
+
{
|
| 4367 |
+
"epoch": 3.4611111111111112,
|
| 4368 |
+
"grad_norm": 0.06626058369874954,
|
| 4369 |
+
"learning_rate": 6.212290502793297e-05,
|
| 4370 |
+
"loss": 0.9773,
|
| 4371 |
+
"step": 623
|
| 4372 |
+
},
|
| 4373 |
+
{
|
| 4374 |
+
"epoch": 3.466666666666667,
|
| 4375 |
+
"grad_norm": 0.060863859951496124,
|
| 4376 |
+
"learning_rate": 6.189944134078213e-05,
|
| 4377 |
+
"loss": 1.0029,
|
| 4378 |
+
"step": 624
|
| 4379 |
+
},
|
| 4380 |
+
{
|
| 4381 |
+
"epoch": 3.4722222222222223,
|
| 4382 |
+
"grad_norm": 0.0591045580804348,
|
| 4383 |
+
"learning_rate": 6.167597765363129e-05,
|
| 4384 |
+
"loss": 0.8533,
|
| 4385 |
+
"step": 625
|
| 4386 |
+
},
|
| 4387 |
+
{
|
| 4388 |
+
"epoch": 3.477777777777778,
|
| 4389 |
+
"grad_norm": 0.056986406445503235,
|
| 4390 |
+
"learning_rate": 6.145251396648045e-05,
|
| 4391 |
+
"loss": 0.8151,
|
| 4392 |
+
"step": 626
|
| 4393 |
+
},
|
| 4394 |
+
{
|
| 4395 |
+
"epoch": 3.4833333333333334,
|
| 4396 |
+
"grad_norm": 0.05965357646346092,
|
| 4397 |
+
"learning_rate": 6.122905027932962e-05,
|
| 4398 |
+
"loss": 0.9094,
|
| 4399 |
+
"step": 627
|
| 4400 |
+
},
|
| 4401 |
+
{
|
| 4402 |
+
"epoch": 3.488888888888889,
|
| 4403 |
+
"grad_norm": 0.05617017298936844,
|
| 4404 |
+
"learning_rate": 6.100558659217878e-05,
|
| 4405 |
+
"loss": 0.7653,
|
| 4406 |
+
"step": 628
|
| 4407 |
+
},
|
| 4408 |
+
{
|
| 4409 |
+
"epoch": 3.4944444444444445,
|
| 4410 |
+
"grad_norm": 0.09308428317308426,
|
| 4411 |
+
"learning_rate": 6.078212290502794e-05,
|
| 4412 |
+
"loss": 0.9318,
|
| 4413 |
+
"step": 629
|
| 4414 |
+
},
|
| 4415 |
+
{
|
| 4416 |
+
"epoch": 3.5,
|
| 4417 |
+
"grad_norm": 0.06707720458507538,
|
| 4418 |
+
"learning_rate": 6.05586592178771e-05,
|
| 4419 |
+
"loss": 1.0462,
|
| 4420 |
+
"step": 630
|
| 4421 |
+
},
|
| 4422 |
+
{
|
| 4423 |
+
"epoch": 3.5055555555555555,
|
| 4424 |
+
"grad_norm": 0.05806177482008934,
|
| 4425 |
+
"learning_rate": 6.0335195530726265e-05,
|
| 4426 |
+
"loss": 0.8771,
|
| 4427 |
+
"step": 631
|
| 4428 |
+
},
|
| 4429 |
+
{
|
| 4430 |
+
"epoch": 3.511111111111111,
|
| 4431 |
+
"grad_norm": 0.060027267783880234,
|
| 4432 |
+
"learning_rate": 6.011173184357543e-05,
|
| 4433 |
+
"loss": 0.8578,
|
| 4434 |
+
"step": 632
|
| 4435 |
+
},
|
| 4436 |
+
{
|
| 4437 |
+
"epoch": 3.5166666666666666,
|
| 4438 |
+
"grad_norm": 0.06268207728862762,
|
| 4439 |
+
"learning_rate": 5.988826815642459e-05,
|
| 4440 |
+
"loss": 0.8026,
|
| 4441 |
+
"step": 633
|
| 4442 |
+
},
|
| 4443 |
+
{
|
| 4444 |
+
"epoch": 3.522222222222222,
|
| 4445 |
+
"grad_norm": 0.06065778434276581,
|
| 4446 |
+
"learning_rate": 5.966480446927375e-05,
|
| 4447 |
+
"loss": 0.8887,
|
| 4448 |
+
"step": 634
|
| 4449 |
+
},
|
| 4450 |
+
{
|
| 4451 |
+
"epoch": 3.5277777777777777,
|
| 4452 |
+
"grad_norm": 0.06879955530166626,
|
| 4453 |
+
"learning_rate": 5.9441340782122914e-05,
|
| 4454 |
+
"loss": 0.9636,
|
| 4455 |
+
"step": 635
|
| 4456 |
+
},
|
| 4457 |
+
{
|
| 4458 |
+
"epoch": 3.533333333333333,
|
| 4459 |
+
"grad_norm": 0.062394339591264725,
|
| 4460 |
+
"learning_rate": 5.9217877094972076e-05,
|
| 4461 |
+
"loss": 0.9258,
|
| 4462 |
+
"step": 636
|
| 4463 |
+
},
|
| 4464 |
+
{
|
| 4465 |
+
"epoch": 3.5388888888888888,
|
| 4466 |
+
"grad_norm": 0.06418924033641815,
|
| 4467 |
+
"learning_rate": 5.899441340782124e-05,
|
| 4468 |
+
"loss": 0.9875,
|
| 4469 |
+
"step": 637
|
| 4470 |
+
},
|
| 4471 |
+
{
|
| 4472 |
+
"epoch": 3.5444444444444443,
|
| 4473 |
+
"grad_norm": 0.06523976475000381,
|
| 4474 |
+
"learning_rate": 5.87709497206704e-05,
|
| 4475 |
+
"loss": 0.8122,
|
| 4476 |
+
"step": 638
|
| 4477 |
+
},
|
| 4478 |
+
{
|
| 4479 |
+
"epoch": 3.55,
|
| 4480 |
+
"grad_norm": 0.09336890280246735,
|
| 4481 |
+
"learning_rate": 5.8547486033519563e-05,
|
| 4482 |
+
"loss": 0.7608,
|
| 4483 |
+
"step": 639
|
| 4484 |
+
},
|
| 4485 |
+
{
|
| 4486 |
+
"epoch": 3.5555555555555554,
|
| 4487 |
+
"grad_norm": 0.06870478391647339,
|
| 4488 |
+
"learning_rate": 5.8324022346368726e-05,
|
| 4489 |
+
"loss": 0.8859,
|
| 4490 |
+
"step": 640
|
| 4491 |
+
},
|
| 4492 |
+
{
|
| 4493 |
+
"epoch": 3.561111111111111,
|
| 4494 |
+
"grad_norm": 0.06951097398996353,
|
| 4495 |
+
"learning_rate": 5.810055865921789e-05,
|
| 4496 |
+
"loss": 0.8773,
|
| 4497 |
+
"step": 641
|
| 4498 |
+
},
|
| 4499 |
+
{
|
| 4500 |
+
"epoch": 3.5666666666666664,
|
| 4501 |
+
"grad_norm": 0.0712515339255333,
|
| 4502 |
+
"learning_rate": 5.787709497206704e-05,
|
| 4503 |
+
"loss": 0.7315,
|
| 4504 |
+
"step": 642
|
| 4505 |
+
},
|
| 4506 |
+
{
|
| 4507 |
+
"epoch": 3.572222222222222,
|
| 4508 |
+
"grad_norm": 0.0626714825630188,
|
| 4509 |
+
"learning_rate": 5.76536312849162e-05,
|
| 4510 |
+
"loss": 0.8983,
|
| 4511 |
+
"step": 643
|
| 4512 |
+
},
|
| 4513 |
+
{
|
| 4514 |
+
"epoch": 3.5777777777777775,
|
| 4515 |
+
"grad_norm": 0.06400678306818008,
|
| 4516 |
+
"learning_rate": 5.743016759776536e-05,
|
| 4517 |
+
"loss": 0.8323,
|
| 4518 |
+
"step": 644
|
| 4519 |
+
},
|
| 4520 |
+
{
|
| 4521 |
+
"epoch": 3.5833333333333335,
|
| 4522 |
+
"grad_norm": 0.07090960443019867,
|
| 4523 |
+
"learning_rate": 5.7206703910614524e-05,
|
| 4524 |
+
"loss": 0.8491,
|
| 4525 |
+
"step": 645
|
| 4526 |
+
},
|
| 4527 |
+
{
|
| 4528 |
+
"epoch": 3.588888888888889,
|
| 4529 |
+
"grad_norm": 0.07009242475032806,
|
| 4530 |
+
"learning_rate": 5.6983240223463686e-05,
|
| 4531 |
+
"loss": 0.8861,
|
| 4532 |
+
"step": 646
|
| 4533 |
+
},
|
| 4534 |
+
{
|
| 4535 |
+
"epoch": 3.5944444444444446,
|
| 4536 |
+
"grad_norm": 0.056394800543785095,
|
| 4537 |
+
"learning_rate": 5.675977653631285e-05,
|
| 4538 |
+
"loss": 0.9701,
|
| 4539 |
+
"step": 647
|
| 4540 |
+
},
|
| 4541 |
+
{
|
| 4542 |
+
"epoch": 3.6,
|
| 4543 |
+
"grad_norm": 0.0645764097571373,
|
| 4544 |
+
"learning_rate": 5.653631284916201e-05,
|
| 4545 |
+
"loss": 0.872,
|
| 4546 |
+
"step": 648
|
| 4547 |
+
},
|
| 4548 |
+
{
|
| 4549 |
+
"epoch": 3.6055555555555556,
|
| 4550 |
+
"grad_norm": 0.05909927189350128,
|
| 4551 |
+
"learning_rate": 5.631284916201117e-05,
|
| 4552 |
+
"loss": 0.7737,
|
| 4553 |
+
"step": 649
|
| 4554 |
+
},
|
| 4555 |
+
{
|
| 4556 |
+
"epoch": 3.611111111111111,
|
| 4557 |
+
"grad_norm": 0.06914041191339493,
|
| 4558 |
+
"learning_rate": 5.6089385474860336e-05,
|
| 4559 |
+
"loss": 0.7916,
|
| 4560 |
+
"step": 650
|
| 4561 |
+
},
|
| 4562 |
+
{
|
| 4563 |
+
"epoch": 3.6166666666666667,
|
| 4564 |
+
"grad_norm": 0.06700372695922852,
|
| 4565 |
+
"learning_rate": 5.58659217877095e-05,
|
| 4566 |
+
"loss": 0.9287,
|
| 4567 |
+
"step": 651
|
| 4568 |
+
},
|
| 4569 |
+
{
|
| 4570 |
+
"epoch": 3.6222222222222222,
|
| 4571 |
+
"grad_norm": 0.08735419064760208,
|
| 4572 |
+
"learning_rate": 5.564245810055866e-05,
|
| 4573 |
+
"loss": 0.6891,
|
| 4574 |
+
"step": 652
|
| 4575 |
+
},
|
| 4576 |
+
{
|
| 4577 |
+
"epoch": 3.6277777777777778,
|
| 4578 |
+
"grad_norm": 0.05871176719665527,
|
| 4579 |
+
"learning_rate": 5.541899441340782e-05,
|
| 4580 |
+
"loss": 0.8665,
|
| 4581 |
+
"step": 653
|
| 4582 |
+
},
|
| 4583 |
+
{
|
| 4584 |
+
"epoch": 3.6333333333333333,
|
| 4585 |
+
"grad_norm": 0.060677025467157364,
|
| 4586 |
+
"learning_rate": 5.5195530726256985e-05,
|
| 4587 |
+
"loss": 0.7776,
|
| 4588 |
+
"step": 654
|
| 4589 |
+
},
|
| 4590 |
+
{
|
| 4591 |
+
"epoch": 3.638888888888889,
|
| 4592 |
+
"grad_norm": 0.06749715656042099,
|
| 4593 |
+
"learning_rate": 5.497206703910615e-05,
|
| 4594 |
+
"loss": 0.8589,
|
| 4595 |
+
"step": 655
|
| 4596 |
+
},
|
| 4597 |
+
{
|
| 4598 |
+
"epoch": 3.6444444444444444,
|
| 4599 |
+
"grad_norm": 0.06428337097167969,
|
| 4600 |
+
"learning_rate": 5.474860335195531e-05,
|
| 4601 |
+
"loss": 0.8476,
|
| 4602 |
+
"step": 656
|
| 4603 |
+
},
|
| 4604 |
+
{
|
| 4605 |
+
"epoch": 3.65,
|
| 4606 |
+
"grad_norm": 0.07902880758047104,
|
| 4607 |
+
"learning_rate": 5.452513966480447e-05,
|
| 4608 |
+
"loss": 0.975,
|
| 4609 |
+
"step": 657
|
| 4610 |
+
},
|
| 4611 |
+
{
|
| 4612 |
+
"epoch": 3.6555555555555554,
|
| 4613 |
+
"grad_norm": 0.07362475991249084,
|
| 4614 |
+
"learning_rate": 5.4301675977653634e-05,
|
| 4615 |
+
"loss": 0.8756,
|
| 4616 |
+
"step": 658
|
| 4617 |
+
},
|
| 4618 |
+
{
|
| 4619 |
+
"epoch": 3.661111111111111,
|
| 4620 |
+
"grad_norm": 0.07670604437589645,
|
| 4621 |
+
"learning_rate": 5.4078212290502797e-05,
|
| 4622 |
+
"loss": 0.9614,
|
| 4623 |
+
"step": 659
|
| 4624 |
+
},
|
| 4625 |
+
{
|
| 4626 |
+
"epoch": 3.6666666666666665,
|
| 4627 |
+
"grad_norm": 0.05876456946134567,
|
| 4628 |
+
"learning_rate": 5.385474860335196e-05,
|
| 4629 |
+
"loss": 1.0271,
|
| 4630 |
+
"step": 660
|
| 4631 |
+
},
|
| 4632 |
+
{
|
| 4633 |
+
"epoch": 3.6722222222222225,
|
| 4634 |
+
"grad_norm": 0.06619323790073395,
|
| 4635 |
+
"learning_rate": 5.363128491620112e-05,
|
| 4636 |
+
"loss": 0.9151,
|
| 4637 |
+
"step": 661
|
| 4638 |
+
},
|
| 4639 |
+
{
|
| 4640 |
+
"epoch": 3.677777777777778,
|
| 4641 |
+
"grad_norm": 0.06175459921360016,
|
| 4642 |
+
"learning_rate": 5.3407821229050284e-05,
|
| 4643 |
+
"loss": 0.9548,
|
| 4644 |
+
"step": 662
|
| 4645 |
+
},
|
| 4646 |
+
{
|
| 4647 |
+
"epoch": 3.6833333333333336,
|
| 4648 |
+
"grad_norm": 0.06050381436944008,
|
| 4649 |
+
"learning_rate": 5.3184357541899446e-05,
|
| 4650 |
+
"loss": 0.9309,
|
| 4651 |
+
"step": 663
|
| 4652 |
+
},
|
| 4653 |
+
{
|
| 4654 |
+
"epoch": 3.688888888888889,
|
| 4655 |
+
"grad_norm": 0.07536690682172775,
|
| 4656 |
+
"learning_rate": 5.296089385474861e-05,
|
| 4657 |
+
"loss": 0.8292,
|
| 4658 |
+
"step": 664
|
| 4659 |
+
},
|
| 4660 |
+
{
|
| 4661 |
+
"epoch": 3.6944444444444446,
|
| 4662 |
+
"grad_norm": 0.05690660700201988,
|
| 4663 |
+
"learning_rate": 5.273743016759777e-05,
|
| 4664 |
+
"loss": 0.8964,
|
| 4665 |
+
"step": 665
|
| 4666 |
+
},
|
| 4667 |
+
{
|
| 4668 |
+
"epoch": 3.7,
|
| 4669 |
+
"grad_norm": 0.05835000425577164,
|
| 4670 |
+
"learning_rate": 5.251396648044693e-05,
|
| 4671 |
+
"loss": 0.8679,
|
| 4672 |
+
"step": 666
|
| 4673 |
+
},
|
| 4674 |
+
{
|
| 4675 |
+
"epoch": 3.7055555555555557,
|
| 4676 |
+
"grad_norm": 0.061024926602840424,
|
| 4677 |
+
"learning_rate": 5.2290502793296095e-05,
|
| 4678 |
+
"loss": 0.8628,
|
| 4679 |
+
"step": 667
|
| 4680 |
+
},
|
| 4681 |
+
{
|
| 4682 |
+
"epoch": 3.7111111111111112,
|
| 4683 |
+
"grad_norm": 0.07365045696496964,
|
| 4684 |
+
"learning_rate": 5.206703910614526e-05,
|
| 4685 |
+
"loss": 0.918,
|
| 4686 |
+
"step": 668
|
| 4687 |
+
},
|
| 4688 |
+
{
|
| 4689 |
+
"epoch": 3.716666666666667,
|
| 4690 |
+
"grad_norm": 0.06592633575201035,
|
| 4691 |
+
"learning_rate": 5.184357541899442e-05,
|
| 4692 |
+
"loss": 0.9424,
|
| 4693 |
+
"step": 669
|
| 4694 |
+
},
|
| 4695 |
+
{
|
| 4696 |
+
"epoch": 3.7222222222222223,
|
| 4697 |
+
"grad_norm": 0.056026358157396317,
|
| 4698 |
+
"learning_rate": 5.162011173184358e-05,
|
| 4699 |
+
"loss": 0.8151,
|
| 4700 |
+
"step": 670
|
| 4701 |
+
},
|
| 4702 |
+
{
|
| 4703 |
+
"epoch": 3.727777777777778,
|
| 4704 |
+
"grad_norm": 0.07303276658058167,
|
| 4705 |
+
"learning_rate": 5.139664804469274e-05,
|
| 4706 |
+
"loss": 0.8787,
|
| 4707 |
+
"step": 671
|
| 4708 |
+
},
|
| 4709 |
+
{
|
| 4710 |
+
"epoch": 3.7333333333333334,
|
| 4711 |
+
"grad_norm": 0.07177183032035828,
|
| 4712 |
+
"learning_rate": 5.11731843575419e-05,
|
| 4713 |
+
"loss": 0.867,
|
| 4714 |
+
"step": 672
|
| 4715 |
+
},
|
| 4716 |
+
{
|
| 4717 |
+
"epoch": 3.738888888888889,
|
| 4718 |
+
"grad_norm": 0.06418969482183456,
|
| 4719 |
+
"learning_rate": 5.094972067039106e-05,
|
| 4720 |
+
"loss": 0.9005,
|
| 4721 |
+
"step": 673
|
| 4722 |
+
},
|
| 4723 |
+
{
|
| 4724 |
+
"epoch": 3.7444444444444445,
|
| 4725 |
+
"grad_norm": 0.07607243955135345,
|
| 4726 |
+
"learning_rate": 5.0726256983240225e-05,
|
| 4727 |
+
"loss": 0.646,
|
| 4728 |
+
"step": 674
|
| 4729 |
+
},
|
| 4730 |
+
{
|
| 4731 |
+
"epoch": 3.75,
|
| 4732 |
+
"grad_norm": 0.06639571487903595,
|
| 4733 |
+
"learning_rate": 5.050279329608939e-05,
|
| 4734 |
+
"loss": 0.9202,
|
| 4735 |
+
"step": 675
|
| 4736 |
+
},
|
| 4737 |
+
{
|
| 4738 |
+
"epoch": 3.7555555555555555,
|
| 4739 |
+
"grad_norm": 0.06520118564367294,
|
| 4740 |
+
"learning_rate": 5.027932960893855e-05,
|
| 4741 |
+
"loss": 1.0109,
|
| 4742 |
+
"step": 676
|
| 4743 |
+
},
|
| 4744 |
+
{
|
| 4745 |
+
"epoch": 3.761111111111111,
|
| 4746 |
+
"grad_norm": 0.06542754173278809,
|
| 4747 |
+
"learning_rate": 5.005586592178771e-05,
|
| 4748 |
+
"loss": 0.9124,
|
| 4749 |
+
"step": 677
|
| 4750 |
+
},
|
| 4751 |
+
{
|
| 4752 |
+
"epoch": 3.7666666666666666,
|
| 4753 |
+
"grad_norm": 0.06325247138738632,
|
| 4754 |
+
"learning_rate": 4.9832402234636874e-05,
|
| 4755 |
+
"loss": 0.7919,
|
| 4756 |
+
"step": 678
|
| 4757 |
+
},
|
| 4758 |
+
{
|
| 4759 |
+
"epoch": 3.772222222222222,
|
| 4760 |
+
"grad_norm": 0.06934817135334015,
|
| 4761 |
+
"learning_rate": 4.9608938547486036e-05,
|
| 4762 |
+
"loss": 0.9353,
|
| 4763 |
+
"step": 679
|
| 4764 |
+
},
|
| 4765 |
+
{
|
| 4766 |
+
"epoch": 3.7777777777777777,
|
| 4767 |
+
"grad_norm": 0.06293205171823502,
|
| 4768 |
+
"learning_rate": 4.93854748603352e-05,
|
| 4769 |
+
"loss": 0.8681,
|
| 4770 |
+
"step": 680
|
| 4771 |
+
},
|
| 4772 |
+
{
|
| 4773 |
+
"epoch": 3.783333333333333,
|
| 4774 |
+
"grad_norm": 0.05741385743021965,
|
| 4775 |
+
"learning_rate": 4.916201117318436e-05,
|
| 4776 |
+
"loss": 0.9049,
|
| 4777 |
+
"step": 681
|
| 4778 |
+
},
|
| 4779 |
+
{
|
| 4780 |
+
"epoch": 3.7888888888888888,
|
| 4781 |
+
"grad_norm": 0.06111886352300644,
|
| 4782 |
+
"learning_rate": 4.8938547486033523e-05,
|
| 4783 |
+
"loss": 0.8285,
|
| 4784 |
+
"step": 682
|
| 4785 |
+
},
|
| 4786 |
+
{
|
| 4787 |
+
"epoch": 3.7944444444444443,
|
| 4788 |
+
"grad_norm": 0.06742309778928757,
|
| 4789 |
+
"learning_rate": 4.8715083798882686e-05,
|
| 4790 |
+
"loss": 0.8506,
|
| 4791 |
+
"step": 683
|
| 4792 |
+
},
|
| 4793 |
+
{
|
| 4794 |
+
"epoch": 3.8,
|
| 4795 |
+
"grad_norm": 0.07073011994361877,
|
| 4796 |
+
"learning_rate": 4.849162011173184e-05,
|
| 4797 |
+
"loss": 0.8671,
|
| 4798 |
+
"step": 684
|
| 4799 |
+
},
|
| 4800 |
+
{
|
| 4801 |
+
"epoch": 3.8055555555555554,
|
| 4802 |
+
"grad_norm": 0.05728191137313843,
|
| 4803 |
+
"learning_rate": 4.8268156424581004e-05,
|
| 4804 |
+
"loss": 0.8419,
|
| 4805 |
+
"step": 685
|
| 4806 |
+
},
|
| 4807 |
+
{
|
| 4808 |
+
"epoch": 3.811111111111111,
|
| 4809 |
+
"grad_norm": 0.06402203440666199,
|
| 4810 |
+
"learning_rate": 4.8044692737430166e-05,
|
| 4811 |
+
"loss": 0.841,
|
| 4812 |
+
"step": 686
|
| 4813 |
+
},
|
| 4814 |
+
{
|
| 4815 |
+
"epoch": 3.8166666666666664,
|
| 4816 |
+
"grad_norm": 0.0669245570898056,
|
| 4817 |
+
"learning_rate": 4.782122905027933e-05,
|
| 4818 |
+
"loss": 0.8425,
|
| 4819 |
+
"step": 687
|
| 4820 |
+
},
|
| 4821 |
+
{
|
| 4822 |
+
"epoch": 3.822222222222222,
|
| 4823 |
+
"grad_norm": 0.06659059971570969,
|
| 4824 |
+
"learning_rate": 4.759776536312849e-05,
|
| 4825 |
+
"loss": 0.9707,
|
| 4826 |
+
"step": 688
|
| 4827 |
+
},
|
| 4828 |
+
{
|
| 4829 |
+
"epoch": 3.8277777777777775,
|
| 4830 |
+
"grad_norm": 0.06549696624279022,
|
| 4831 |
+
"learning_rate": 4.737430167597765e-05,
|
| 4832 |
+
"loss": 0.9782,
|
| 4833 |
+
"step": 689
|
| 4834 |
+
},
|
| 4835 |
+
{
|
| 4836 |
+
"epoch": 3.8333333333333335,
|
| 4837 |
+
"grad_norm": 0.05991567671298981,
|
| 4838 |
+
"learning_rate": 4.7150837988826815e-05,
|
| 4839 |
+
"loss": 0.8978,
|
| 4840 |
+
"step": 690
|
| 4841 |
+
},
|
| 4842 |
+
{
|
| 4843 |
+
"epoch": 3.838888888888889,
|
| 4844 |
+
"grad_norm": 0.07296551018953323,
|
| 4845 |
+
"learning_rate": 4.692737430167598e-05,
|
| 4846 |
+
"loss": 0.8963,
|
| 4847 |
+
"step": 691
|
| 4848 |
+
},
|
| 4849 |
+
{
|
| 4850 |
+
"epoch": 3.8444444444444446,
|
| 4851 |
+
"grad_norm": 0.07527071982622147,
|
| 4852 |
+
"learning_rate": 4.670391061452514e-05,
|
| 4853 |
+
"loss": 0.8314,
|
| 4854 |
+
"step": 692
|
| 4855 |
+
},
|
| 4856 |
+
{
|
| 4857 |
+
"epoch": 3.85,
|
| 4858 |
+
"grad_norm": 0.08402854949235916,
|
| 4859 |
+
"learning_rate": 4.64804469273743e-05,
|
| 4860 |
+
"loss": 0.7886,
|
| 4861 |
+
"step": 693
|
| 4862 |
+
},
|
| 4863 |
+
{
|
| 4864 |
+
"epoch": 3.8555555555555556,
|
| 4865 |
+
"grad_norm": 0.06636254489421844,
|
| 4866 |
+
"learning_rate": 4.6256983240223465e-05,
|
| 4867 |
+
"loss": 0.8322,
|
| 4868 |
+
"step": 694
|
| 4869 |
+
},
|
| 4870 |
+
{
|
| 4871 |
+
"epoch": 3.861111111111111,
|
| 4872 |
+
"grad_norm": 0.06297782063484192,
|
| 4873 |
+
"learning_rate": 4.603351955307263e-05,
|
| 4874 |
+
"loss": 0.9423,
|
| 4875 |
+
"step": 695
|
| 4876 |
+
},
|
| 4877 |
+
{
|
| 4878 |
+
"epoch": 3.8666666666666667,
|
| 4879 |
+
"grad_norm": 0.07213331013917923,
|
| 4880 |
+
"learning_rate": 4.581005586592179e-05,
|
| 4881 |
+
"loss": 0.8553,
|
| 4882 |
+
"step": 696
|
| 4883 |
+
},
|
| 4884 |
+
{
|
| 4885 |
+
"epoch": 3.8722222222222222,
|
| 4886 |
+
"grad_norm": 0.0674607902765274,
|
| 4887 |
+
"learning_rate": 4.558659217877095e-05,
|
| 4888 |
+
"loss": 0.8657,
|
| 4889 |
+
"step": 697
|
| 4890 |
+
},
|
| 4891 |
+
{
|
| 4892 |
+
"epoch": 3.8777777777777778,
|
| 4893 |
+
"grad_norm": 0.06301897764205933,
|
| 4894 |
+
"learning_rate": 4.5363128491620114e-05,
|
| 4895 |
+
"loss": 0.8108,
|
| 4896 |
+
"step": 698
|
| 4897 |
+
},
|
| 4898 |
+
{
|
| 4899 |
+
"epoch": 3.8833333333333333,
|
| 4900 |
+
"grad_norm": 0.059832848608493805,
|
| 4901 |
+
"learning_rate": 4.5139664804469276e-05,
|
| 4902 |
+
"loss": 0.918,
|
| 4903 |
+
"step": 699
|
| 4904 |
+
},
|
| 4905 |
+
{
|
| 4906 |
+
"epoch": 3.888888888888889,
|
| 4907 |
+
"grad_norm": 0.06674478203058243,
|
| 4908 |
+
"learning_rate": 4.491620111731844e-05,
|
| 4909 |
+
"loss": 0.8521,
|
| 4910 |
+
"step": 700
|
| 4911 |
+
},
|
| 4912 |
+
{
|
| 4913 |
+
"epoch": 3.8944444444444444,
|
| 4914 |
+
"grad_norm": 0.07494413107633591,
|
| 4915 |
+
"learning_rate": 4.4692737430167594e-05,
|
| 4916 |
+
"loss": 0.8805,
|
| 4917 |
+
"step": 701
|
| 4918 |
+
},
|
| 4919 |
+
{
|
| 4920 |
+
"epoch": 3.9,
|
| 4921 |
+
"grad_norm": 0.06808764487504959,
|
| 4922 |
+
"learning_rate": 4.4469273743016757e-05,
|
| 4923 |
+
"loss": 0.822,
|
| 4924 |
+
"step": 702
|
| 4925 |
+
},
|
| 4926 |
+
{
|
| 4927 |
+
"epoch": 3.9055555555555554,
|
| 4928 |
+
"grad_norm": 0.061348509043455124,
|
| 4929 |
+
"learning_rate": 4.424581005586592e-05,
|
| 4930 |
+
"loss": 0.8472,
|
| 4931 |
+
"step": 703
|
| 4932 |
+
},
|
| 4933 |
+
{
|
| 4934 |
+
"epoch": 3.911111111111111,
|
| 4935 |
+
"grad_norm": 0.06749361008405685,
|
| 4936 |
+
"learning_rate": 4.402234636871508e-05,
|
| 4937 |
+
"loss": 0.8881,
|
| 4938 |
+
"step": 704
|
| 4939 |
+
},
|
| 4940 |
+
{
|
| 4941 |
+
"epoch": 3.9166666666666665,
|
| 4942 |
+
"grad_norm": 0.05673949047923088,
|
| 4943 |
+
"learning_rate": 4.3798882681564244e-05,
|
| 4944 |
+
"loss": 0.7887,
|
| 4945 |
+
"step": 705
|
| 4946 |
+
},
|
| 4947 |
+
{
|
| 4948 |
+
"epoch": 3.9222222222222225,
|
| 4949 |
+
"grad_norm": 0.06126287952065468,
|
| 4950 |
+
"learning_rate": 4.3575418994413406e-05,
|
| 4951 |
+
"loss": 0.9302,
|
| 4952 |
+
"step": 706
|
| 4953 |
+
},
|
| 4954 |
+
{
|
| 4955 |
+
"epoch": 3.927777777777778,
|
| 4956 |
+
"grad_norm": 0.06536653637886047,
|
| 4957 |
+
"learning_rate": 4.335195530726257e-05,
|
| 4958 |
+
"loss": 0.8985,
|
| 4959 |
+
"step": 707
|
| 4960 |
+
},
|
| 4961 |
+
{
|
| 4962 |
+
"epoch": 3.9333333333333336,
|
| 4963 |
+
"grad_norm": 0.06179942563176155,
|
| 4964 |
+
"learning_rate": 4.312849162011173e-05,
|
| 4965 |
+
"loss": 0.9524,
|
| 4966 |
+
"step": 708
|
| 4967 |
+
},
|
| 4968 |
+
{
|
| 4969 |
+
"epoch": 3.938888888888889,
|
| 4970 |
+
"grad_norm": 0.06911918520927429,
|
| 4971 |
+
"learning_rate": 4.290502793296089e-05,
|
| 4972 |
+
"loss": 0.8222,
|
| 4973 |
+
"step": 709
|
| 4974 |
+
},
|
| 4975 |
+
{
|
| 4976 |
+
"epoch": 3.9444444444444446,
|
| 4977 |
+
"grad_norm": 0.058220285922288895,
|
| 4978 |
+
"learning_rate": 4.2681564245810055e-05,
|
| 4979 |
+
"loss": 0.7177,
|
| 4980 |
+
"step": 710
|
| 4981 |
+
},
|
| 4982 |
+
{
|
| 4983 |
+
"epoch": 3.95,
|
| 4984 |
+
"grad_norm": 0.06330408155918121,
|
| 4985 |
+
"learning_rate": 4.245810055865922e-05,
|
| 4986 |
+
"loss": 0.7577,
|
| 4987 |
+
"step": 711
|
| 4988 |
+
},
|
| 4989 |
+
{
|
| 4990 |
+
"epoch": 3.9555555555555557,
|
| 4991 |
+
"grad_norm": 0.06855887174606323,
|
| 4992 |
+
"learning_rate": 4.223463687150838e-05,
|
| 4993 |
+
"loss": 0.927,
|
| 4994 |
+
"step": 712
|
| 4995 |
+
},
|
| 4996 |
+
{
|
| 4997 |
+
"epoch": 3.9611111111111112,
|
| 4998 |
+
"grad_norm": 0.0702371895313263,
|
| 4999 |
+
"learning_rate": 4.201117318435754e-05,
|
| 5000 |
+
"loss": 0.7464,
|
| 5001 |
+
"step": 713
|
| 5002 |
+
},
|
| 5003 |
+
{
|
| 5004 |
+
"epoch": 3.966666666666667,
|
| 5005 |
+
"grad_norm": 0.07532446086406708,
|
| 5006 |
+
"learning_rate": 4.1787709497206705e-05,
|
| 5007 |
+
"loss": 0.8202,
|
| 5008 |
+
"step": 714
|
| 5009 |
+
},
|
| 5010 |
+
{
|
| 5011 |
+
"epoch": 3.9722222222222223,
|
| 5012 |
+
"grad_norm": 0.07662215083837509,
|
| 5013 |
+
"learning_rate": 4.156424581005587e-05,
|
| 5014 |
+
"loss": 0.8381,
|
| 5015 |
+
"step": 715
|
| 5016 |
+
},
|
| 5017 |
+
{
|
| 5018 |
+
"epoch": 3.977777777777778,
|
| 5019 |
+
"grad_norm": 0.0621059276163578,
|
| 5020 |
+
"learning_rate": 4.134078212290503e-05,
|
| 5021 |
+
"loss": 0.7477,
|
| 5022 |
+
"step": 716
|
| 5023 |
+
},
|
| 5024 |
+
{
|
| 5025 |
+
"epoch": 3.9833333333333334,
|
| 5026 |
+
"grad_norm": 0.06151144951581955,
|
| 5027 |
+
"learning_rate": 4.111731843575419e-05,
|
| 5028 |
+
"loss": 0.8781,
|
| 5029 |
+
"step": 717
|
| 5030 |
+
},
|
| 5031 |
+
{
|
| 5032 |
+
"epoch": 3.988888888888889,
|
| 5033 |
+
"grad_norm": 0.0767519623041153,
|
| 5034 |
+
"learning_rate": 4.0893854748603354e-05,
|
| 5035 |
+
"loss": 0.9478,
|
| 5036 |
+
"step": 718
|
| 5037 |
+
},
|
| 5038 |
+
{
|
| 5039 |
+
"epoch": 3.9944444444444445,
|
| 5040 |
+
"grad_norm": 0.07194758951663971,
|
| 5041 |
+
"learning_rate": 4.0670391061452516e-05,
|
| 5042 |
+
"loss": 0.7946,
|
| 5043 |
+
"step": 719
|
| 5044 |
+
},
|
| 5045 |
+
{
|
| 5046 |
+
"epoch": 4.0,
|
| 5047 |
+
"grad_norm": 0.07248340547084808,
|
| 5048 |
+
"learning_rate": 4.044692737430168e-05,
|
| 5049 |
+
"loss": 0.8131,
|
| 5050 |
+
"step": 720
|
| 5051 |
+
},
|
| 5052 |
+
{
|
| 5053 |
+
"epoch": 4.0055555555555555,
|
| 5054 |
+
"grad_norm": 0.05817415192723274,
|
| 5055 |
+
"learning_rate": 4.022346368715084e-05,
|
| 5056 |
+
"loss": 0.9452,
|
| 5057 |
+
"step": 721
|
| 5058 |
+
},
|
| 5059 |
+
{
|
| 5060 |
+
"epoch": 4.011111111111111,
|
| 5061 |
+
"grad_norm": 0.06506936997175217,
|
| 5062 |
+
"learning_rate": 4e-05,
|
| 5063 |
+
"loss": 0.8357,
|
| 5064 |
+
"step": 722
|
| 5065 |
+
},
|
| 5066 |
+
{
|
| 5067 |
+
"epoch": 4.016666666666667,
|
| 5068 |
+
"grad_norm": 0.06475253403186798,
|
| 5069 |
+
"learning_rate": 3.9776536312849166e-05,
|
| 5070 |
+
"loss": 0.8215,
|
| 5071 |
+
"step": 723
|
| 5072 |
+
},
|
| 5073 |
+
{
|
| 5074 |
+
"epoch": 4.022222222222222,
|
| 5075 |
+
"grad_norm": 0.06521406769752502,
|
| 5076 |
+
"learning_rate": 3.955307262569833e-05,
|
| 5077 |
+
"loss": 0.8996,
|
| 5078 |
+
"step": 724
|
| 5079 |
+
},
|
| 5080 |
+
{
|
| 5081 |
+
"epoch": 4.027777777777778,
|
| 5082 |
+
"grad_norm": 0.06060001254081726,
|
| 5083 |
+
"learning_rate": 3.9329608938547483e-05,
|
| 5084 |
+
"loss": 0.7959,
|
| 5085 |
+
"step": 725
|
| 5086 |
+
},
|
| 5087 |
+
{
|
| 5088 |
+
"epoch": 4.033333333333333,
|
| 5089 |
+
"grad_norm": 0.059213463217020035,
|
| 5090 |
+
"learning_rate": 3.9106145251396646e-05,
|
| 5091 |
+
"loss": 0.9704,
|
| 5092 |
+
"step": 726
|
| 5093 |
+
},
|
| 5094 |
+
{
|
| 5095 |
+
"epoch": 4.038888888888889,
|
| 5096 |
+
"grad_norm": 0.05990111082792282,
|
| 5097 |
+
"learning_rate": 3.888268156424581e-05,
|
| 5098 |
+
"loss": 0.8538,
|
| 5099 |
+
"step": 727
|
| 5100 |
+
},
|
| 5101 |
+
{
|
| 5102 |
+
"epoch": 4.044444444444444,
|
| 5103 |
+
"grad_norm": 0.057270485907793045,
|
| 5104 |
+
"learning_rate": 3.865921787709497e-05,
|
| 5105 |
+
"loss": 0.8653,
|
| 5106 |
+
"step": 728
|
| 5107 |
+
},
|
| 5108 |
+
{
|
| 5109 |
+
"epoch": 4.05,
|
| 5110 |
+
"grad_norm": 0.0747293010354042,
|
| 5111 |
+
"learning_rate": 3.843575418994413e-05,
|
| 5112 |
+
"loss": 0.9716,
|
| 5113 |
+
"step": 729
|
| 5114 |
+
},
|
| 5115 |
+
{
|
| 5116 |
+
"epoch": 4.055555555555555,
|
| 5117 |
+
"grad_norm": 0.06068810448050499,
|
| 5118 |
+
"learning_rate": 3.8212290502793295e-05,
|
| 5119 |
+
"loss": 0.8447,
|
| 5120 |
+
"step": 730
|
| 5121 |
+
},
|
| 5122 |
+
{
|
| 5123 |
+
"epoch": 4.061111111111111,
|
| 5124 |
+
"grad_norm": 0.0651971846818924,
|
| 5125 |
+
"learning_rate": 3.798882681564246e-05,
|
| 5126 |
+
"loss": 0.7749,
|
| 5127 |
+
"step": 731
|
| 5128 |
+
},
|
| 5129 |
+
{
|
| 5130 |
+
"epoch": 4.066666666666666,
|
| 5131 |
+
"grad_norm": 0.07306065410375595,
|
| 5132 |
+
"learning_rate": 3.776536312849162e-05,
|
| 5133 |
+
"loss": 0.9461,
|
| 5134 |
+
"step": 732
|
| 5135 |
+
},
|
| 5136 |
+
{
|
| 5137 |
+
"epoch": 4.072222222222222,
|
| 5138 |
+
"grad_norm": 0.06800976395606995,
|
| 5139 |
+
"learning_rate": 3.754189944134078e-05,
|
| 5140 |
+
"loss": 0.8621,
|
| 5141 |
+
"step": 733
|
| 5142 |
+
},
|
| 5143 |
+
{
|
| 5144 |
+
"epoch": 4.0777777777777775,
|
| 5145 |
+
"grad_norm": 0.07779917120933533,
|
| 5146 |
+
"learning_rate": 3.7318435754189944e-05,
|
| 5147 |
+
"loss": 0.8329,
|
| 5148 |
+
"step": 734
|
| 5149 |
+
},
|
| 5150 |
+
{
|
| 5151 |
+
"epoch": 4.083333333333333,
|
| 5152 |
+
"grad_norm": 0.07067432999610901,
|
| 5153 |
+
"learning_rate": 3.709497206703911e-05,
|
| 5154 |
+
"loss": 0.8911,
|
| 5155 |
+
"step": 735
|
| 5156 |
+
},
|
| 5157 |
+
{
|
| 5158 |
+
"epoch": 4.088888888888889,
|
| 5159 |
+
"grad_norm": 0.0689227506518364,
|
| 5160 |
+
"learning_rate": 3.687150837988827e-05,
|
| 5161 |
+
"loss": 0.8637,
|
| 5162 |
+
"step": 736
|
| 5163 |
+
},
|
| 5164 |
+
{
|
| 5165 |
+
"epoch": 4.094444444444444,
|
| 5166 |
+
"grad_norm": 0.07578172534704208,
|
| 5167 |
+
"learning_rate": 3.664804469273743e-05,
|
| 5168 |
+
"loss": 0.7896,
|
| 5169 |
+
"step": 737
|
| 5170 |
+
},
|
| 5171 |
+
{
|
| 5172 |
+
"epoch": 4.1,
|
| 5173 |
+
"grad_norm": 0.06386541575193405,
|
| 5174 |
+
"learning_rate": 3.6424581005586594e-05,
|
| 5175 |
+
"loss": 0.814,
|
| 5176 |
+
"step": 738
|
| 5177 |
+
},
|
| 5178 |
+
{
|
| 5179 |
+
"epoch": 4.105555555555555,
|
| 5180 |
+
"grad_norm": 0.0776628702878952,
|
| 5181 |
+
"learning_rate": 3.6201117318435756e-05,
|
| 5182 |
+
"loss": 0.8236,
|
| 5183 |
+
"step": 739
|
| 5184 |
+
},
|
| 5185 |
+
{
|
| 5186 |
+
"epoch": 4.111111111111111,
|
| 5187 |
+
"grad_norm": 0.07262839376926422,
|
| 5188 |
+
"learning_rate": 3.597765363128492e-05,
|
| 5189 |
+
"loss": 0.7797,
|
| 5190 |
+
"step": 740
|
| 5191 |
+
},
|
| 5192 |
+
{
|
| 5193 |
+
"epoch": 4.116666666666666,
|
| 5194 |
+
"grad_norm": 0.06866869330406189,
|
| 5195 |
+
"learning_rate": 3.575418994413408e-05,
|
| 5196 |
+
"loss": 0.7972,
|
| 5197 |
+
"step": 741
|
| 5198 |
+
},
|
| 5199 |
+
{
|
| 5200 |
+
"epoch": 4.122222222222222,
|
| 5201 |
+
"grad_norm": 0.0663917064666748,
|
| 5202 |
+
"learning_rate": 3.553072625698324e-05,
|
| 5203 |
+
"loss": 0.9338,
|
| 5204 |
+
"step": 742
|
| 5205 |
+
},
|
| 5206 |
+
{
|
| 5207 |
+
"epoch": 4.127777777777778,
|
| 5208 |
+
"grad_norm": 0.06902816146612167,
|
| 5209 |
+
"learning_rate": 3.5307262569832406e-05,
|
| 5210 |
+
"loss": 1.0073,
|
| 5211 |
+
"step": 743
|
| 5212 |
+
},
|
| 5213 |
+
{
|
| 5214 |
+
"epoch": 4.133333333333334,
|
| 5215 |
+
"grad_norm": 0.06993508338928223,
|
| 5216 |
+
"learning_rate": 3.508379888268157e-05,
|
| 5217 |
+
"loss": 0.7839,
|
| 5218 |
+
"step": 744
|
| 5219 |
+
},
|
| 5220 |
+
{
|
| 5221 |
+
"epoch": 4.138888888888889,
|
| 5222 |
+
"grad_norm": 0.0647396519780159,
|
| 5223 |
+
"learning_rate": 3.486033519553073e-05,
|
| 5224 |
+
"loss": 0.8312,
|
| 5225 |
+
"step": 745
|
| 5226 |
+
},
|
| 5227 |
+
{
|
| 5228 |
+
"epoch": 4.144444444444445,
|
| 5229 |
+
"grad_norm": 0.053603801876306534,
|
| 5230 |
+
"learning_rate": 3.463687150837989e-05,
|
| 5231 |
+
"loss": 0.7799,
|
| 5232 |
+
"step": 746
|
| 5233 |
+
},
|
| 5234 |
+
{
|
| 5235 |
+
"epoch": 4.15,
|
| 5236 |
+
"grad_norm": 0.06209754943847656,
|
| 5237 |
+
"learning_rate": 3.4413407821229055e-05,
|
| 5238 |
+
"loss": 0.8136,
|
| 5239 |
+
"step": 747
|
| 5240 |
+
},
|
| 5241 |
+
{
|
| 5242 |
+
"epoch": 4.155555555555556,
|
| 5243 |
+
"grad_norm": 0.09486910700798035,
|
| 5244 |
+
"learning_rate": 3.418994413407821e-05,
|
| 5245 |
+
"loss": 0.6833,
|
| 5246 |
+
"step": 748
|
| 5247 |
+
},
|
| 5248 |
+
{
|
| 5249 |
+
"epoch": 4.161111111111111,
|
| 5250 |
+
"grad_norm": 0.06692782789468765,
|
| 5251 |
+
"learning_rate": 3.396648044692737e-05,
|
| 5252 |
+
"loss": 0.8619,
|
| 5253 |
+
"step": 749
|
| 5254 |
+
},
|
| 5255 |
+
{
|
| 5256 |
+
"epoch": 4.166666666666667,
|
| 5257 |
+
"grad_norm": 0.05541648343205452,
|
| 5258 |
+
"learning_rate": 3.3743016759776535e-05,
|
| 5259 |
+
"loss": 0.8244,
|
| 5260 |
+
"step": 750
|
| 5261 |
+
},
|
| 5262 |
+
{
|
| 5263 |
+
"epoch": 4.1722222222222225,
|
| 5264 |
+
"grad_norm": 0.06439889967441559,
|
| 5265 |
+
"learning_rate": 3.35195530726257e-05,
|
| 5266 |
+
"loss": 0.8413,
|
| 5267 |
+
"step": 751
|
| 5268 |
+
},
|
| 5269 |
+
{
|
| 5270 |
+
"epoch": 4.177777777777778,
|
| 5271 |
+
"grad_norm": 0.07166414707899094,
|
| 5272 |
+
"learning_rate": 3.329608938547486e-05,
|
| 5273 |
+
"loss": 0.989,
|
| 5274 |
+
"step": 752
|
| 5275 |
+
},
|
| 5276 |
+
{
|
| 5277 |
+
"epoch": 4.183333333333334,
|
| 5278 |
+
"grad_norm": 0.0640282854437828,
|
| 5279 |
+
"learning_rate": 3.307262569832402e-05,
|
| 5280 |
+
"loss": 0.9962,
|
| 5281 |
+
"step": 753
|
| 5282 |
+
},
|
| 5283 |
+
{
|
| 5284 |
+
"epoch": 4.188888888888889,
|
| 5285 |
+
"grad_norm": 0.07067931443452835,
|
| 5286 |
+
"learning_rate": 3.2849162011173184e-05,
|
| 5287 |
+
"loss": 0.7365,
|
| 5288 |
+
"step": 754
|
| 5289 |
+
},
|
| 5290 |
+
{
|
| 5291 |
+
"epoch": 4.194444444444445,
|
| 5292 |
+
"grad_norm": 0.07782124727964401,
|
| 5293 |
+
"learning_rate": 3.262569832402235e-05,
|
| 5294 |
+
"loss": 0.7209,
|
| 5295 |
+
"step": 755
|
| 5296 |
+
},
|
| 5297 |
+
{
|
| 5298 |
+
"epoch": 4.2,
|
| 5299 |
+
"grad_norm": 0.06391673535108566,
|
| 5300 |
+
"learning_rate": 3.240223463687151e-05,
|
| 5301 |
+
"loss": 0.8479,
|
| 5302 |
+
"step": 756
|
| 5303 |
+
},
|
| 5304 |
+
{
|
| 5305 |
+
"epoch": 4.205555555555556,
|
| 5306 |
+
"grad_norm": 0.06089261546730995,
|
| 5307 |
+
"learning_rate": 3.217877094972067e-05,
|
| 5308 |
+
"loss": 0.7635,
|
| 5309 |
+
"step": 757
|
| 5310 |
+
},
|
| 5311 |
+
{
|
| 5312 |
+
"epoch": 4.211111111111111,
|
| 5313 |
+
"grad_norm": 0.06284917145967484,
|
| 5314 |
+
"learning_rate": 3.1955307262569834e-05,
|
| 5315 |
+
"loss": 0.9128,
|
| 5316 |
+
"step": 758
|
| 5317 |
+
},
|
| 5318 |
+
{
|
| 5319 |
+
"epoch": 4.216666666666667,
|
| 5320 |
+
"grad_norm": 0.060241833329200745,
|
| 5321 |
+
"learning_rate": 3.1731843575418996e-05,
|
| 5322 |
+
"loss": 0.8521,
|
| 5323 |
+
"step": 759
|
| 5324 |
+
},
|
| 5325 |
+
{
|
| 5326 |
+
"epoch": 4.222222222222222,
|
| 5327 |
+
"grad_norm": 0.07311747968196869,
|
| 5328 |
+
"learning_rate": 3.150837988826816e-05,
|
| 5329 |
+
"loss": 0.7396,
|
| 5330 |
+
"step": 760
|
| 5331 |
+
},
|
| 5332 |
+
{
|
| 5333 |
+
"epoch": 4.227777777777778,
|
| 5334 |
+
"grad_norm": 0.06416069716215134,
|
| 5335 |
+
"learning_rate": 3.128491620111732e-05,
|
| 5336 |
+
"loss": 0.8738,
|
| 5337 |
+
"step": 761
|
| 5338 |
+
},
|
| 5339 |
+
{
|
| 5340 |
+
"epoch": 4.233333333333333,
|
| 5341 |
+
"grad_norm": 0.0591534860432148,
|
| 5342 |
+
"learning_rate": 3.106145251396648e-05,
|
| 5343 |
+
"loss": 0.8741,
|
| 5344 |
+
"step": 762
|
| 5345 |
+
},
|
| 5346 |
+
{
|
| 5347 |
+
"epoch": 4.238888888888889,
|
| 5348 |
+
"grad_norm": 0.06801345944404602,
|
| 5349 |
+
"learning_rate": 3.0837988826815645e-05,
|
| 5350 |
+
"loss": 1.0116,
|
| 5351 |
+
"step": 763
|
| 5352 |
+
},
|
| 5353 |
+
{
|
| 5354 |
+
"epoch": 4.2444444444444445,
|
| 5355 |
+
"grad_norm": 0.06485697627067566,
|
| 5356 |
+
"learning_rate": 3.061452513966481e-05,
|
| 5357 |
+
"loss": 0.8626,
|
| 5358 |
+
"step": 764
|
| 5359 |
+
},
|
| 5360 |
+
{
|
| 5361 |
+
"epoch": 4.25,
|
| 5362 |
+
"grad_norm": 0.07373441010713577,
|
| 5363 |
+
"learning_rate": 3.039106145251397e-05,
|
| 5364 |
+
"loss": 0.7986,
|
| 5365 |
+
"step": 765
|
| 5366 |
+
},
|
| 5367 |
+
{
|
| 5368 |
+
"epoch": 4.2555555555555555,
|
| 5369 |
+
"grad_norm": 0.07167431712150574,
|
| 5370 |
+
"learning_rate": 3.0167597765363132e-05,
|
| 5371 |
+
"loss": 0.9507,
|
| 5372 |
+
"step": 766
|
| 5373 |
+
},
|
| 5374 |
+
{
|
| 5375 |
+
"epoch": 4.261111111111111,
|
| 5376 |
+
"grad_norm": 0.06408189237117767,
|
| 5377 |
+
"learning_rate": 2.9944134078212295e-05,
|
| 5378 |
+
"loss": 0.7867,
|
| 5379 |
+
"step": 767
|
| 5380 |
+
},
|
| 5381 |
+
{
|
| 5382 |
+
"epoch": 4.266666666666667,
|
| 5383 |
+
"grad_norm": 0.07456216216087341,
|
| 5384 |
+
"learning_rate": 2.9720670391061457e-05,
|
| 5385 |
+
"loss": 0.934,
|
| 5386 |
+
"step": 768
|
| 5387 |
+
},
|
| 5388 |
+
{
|
| 5389 |
+
"epoch": 4.272222222222222,
|
| 5390 |
+
"grad_norm": 0.060554083436727524,
|
| 5391 |
+
"learning_rate": 2.949720670391062e-05,
|
| 5392 |
+
"loss": 0.8407,
|
| 5393 |
+
"step": 769
|
| 5394 |
+
},
|
| 5395 |
+
{
|
| 5396 |
+
"epoch": 4.277777777777778,
|
| 5397 |
+
"grad_norm": 0.07266189157962799,
|
| 5398 |
+
"learning_rate": 2.9273743016759782e-05,
|
| 5399 |
+
"loss": 0.9627,
|
| 5400 |
+
"step": 770
|
| 5401 |
+
},
|
| 5402 |
+
{
|
| 5403 |
+
"epoch": 4.283333333333333,
|
| 5404 |
+
"grad_norm": 0.06674201786518097,
|
| 5405 |
+
"learning_rate": 2.9050279329608944e-05,
|
| 5406 |
+
"loss": 0.9316,
|
| 5407 |
+
"step": 771
|
| 5408 |
+
},
|
| 5409 |
+
{
|
| 5410 |
+
"epoch": 4.288888888888889,
|
| 5411 |
+
"grad_norm": 0.06580832600593567,
|
| 5412 |
+
"learning_rate": 2.88268156424581e-05,
|
| 5413 |
+
"loss": 0.7616,
|
| 5414 |
+
"step": 772
|
| 5415 |
+
},
|
| 5416 |
+
{
|
| 5417 |
+
"epoch": 4.294444444444444,
|
| 5418 |
+
"grad_norm": 0.067823126912117,
|
| 5419 |
+
"learning_rate": 2.8603351955307262e-05,
|
| 5420 |
+
"loss": 0.7364,
|
| 5421 |
+
"step": 773
|
| 5422 |
+
},
|
| 5423 |
+
{
|
| 5424 |
+
"epoch": 4.3,
|
| 5425 |
+
"grad_norm": 0.06775198131799698,
|
| 5426 |
+
"learning_rate": 2.8379888268156424e-05,
|
| 5427 |
+
"loss": 0.9093,
|
| 5428 |
+
"step": 774
|
| 5429 |
+
},
|
| 5430 |
+
{
|
| 5431 |
+
"epoch": 4.305555555555555,
|
| 5432 |
+
"grad_norm": 0.06065399944782257,
|
| 5433 |
+
"learning_rate": 2.8156424581005587e-05,
|
| 5434 |
+
"loss": 0.6999,
|
| 5435 |
+
"step": 775
|
| 5436 |
+
},
|
| 5437 |
+
{
|
| 5438 |
+
"epoch": 4.311111111111111,
|
| 5439 |
+
"grad_norm": 0.06072010472416878,
|
| 5440 |
+
"learning_rate": 2.793296089385475e-05,
|
| 5441 |
+
"loss": 0.8397,
|
| 5442 |
+
"step": 776
|
| 5443 |
+
},
|
| 5444 |
+
{
|
| 5445 |
+
"epoch": 4.316666666666666,
|
| 5446 |
+
"grad_norm": 0.06833003461360931,
|
| 5447 |
+
"learning_rate": 2.770949720670391e-05,
|
| 5448 |
+
"loss": 0.7948,
|
| 5449 |
+
"step": 777
|
| 5450 |
+
},
|
| 5451 |
+
{
|
| 5452 |
+
"epoch": 4.322222222222222,
|
| 5453 |
+
"grad_norm": 0.06961791962385178,
|
| 5454 |
+
"learning_rate": 2.7486033519553074e-05,
|
| 5455 |
+
"loss": 0.8539,
|
| 5456 |
+
"step": 778
|
| 5457 |
+
},
|
| 5458 |
+
{
|
| 5459 |
+
"epoch": 4.3277777777777775,
|
| 5460 |
+
"grad_norm": 0.07114412635564804,
|
| 5461 |
+
"learning_rate": 2.7262569832402236e-05,
|
| 5462 |
+
"loss": 0.835,
|
| 5463 |
+
"step": 779
|
| 5464 |
+
},
|
| 5465 |
+
{
|
| 5466 |
+
"epoch": 4.333333333333333,
|
| 5467 |
+
"grad_norm": 0.07904283702373505,
|
| 5468 |
+
"learning_rate": 2.7039106145251398e-05,
|
| 5469 |
+
"loss": 0.7917,
|
| 5470 |
+
"step": 780
|
| 5471 |
+
},
|
| 5472 |
+
{
|
| 5473 |
+
"epoch": 4.338888888888889,
|
| 5474 |
+
"grad_norm": 0.06877896934747696,
|
| 5475 |
+
"learning_rate": 2.681564245810056e-05,
|
| 5476 |
+
"loss": 0.9066,
|
| 5477 |
+
"step": 781
|
| 5478 |
+
},
|
| 5479 |
+
{
|
| 5480 |
+
"epoch": 4.344444444444444,
|
| 5481 |
+
"grad_norm": 0.06604032218456268,
|
| 5482 |
+
"learning_rate": 2.6592178770949723e-05,
|
| 5483 |
+
"loss": 0.8103,
|
| 5484 |
+
"step": 782
|
| 5485 |
+
},
|
| 5486 |
+
{
|
| 5487 |
+
"epoch": 4.35,
|
| 5488 |
+
"grad_norm": 0.06570107489824295,
|
| 5489 |
+
"learning_rate": 2.6368715083798885e-05,
|
| 5490 |
+
"loss": 0.9216,
|
| 5491 |
+
"step": 783
|
| 5492 |
+
},
|
| 5493 |
+
{
|
| 5494 |
+
"epoch": 4.355555555555555,
|
| 5495 |
+
"grad_norm": 0.0643831342458725,
|
| 5496 |
+
"learning_rate": 2.6145251396648048e-05,
|
| 5497 |
+
"loss": 0.8306,
|
| 5498 |
+
"step": 784
|
| 5499 |
+
},
|
| 5500 |
+
{
|
| 5501 |
+
"epoch": 4.361111111111111,
|
| 5502 |
+
"grad_norm": 0.06995333731174469,
|
| 5503 |
+
"learning_rate": 2.592178770949721e-05,
|
| 5504 |
+
"loss": 0.8415,
|
| 5505 |
+
"step": 785
|
| 5506 |
+
},
|
| 5507 |
+
{
|
| 5508 |
+
"epoch": 4.366666666666666,
|
| 5509 |
+
"grad_norm": 0.06058323010802269,
|
| 5510 |
+
"learning_rate": 2.569832402234637e-05,
|
| 5511 |
+
"loss": 0.9729,
|
| 5512 |
+
"step": 786
|
| 5513 |
+
},
|
| 5514 |
+
{
|
| 5515 |
+
"epoch": 4.372222222222222,
|
| 5516 |
+
"grad_norm": 0.06180157512426376,
|
| 5517 |
+
"learning_rate": 2.547486033519553e-05,
|
| 5518 |
+
"loss": 0.8585,
|
| 5519 |
+
"step": 787
|
| 5520 |
+
},
|
| 5521 |
+
{
|
| 5522 |
+
"epoch": 4.377777777777778,
|
| 5523 |
+
"grad_norm": 0.07014794647693634,
|
| 5524 |
+
"learning_rate": 2.5251396648044694e-05,
|
| 5525 |
+
"loss": 0.7898,
|
| 5526 |
+
"step": 788
|
| 5527 |
+
},
|
| 5528 |
+
{
|
| 5529 |
+
"epoch": 4.383333333333334,
|
| 5530 |
+
"grad_norm": 0.06525201350450516,
|
| 5531 |
+
"learning_rate": 2.5027932960893856e-05,
|
| 5532 |
+
"loss": 0.8687,
|
| 5533 |
+
"step": 789
|
| 5534 |
+
},
|
| 5535 |
+
{
|
| 5536 |
+
"epoch": 4.388888888888889,
|
| 5537 |
+
"grad_norm": 0.07381299883127213,
|
| 5538 |
+
"learning_rate": 2.4804469273743018e-05,
|
| 5539 |
+
"loss": 0.7705,
|
| 5540 |
+
"step": 790
|
| 5541 |
+
},
|
| 5542 |
+
{
|
| 5543 |
+
"epoch": 4.394444444444445,
|
| 5544 |
+
"grad_norm": 0.06867001950740814,
|
| 5545 |
+
"learning_rate": 2.458100558659218e-05,
|
| 5546 |
+
"loss": 0.8818,
|
| 5547 |
+
"step": 791
|
| 5548 |
+
},
|
| 5549 |
+
{
|
| 5550 |
+
"epoch": 4.4,
|
| 5551 |
+
"grad_norm": 0.08557435870170593,
|
| 5552 |
+
"learning_rate": 2.4357541899441343e-05,
|
| 5553 |
+
"loss": 0.7607,
|
| 5554 |
+
"step": 792
|
| 5555 |
+
},
|
| 5556 |
+
{
|
| 5557 |
+
"epoch": 4.405555555555556,
|
| 5558 |
+
"grad_norm": 0.0627717450261116,
|
| 5559 |
+
"learning_rate": 2.4134078212290502e-05,
|
| 5560 |
+
"loss": 0.8485,
|
| 5561 |
+
"step": 793
|
| 5562 |
+
},
|
| 5563 |
+
{
|
| 5564 |
+
"epoch": 4.411111111111111,
|
| 5565 |
+
"grad_norm": 0.06512073427438736,
|
| 5566 |
+
"learning_rate": 2.3910614525139664e-05,
|
| 5567 |
+
"loss": 0.9801,
|
| 5568 |
+
"step": 794
|
| 5569 |
+
},
|
| 5570 |
+
{
|
| 5571 |
+
"epoch": 4.416666666666667,
|
| 5572 |
+
"grad_norm": 0.07923205196857452,
|
| 5573 |
+
"learning_rate": 2.3687150837988827e-05,
|
| 5574 |
+
"loss": 0.9049,
|
| 5575 |
+
"step": 795
|
| 5576 |
+
},
|
| 5577 |
+
{
|
| 5578 |
+
"epoch": 4.4222222222222225,
|
| 5579 |
+
"grad_norm": 0.06704343855381012,
|
| 5580 |
+
"learning_rate": 2.346368715083799e-05,
|
| 5581 |
+
"loss": 0.8302,
|
| 5582 |
+
"step": 796
|
| 5583 |
+
},
|
| 5584 |
+
{
|
| 5585 |
+
"epoch": 4.427777777777778,
|
| 5586 |
+
"grad_norm": 0.06392168998718262,
|
| 5587 |
+
"learning_rate": 2.324022346368715e-05,
|
| 5588 |
+
"loss": 0.7922,
|
| 5589 |
+
"step": 797
|
| 5590 |
+
},
|
| 5591 |
+
{
|
| 5592 |
+
"epoch": 4.433333333333334,
|
| 5593 |
+
"grad_norm": 0.06558392196893692,
|
| 5594 |
+
"learning_rate": 2.3016759776536314e-05,
|
| 5595 |
+
"loss": 0.8636,
|
| 5596 |
+
"step": 798
|
| 5597 |
+
},
|
| 5598 |
+
{
|
| 5599 |
+
"epoch": 4.438888888888889,
|
| 5600 |
+
"grad_norm": 0.06815050542354584,
|
| 5601 |
+
"learning_rate": 2.2793296089385476e-05,
|
| 5602 |
+
"loss": 0.836,
|
| 5603 |
+
"step": 799
|
| 5604 |
+
},
|
| 5605 |
+
{
|
| 5606 |
+
"epoch": 4.444444444444445,
|
| 5607 |
+
"grad_norm": 0.06234079599380493,
|
| 5608 |
+
"learning_rate": 2.2569832402234638e-05,
|
| 5609 |
+
"loss": 0.863,
|
| 5610 |
+
"step": 800
|
| 5611 |
+
},
|
| 5612 |
+
{
|
| 5613 |
+
"epoch": 4.45,
|
| 5614 |
+
"grad_norm": 0.060776371508836746,
|
| 5615 |
+
"learning_rate": 2.2346368715083797e-05,
|
| 5616 |
+
"loss": 0.768,
|
| 5617 |
+
"step": 801
|
| 5618 |
+
},
|
| 5619 |
+
{
|
| 5620 |
+
"epoch": 4.455555555555556,
|
| 5621 |
+
"grad_norm": 0.06404553353786469,
|
| 5622 |
+
"learning_rate": 2.212290502793296e-05,
|
| 5623 |
+
"loss": 0.8383,
|
| 5624 |
+
"step": 802
|
| 5625 |
+
},
|
| 5626 |
+
{
|
| 5627 |
+
"epoch": 4.461111111111111,
|
| 5628 |
+
"grad_norm": 0.0616544634103775,
|
| 5629 |
+
"learning_rate": 2.1899441340782122e-05,
|
| 5630 |
+
"loss": 0.9098,
|
| 5631 |
+
"step": 803
|
| 5632 |
+
},
|
| 5633 |
+
{
|
| 5634 |
+
"epoch": 4.466666666666667,
|
| 5635 |
+
"grad_norm": 0.06308100372552872,
|
| 5636 |
+
"learning_rate": 2.1675977653631284e-05,
|
| 5637 |
+
"loss": 0.838,
|
| 5638 |
+
"step": 804
|
| 5639 |
+
},
|
| 5640 |
+
{
|
| 5641 |
+
"epoch": 4.472222222222222,
|
| 5642 |
+
"grad_norm": 0.06575177609920502,
|
| 5643 |
+
"learning_rate": 2.1452513966480446e-05,
|
| 5644 |
+
"loss": 0.9344,
|
| 5645 |
+
"step": 805
|
| 5646 |
+
},
|
| 5647 |
+
{
|
| 5648 |
+
"epoch": 4.477777777777778,
|
| 5649 |
+
"grad_norm": 0.073246531188488,
|
| 5650 |
+
"learning_rate": 2.122905027932961e-05,
|
| 5651 |
+
"loss": 0.8616,
|
| 5652 |
+
"step": 806
|
| 5653 |
+
},
|
| 5654 |
+
{
|
| 5655 |
+
"epoch": 4.483333333333333,
|
| 5656 |
+
"grad_norm": 0.06593457609415054,
|
| 5657 |
+
"learning_rate": 2.100558659217877e-05,
|
| 5658 |
+
"loss": 0.7611,
|
| 5659 |
+
"step": 807
|
| 5660 |
+
},
|
| 5661 |
+
{
|
| 5662 |
+
"epoch": 4.488888888888889,
|
| 5663 |
+
"grad_norm": 0.06681575626134872,
|
| 5664 |
+
"learning_rate": 2.0782122905027933e-05,
|
| 5665 |
+
"loss": 0.913,
|
| 5666 |
+
"step": 808
|
| 5667 |
+
},
|
| 5668 |
+
{
|
| 5669 |
+
"epoch": 4.4944444444444445,
|
| 5670 |
+
"grad_norm": 0.07066091895103455,
|
| 5671 |
+
"learning_rate": 2.0558659217877096e-05,
|
| 5672 |
+
"loss": 0.8538,
|
| 5673 |
+
"step": 809
|
| 5674 |
+
},
|
| 5675 |
+
{
|
| 5676 |
+
"epoch": 4.5,
|
| 5677 |
+
"grad_norm": 0.08290861546993256,
|
| 5678 |
+
"learning_rate": 2.0335195530726258e-05,
|
| 5679 |
+
"loss": 0.8322,
|
| 5680 |
+
"step": 810
|
| 5681 |
+
},
|
| 5682 |
+
{
|
| 5683 |
+
"epoch": 4.5055555555555555,
|
| 5684 |
+
"grad_norm": 0.07008577138185501,
|
| 5685 |
+
"learning_rate": 2.011173184357542e-05,
|
| 5686 |
+
"loss": 0.7787,
|
| 5687 |
+
"step": 811
|
| 5688 |
+
},
|
| 5689 |
+
{
|
| 5690 |
+
"epoch": 4.511111111111111,
|
| 5691 |
+
"grad_norm": 0.05752347782254219,
|
| 5692 |
+
"learning_rate": 1.9888268156424583e-05,
|
| 5693 |
+
"loss": 0.8318,
|
| 5694 |
+
"step": 812
|
| 5695 |
+
},
|
| 5696 |
+
{
|
| 5697 |
+
"epoch": 4.516666666666667,
|
| 5698 |
+
"grad_norm": 0.06387963891029358,
|
| 5699 |
+
"learning_rate": 1.9664804469273742e-05,
|
| 5700 |
+
"loss": 0.9929,
|
| 5701 |
+
"step": 813
|
| 5702 |
+
},
|
| 5703 |
+
{
|
| 5704 |
+
"epoch": 4.522222222222222,
|
| 5705 |
+
"grad_norm": 0.07318349927663803,
|
| 5706 |
+
"learning_rate": 1.9441340782122904e-05,
|
| 5707 |
+
"loss": 0.7613,
|
| 5708 |
+
"step": 814
|
| 5709 |
+
},
|
| 5710 |
+
{
|
| 5711 |
+
"epoch": 4.527777777777778,
|
| 5712 |
+
"grad_norm": 0.06772953271865845,
|
| 5713 |
+
"learning_rate": 1.9217877094972066e-05,
|
| 5714 |
+
"loss": 0.8136,
|
| 5715 |
+
"step": 815
|
| 5716 |
+
},
|
| 5717 |
+
{
|
| 5718 |
+
"epoch": 4.533333333333333,
|
| 5719 |
+
"grad_norm": 0.07693421840667725,
|
| 5720 |
+
"learning_rate": 1.899441340782123e-05,
|
| 5721 |
+
"loss": 0.9406,
|
| 5722 |
+
"step": 816
|
| 5723 |
+
},
|
| 5724 |
+
{
|
| 5725 |
+
"epoch": 4.538888888888889,
|
| 5726 |
+
"grad_norm": 0.06594596058130264,
|
| 5727 |
+
"learning_rate": 1.877094972067039e-05,
|
| 5728 |
+
"loss": 0.8637,
|
| 5729 |
+
"step": 817
|
| 5730 |
+
},
|
| 5731 |
+
{
|
| 5732 |
+
"epoch": 4.544444444444444,
|
| 5733 |
+
"grad_norm": 0.06739532947540283,
|
| 5734 |
+
"learning_rate": 1.8547486033519553e-05,
|
| 5735 |
+
"loss": 0.9043,
|
| 5736 |
+
"step": 818
|
| 5737 |
+
},
|
| 5738 |
+
{
|
| 5739 |
+
"epoch": 4.55,
|
| 5740 |
+
"grad_norm": 0.07356348633766174,
|
| 5741 |
+
"learning_rate": 1.8324022346368716e-05,
|
| 5742 |
+
"loss": 0.9696,
|
| 5743 |
+
"step": 819
|
| 5744 |
+
},
|
| 5745 |
+
{
|
| 5746 |
+
"epoch": 4.555555555555555,
|
| 5747 |
+
"grad_norm": 0.07079844176769257,
|
| 5748 |
+
"learning_rate": 1.8100558659217878e-05,
|
| 5749 |
+
"loss": 0.9524,
|
| 5750 |
+
"step": 820
|
| 5751 |
+
},
|
| 5752 |
+
{
|
| 5753 |
+
"epoch": 4.561111111111111,
|
| 5754 |
+
"grad_norm": 0.07685678452253342,
|
| 5755 |
+
"learning_rate": 1.787709497206704e-05,
|
| 5756 |
+
"loss": 0.7816,
|
| 5757 |
+
"step": 821
|
| 5758 |
+
},
|
| 5759 |
+
{
|
| 5760 |
+
"epoch": 4.566666666666666,
|
| 5761 |
+
"grad_norm": 0.06801366806030273,
|
| 5762 |
+
"learning_rate": 1.7653631284916203e-05,
|
| 5763 |
+
"loss": 0.7547,
|
| 5764 |
+
"step": 822
|
| 5765 |
+
},
|
| 5766 |
+
{
|
| 5767 |
+
"epoch": 4.572222222222222,
|
| 5768 |
+
"grad_norm": 0.06588180363178253,
|
| 5769 |
+
"learning_rate": 1.7430167597765365e-05,
|
| 5770 |
+
"loss": 0.8329,
|
| 5771 |
+
"step": 823
|
| 5772 |
+
},
|
| 5773 |
+
{
|
| 5774 |
+
"epoch": 4.5777777777777775,
|
| 5775 |
+
"grad_norm": 0.06872644275426865,
|
| 5776 |
+
"learning_rate": 1.7206703910614527e-05,
|
| 5777 |
+
"loss": 0.8596,
|
| 5778 |
+
"step": 824
|
| 5779 |
+
},
|
| 5780 |
+
{
|
| 5781 |
+
"epoch": 4.583333333333333,
|
| 5782 |
+
"grad_norm": 0.07037709653377533,
|
| 5783 |
+
"learning_rate": 1.6983240223463686e-05,
|
| 5784 |
+
"loss": 0.8422,
|
| 5785 |
+
"step": 825
|
| 5786 |
+
},
|
| 5787 |
+
{
|
| 5788 |
+
"epoch": 4.588888888888889,
|
| 5789 |
+
"grad_norm": 0.05817841365933418,
|
| 5790 |
+
"learning_rate": 1.675977653631285e-05,
|
| 5791 |
+
"loss": 0.9255,
|
| 5792 |
+
"step": 826
|
| 5793 |
+
},
|
| 5794 |
+
{
|
| 5795 |
+
"epoch": 4.594444444444444,
|
| 5796 |
+
"grad_norm": 0.06256680935621262,
|
| 5797 |
+
"learning_rate": 1.653631284916201e-05,
|
| 5798 |
+
"loss": 0.9732,
|
| 5799 |
+
"step": 827
|
| 5800 |
+
},
|
| 5801 |
+
{
|
| 5802 |
+
"epoch": 4.6,
|
| 5803 |
+
"grad_norm": 0.06470783799886703,
|
| 5804 |
+
"learning_rate": 1.6312849162011173e-05,
|
| 5805 |
+
"loss": 1.0082,
|
| 5806 |
+
"step": 828
|
| 5807 |
+
},
|
| 5808 |
+
{
|
| 5809 |
+
"epoch": 4.605555555555555,
|
| 5810 |
+
"grad_norm": 0.06321726739406586,
|
| 5811 |
+
"learning_rate": 1.6089385474860336e-05,
|
| 5812 |
+
"loss": 0.7958,
|
| 5813 |
+
"step": 829
|
| 5814 |
+
},
|
| 5815 |
+
{
|
| 5816 |
+
"epoch": 4.611111111111111,
|
| 5817 |
+
"grad_norm": 0.06101881340146065,
|
| 5818 |
+
"learning_rate": 1.5865921787709498e-05,
|
| 5819 |
+
"loss": 0.9288,
|
| 5820 |
+
"step": 830
|
| 5821 |
+
},
|
| 5822 |
+
{
|
| 5823 |
+
"epoch": 4.616666666666667,
|
| 5824 |
+
"grad_norm": 0.061096347868442535,
|
| 5825 |
+
"learning_rate": 1.564245810055866e-05,
|
| 5826 |
+
"loss": 0.8065,
|
| 5827 |
+
"step": 831
|
| 5828 |
+
},
|
| 5829 |
+
{
|
| 5830 |
+
"epoch": 4.622222222222222,
|
| 5831 |
+
"grad_norm": 0.07092749327421188,
|
| 5832 |
+
"learning_rate": 1.5418994413407823e-05,
|
| 5833 |
+
"loss": 0.9874,
|
| 5834 |
+
"step": 832
|
| 5835 |
+
},
|
| 5836 |
+
{
|
| 5837 |
+
"epoch": 4.627777777777778,
|
| 5838 |
+
"grad_norm": 0.06334253400564194,
|
| 5839 |
+
"learning_rate": 1.5195530726256985e-05,
|
| 5840 |
+
"loss": 0.8788,
|
| 5841 |
+
"step": 833
|
| 5842 |
+
},
|
| 5843 |
+
{
|
| 5844 |
+
"epoch": 4.633333333333333,
|
| 5845 |
+
"grad_norm": 0.06317714601755142,
|
| 5846 |
+
"learning_rate": 1.4972067039106147e-05,
|
| 5847 |
+
"loss": 0.864,
|
| 5848 |
+
"step": 834
|
| 5849 |
+
},
|
| 5850 |
+
{
|
| 5851 |
+
"epoch": 4.638888888888889,
|
| 5852 |
+
"grad_norm": 0.06688254326581955,
|
| 5853 |
+
"learning_rate": 1.474860335195531e-05,
|
| 5854 |
+
"loss": 0.9415,
|
| 5855 |
+
"step": 835
|
| 5856 |
+
},
|
| 5857 |
+
{
|
| 5858 |
+
"epoch": 4.644444444444445,
|
| 5859 |
+
"grad_norm": 0.07368700951337814,
|
| 5860 |
+
"learning_rate": 1.4525139664804472e-05,
|
| 5861 |
+
"loss": 0.8026,
|
| 5862 |
+
"step": 836
|
| 5863 |
+
},
|
| 5864 |
+
{
|
| 5865 |
+
"epoch": 4.65,
|
| 5866 |
+
"grad_norm": 0.06767589598894119,
|
| 5867 |
+
"learning_rate": 1.4301675977653631e-05,
|
| 5868 |
+
"loss": 0.7902,
|
| 5869 |
+
"step": 837
|
| 5870 |
+
},
|
| 5871 |
+
{
|
| 5872 |
+
"epoch": 4.655555555555556,
|
| 5873 |
+
"grad_norm": 0.06546707451343536,
|
| 5874 |
+
"learning_rate": 1.4078212290502793e-05,
|
| 5875 |
+
"loss": 0.8749,
|
| 5876 |
+
"step": 838
|
| 5877 |
+
},
|
| 5878 |
+
{
|
| 5879 |
+
"epoch": 4.661111111111111,
|
| 5880 |
+
"grad_norm": 0.06208932027220726,
|
| 5881 |
+
"learning_rate": 1.3854748603351956e-05,
|
| 5882 |
+
"loss": 0.8563,
|
| 5883 |
+
"step": 839
|
| 5884 |
+
},
|
| 5885 |
+
{
|
| 5886 |
+
"epoch": 4.666666666666667,
|
| 5887 |
+
"grad_norm": 0.07527874410152435,
|
| 5888 |
+
"learning_rate": 1.3631284916201118e-05,
|
| 5889 |
+
"loss": 0.8443,
|
| 5890 |
+
"step": 840
|
| 5891 |
+
},
|
| 5892 |
+
{
|
| 5893 |
+
"epoch": 4.6722222222222225,
|
| 5894 |
+
"grad_norm": 0.06952167302370071,
|
| 5895 |
+
"learning_rate": 1.340782122905028e-05,
|
| 5896 |
+
"loss": 0.8012,
|
| 5897 |
+
"step": 841
|
| 5898 |
+
},
|
| 5899 |
+
{
|
| 5900 |
+
"epoch": 4.677777777777778,
|
| 5901 |
+
"grad_norm": 0.06032046675682068,
|
| 5902 |
+
"learning_rate": 1.3184357541899443e-05,
|
| 5903 |
+
"loss": 0.8254,
|
| 5904 |
+
"step": 842
|
| 5905 |
+
},
|
| 5906 |
+
{
|
| 5907 |
+
"epoch": 4.683333333333334,
|
| 5908 |
+
"grad_norm": 0.057982437312603,
|
| 5909 |
+
"learning_rate": 1.2960893854748605e-05,
|
| 5910 |
+
"loss": 0.9092,
|
| 5911 |
+
"step": 843
|
| 5912 |
+
},
|
| 5913 |
+
{
|
| 5914 |
+
"epoch": 4.688888888888889,
|
| 5915 |
+
"grad_norm": 0.07537980377674103,
|
| 5916 |
+
"learning_rate": 1.2737430167597766e-05,
|
| 5917 |
+
"loss": 0.8206,
|
| 5918 |
+
"step": 844
|
| 5919 |
+
},
|
| 5920 |
+
{
|
| 5921 |
+
"epoch": 4.694444444444445,
|
| 5922 |
+
"grad_norm": 0.0689520314335823,
|
| 5923 |
+
"learning_rate": 1.2513966480446928e-05,
|
| 5924 |
+
"loss": 0.8488,
|
| 5925 |
+
"step": 845
|
| 5926 |
+
},
|
| 5927 |
+
{
|
| 5928 |
+
"epoch": 4.7,
|
| 5929 |
+
"grad_norm": 0.0652664303779602,
|
| 5930 |
+
"learning_rate": 1.229050279329609e-05,
|
| 5931 |
+
"loss": 0.8944,
|
| 5932 |
+
"step": 846
|
| 5933 |
+
},
|
| 5934 |
+
{
|
| 5935 |
+
"epoch": 4.705555555555556,
|
| 5936 |
+
"grad_norm": 0.05868719518184662,
|
| 5937 |
+
"learning_rate": 1.2067039106145251e-05,
|
| 5938 |
+
"loss": 0.8372,
|
| 5939 |
+
"step": 847
|
| 5940 |
+
},
|
| 5941 |
+
{
|
| 5942 |
+
"epoch": 4.711111111111111,
|
| 5943 |
+
"grad_norm": 0.061663344502449036,
|
| 5944 |
+
"learning_rate": 1.1843575418994413e-05,
|
| 5945 |
+
"loss": 0.8319,
|
| 5946 |
+
"step": 848
|
| 5947 |
+
},
|
| 5948 |
+
{
|
| 5949 |
+
"epoch": 4.716666666666667,
|
| 5950 |
+
"grad_norm": 0.06301644444465637,
|
| 5951 |
+
"learning_rate": 1.1620111731843576e-05,
|
| 5952 |
+
"loss": 0.8292,
|
| 5953 |
+
"step": 849
|
| 5954 |
+
},
|
| 5955 |
+
{
|
| 5956 |
+
"epoch": 4.722222222222222,
|
| 5957 |
+
"grad_norm": 0.06448386609554291,
|
| 5958 |
+
"learning_rate": 1.1396648044692738e-05,
|
| 5959 |
+
"loss": 0.9751,
|
| 5960 |
+
"step": 850
|
| 5961 |
+
},
|
| 5962 |
+
{
|
| 5963 |
+
"epoch": 4.727777777777778,
|
| 5964 |
+
"grad_norm": 0.06298605352640152,
|
| 5965 |
+
"learning_rate": 1.1173184357541899e-05,
|
| 5966 |
+
"loss": 0.9132,
|
| 5967 |
+
"step": 851
|
| 5968 |
+
},
|
| 5969 |
+
{
|
| 5970 |
+
"epoch": 4.733333333333333,
|
| 5971 |
+
"grad_norm": 0.0622861348092556,
|
| 5972 |
+
"learning_rate": 1.0949720670391061e-05,
|
| 5973 |
+
"loss": 0.851,
|
| 5974 |
+
"step": 852
|
| 5975 |
+
},
|
| 5976 |
+
{
|
| 5977 |
+
"epoch": 4.738888888888889,
|
| 5978 |
+
"grad_norm": 0.07923610508441925,
|
| 5979 |
+
"learning_rate": 1.0726256983240223e-05,
|
| 5980 |
+
"loss": 0.8115,
|
| 5981 |
+
"step": 853
|
| 5982 |
+
},
|
| 5983 |
+
{
|
| 5984 |
+
"epoch": 4.7444444444444445,
|
| 5985 |
+
"grad_norm": 0.06684275716543198,
|
| 5986 |
+
"learning_rate": 1.0502793296089386e-05,
|
| 5987 |
+
"loss": 0.8796,
|
| 5988 |
+
"step": 854
|
| 5989 |
+
},
|
| 5990 |
+
{
|
| 5991 |
+
"epoch": 4.75,
|
| 5992 |
+
"grad_norm": 0.058858904987573624,
|
| 5993 |
+
"learning_rate": 1.0279329608938548e-05,
|
| 5994 |
+
"loss": 0.786,
|
| 5995 |
+
"step": 855
|
| 5996 |
+
},
|
| 5997 |
+
{
|
| 5998 |
+
"epoch": 4.7555555555555555,
|
| 5999 |
+
"grad_norm": 0.05823403596878052,
|
| 6000 |
+
"learning_rate": 1.005586592178771e-05,
|
| 6001 |
+
"loss": 0.9475,
|
| 6002 |
+
"step": 856
|
| 6003 |
+
},
|
| 6004 |
+
{
|
| 6005 |
+
"epoch": 4.761111111111111,
|
| 6006 |
+
"grad_norm": 0.06352592259645462,
|
| 6007 |
+
"learning_rate": 9.832402234636871e-06,
|
| 6008 |
+
"loss": 0.8574,
|
| 6009 |
+
"step": 857
|
| 6010 |
+
},
|
| 6011 |
+
{
|
| 6012 |
+
"epoch": 4.766666666666667,
|
| 6013 |
+
"grad_norm": 0.05929254740476608,
|
| 6014 |
+
"learning_rate": 9.608938547486033e-06,
|
| 6015 |
+
"loss": 0.7926,
|
| 6016 |
+
"step": 858
|
| 6017 |
+
},
|
| 6018 |
+
{
|
| 6019 |
+
"epoch": 4.772222222222222,
|
| 6020 |
+
"grad_norm": 0.07241322845220566,
|
| 6021 |
+
"learning_rate": 9.385474860335196e-06,
|
| 6022 |
+
"loss": 0.8635,
|
| 6023 |
+
"step": 859
|
| 6024 |
+
},
|
| 6025 |
+
{
|
| 6026 |
+
"epoch": 4.777777777777778,
|
| 6027 |
+
"grad_norm": 0.066829152405262,
|
| 6028 |
+
"learning_rate": 9.162011173184358e-06,
|
| 6029 |
+
"loss": 0.8275,
|
| 6030 |
+
"step": 860
|
| 6031 |
+
},
|
| 6032 |
+
{
|
| 6033 |
+
"epoch": 4.783333333333333,
|
| 6034 |
+
"grad_norm": 0.05805254727602005,
|
| 6035 |
+
"learning_rate": 8.93854748603352e-06,
|
| 6036 |
+
"loss": 0.9695,
|
| 6037 |
+
"step": 861
|
| 6038 |
+
},
|
| 6039 |
+
{
|
| 6040 |
+
"epoch": 4.788888888888889,
|
| 6041 |
+
"grad_norm": 0.06599583476781845,
|
| 6042 |
+
"learning_rate": 8.715083798882683e-06,
|
| 6043 |
+
"loss": 0.9317,
|
| 6044 |
+
"step": 862
|
| 6045 |
+
},
|
| 6046 |
+
{
|
| 6047 |
+
"epoch": 4.794444444444444,
|
| 6048 |
+
"grad_norm": 0.0690189003944397,
|
| 6049 |
+
"learning_rate": 8.491620111731843e-06,
|
| 6050 |
+
"loss": 0.719,
|
| 6051 |
+
"step": 863
|
| 6052 |
+
},
|
| 6053 |
+
{
|
| 6054 |
+
"epoch": 4.8,
|
| 6055 |
+
"grad_norm": 0.0718541145324707,
|
| 6056 |
+
"learning_rate": 8.268156424581006e-06,
|
| 6057 |
+
"loss": 0.9752,
|
| 6058 |
+
"step": 864
|
| 6059 |
+
},
|
| 6060 |
+
{
|
| 6061 |
+
"epoch": 4.805555555555555,
|
| 6062 |
+
"grad_norm": 0.06498520821332932,
|
| 6063 |
+
"learning_rate": 8.044692737430168e-06,
|
| 6064 |
+
"loss": 0.8896,
|
| 6065 |
+
"step": 865
|
| 6066 |
+
},
|
| 6067 |
+
{
|
| 6068 |
+
"epoch": 4.811111111111111,
|
| 6069 |
+
"grad_norm": 0.06741782277822495,
|
| 6070 |
+
"learning_rate": 7.82122905027933e-06,
|
| 6071 |
+
"loss": 0.7783,
|
| 6072 |
+
"step": 866
|
| 6073 |
+
},
|
| 6074 |
+
{
|
| 6075 |
+
"epoch": 4.816666666666666,
|
| 6076 |
+
"grad_norm": 0.05603065341711044,
|
| 6077 |
+
"learning_rate": 7.5977653631284925e-06,
|
| 6078 |
+
"loss": 0.7643,
|
| 6079 |
+
"step": 867
|
| 6080 |
+
},
|
| 6081 |
+
{
|
| 6082 |
+
"epoch": 4.822222222222222,
|
| 6083 |
+
"grad_norm": 0.07300734519958496,
|
| 6084 |
+
"learning_rate": 7.374301675977655e-06,
|
| 6085 |
+
"loss": 0.8683,
|
| 6086 |
+
"step": 868
|
| 6087 |
+
},
|
| 6088 |
+
{
|
| 6089 |
+
"epoch": 4.8277777777777775,
|
| 6090 |
+
"grad_norm": 0.06486313790082932,
|
| 6091 |
+
"learning_rate": 7.1508379888268155e-06,
|
| 6092 |
+
"loss": 0.9376,
|
| 6093 |
+
"step": 869
|
| 6094 |
+
},
|
| 6095 |
+
{
|
| 6096 |
+
"epoch": 4.833333333333333,
|
| 6097 |
+
"grad_norm": 0.06596938520669937,
|
| 6098 |
+
"learning_rate": 6.927374301675978e-06,
|
| 6099 |
+
"loss": 0.7932,
|
| 6100 |
+
"step": 870
|
| 6101 |
+
},
|
| 6102 |
+
{
|
| 6103 |
+
"epoch": 4.838888888888889,
|
| 6104 |
+
"grad_norm": 0.06245023012161255,
|
| 6105 |
+
"learning_rate": 6.70391061452514e-06,
|
| 6106 |
+
"loss": 0.7901,
|
| 6107 |
+
"step": 871
|
| 6108 |
+
},
|
| 6109 |
+
{
|
| 6110 |
+
"epoch": 4.844444444444444,
|
| 6111 |
+
"grad_norm": 0.058026690036058426,
|
| 6112 |
+
"learning_rate": 6.4804469273743025e-06,
|
| 6113 |
+
"loss": 0.7906,
|
| 6114 |
+
"step": 872
|
| 6115 |
+
},
|
| 6116 |
+
{
|
| 6117 |
+
"epoch": 4.85,
|
| 6118 |
+
"grad_norm": 0.07333751022815704,
|
| 6119 |
+
"learning_rate": 6.256983240223464e-06,
|
| 6120 |
+
"loss": 0.7123,
|
| 6121 |
+
"step": 873
|
| 6122 |
+
},
|
| 6123 |
+
{
|
| 6124 |
+
"epoch": 4.855555555555555,
|
| 6125 |
+
"grad_norm": 0.0697869285941124,
|
| 6126 |
+
"learning_rate": 6.0335195530726255e-06,
|
| 6127 |
+
"loss": 0.8697,
|
| 6128 |
+
"step": 874
|
| 6129 |
+
},
|
| 6130 |
+
{
|
| 6131 |
+
"epoch": 4.861111111111111,
|
| 6132 |
+
"grad_norm": 0.06364396214485168,
|
| 6133 |
+
"learning_rate": 5.810055865921788e-06,
|
| 6134 |
+
"loss": 0.9212,
|
| 6135 |
+
"step": 875
|
| 6136 |
+
},
|
| 6137 |
+
{
|
| 6138 |
+
"epoch": 4.866666666666667,
|
| 6139 |
+
"grad_norm": 0.07756305485963821,
|
| 6140 |
+
"learning_rate": 5.586592178770949e-06,
|
| 6141 |
+
"loss": 0.7329,
|
| 6142 |
+
"step": 876
|
| 6143 |
+
},
|
| 6144 |
+
{
|
| 6145 |
+
"epoch": 4.872222222222222,
|
| 6146 |
+
"grad_norm": 0.07673313468694687,
|
| 6147 |
+
"learning_rate": 5.363128491620112e-06,
|
| 6148 |
+
"loss": 0.9544,
|
| 6149 |
+
"step": 877
|
| 6150 |
+
},
|
| 6151 |
+
{
|
| 6152 |
+
"epoch": 4.877777777777778,
|
| 6153 |
+
"grad_norm": 0.06823701411485672,
|
| 6154 |
+
"learning_rate": 5.139664804469274e-06,
|
| 6155 |
+
"loss": 0.8417,
|
| 6156 |
+
"step": 878
|
| 6157 |
+
},
|
| 6158 |
+
{
|
| 6159 |
+
"epoch": 4.883333333333333,
|
| 6160 |
+
"grad_norm": 0.06566136330366135,
|
| 6161 |
+
"learning_rate": 4.9162011173184354e-06,
|
| 6162 |
+
"loss": 0.9043,
|
| 6163 |
+
"step": 879
|
| 6164 |
+
},
|
| 6165 |
+
{
|
| 6166 |
+
"epoch": 4.888888888888889,
|
| 6167 |
+
"grad_norm": 0.07561615109443665,
|
| 6168 |
+
"learning_rate": 4.692737430167598e-06,
|
| 6169 |
+
"loss": 0.9289,
|
| 6170 |
+
"step": 880
|
| 6171 |
+
},
|
| 6172 |
+
{
|
| 6173 |
+
"epoch": 4.894444444444445,
|
| 6174 |
+
"grad_norm": 0.0714927464723587,
|
| 6175 |
+
"learning_rate": 4.46927374301676e-06,
|
| 6176 |
+
"loss": 0.9992,
|
| 6177 |
+
"step": 881
|
| 6178 |
+
},
|
| 6179 |
+
{
|
| 6180 |
+
"epoch": 4.9,
|
| 6181 |
+
"grad_norm": 0.07412128895521164,
|
| 6182 |
+
"learning_rate": 4.245810055865922e-06,
|
| 6183 |
+
"loss": 0.8937,
|
| 6184 |
+
"step": 882
|
| 6185 |
+
},
|
| 6186 |
+
{
|
| 6187 |
+
"epoch": 4.905555555555556,
|
| 6188 |
+
"grad_norm": 0.06256308406591415,
|
| 6189 |
+
"learning_rate": 4.022346368715084e-06,
|
| 6190 |
+
"loss": 0.9327,
|
| 6191 |
+
"step": 883
|
| 6192 |
+
},
|
| 6193 |
+
{
|
| 6194 |
+
"epoch": 4.911111111111111,
|
| 6195 |
+
"grad_norm": 0.06401494145393372,
|
| 6196 |
+
"learning_rate": 3.7988826815642463e-06,
|
| 6197 |
+
"loss": 0.9113,
|
| 6198 |
+
"step": 884
|
| 6199 |
+
},
|
| 6200 |
+
{
|
| 6201 |
+
"epoch": 4.916666666666667,
|
| 6202 |
+
"grad_norm": 0.06480543315410614,
|
| 6203 |
+
"learning_rate": 3.5754189944134077e-06,
|
| 6204 |
+
"loss": 0.8912,
|
| 6205 |
+
"step": 885
|
| 6206 |
+
},
|
| 6207 |
+
{
|
| 6208 |
+
"epoch": 4.9222222222222225,
|
| 6209 |
+
"grad_norm": 0.06543062627315521,
|
| 6210 |
+
"learning_rate": 3.35195530726257e-06,
|
| 6211 |
+
"loss": 0.8768,
|
| 6212 |
+
"step": 886
|
| 6213 |
+
},
|
| 6214 |
+
{
|
| 6215 |
+
"epoch": 4.927777777777778,
|
| 6216 |
+
"grad_norm": 0.057017501443624496,
|
| 6217 |
+
"learning_rate": 3.128491620111732e-06,
|
| 6218 |
+
"loss": 0.7779,
|
| 6219 |
+
"step": 887
|
| 6220 |
+
},
|
| 6221 |
+
{
|
| 6222 |
+
"epoch": 4.933333333333334,
|
| 6223 |
+
"grad_norm": 0.0745752677321434,
|
| 6224 |
+
"learning_rate": 2.905027932960894e-06,
|
| 6225 |
+
"loss": 0.8376,
|
| 6226 |
+
"step": 888
|
| 6227 |
+
},
|
| 6228 |
+
{
|
| 6229 |
+
"epoch": 4.938888888888889,
|
| 6230 |
+
"grad_norm": 0.06534884124994278,
|
| 6231 |
+
"learning_rate": 2.681564245810056e-06,
|
| 6232 |
+
"loss": 0.8647,
|
| 6233 |
+
"step": 889
|
| 6234 |
+
},
|
| 6235 |
+
{
|
| 6236 |
+
"epoch": 4.944444444444445,
|
| 6237 |
+
"grad_norm": 0.07408112287521362,
|
| 6238 |
+
"learning_rate": 2.4581005586592177e-06,
|
| 6239 |
+
"loss": 0.8212,
|
| 6240 |
+
"step": 890
|
| 6241 |
+
},
|
| 6242 |
+
{
|
| 6243 |
+
"epoch": 4.95,
|
| 6244 |
+
"grad_norm": 0.06152744218707085,
|
| 6245 |
+
"learning_rate": 2.23463687150838e-06,
|
| 6246 |
+
"loss": 0.9138,
|
| 6247 |
+
"step": 891
|
| 6248 |
+
},
|
| 6249 |
+
{
|
| 6250 |
+
"epoch": 4.955555555555556,
|
| 6251 |
+
"grad_norm": 0.06905697286128998,
|
| 6252 |
+
"learning_rate": 2.011173184357542e-06,
|
| 6253 |
+
"loss": 0.9246,
|
| 6254 |
+
"step": 892
|
| 6255 |
+
},
|
| 6256 |
+
{
|
| 6257 |
+
"epoch": 4.961111111111111,
|
| 6258 |
+
"grad_norm": 0.06602156907320023,
|
| 6259 |
+
"learning_rate": 1.7877094972067039e-06,
|
| 6260 |
+
"loss": 0.8739,
|
| 6261 |
+
"step": 893
|
| 6262 |
+
},
|
| 6263 |
+
{
|
| 6264 |
+
"epoch": 4.966666666666667,
|
| 6265 |
+
"grad_norm": 0.08502914756536484,
|
| 6266 |
+
"learning_rate": 1.564245810055866e-06,
|
| 6267 |
+
"loss": 0.7435,
|
| 6268 |
+
"step": 894
|
| 6269 |
+
},
|
| 6270 |
+
{
|
| 6271 |
+
"epoch": 4.972222222222222,
|
| 6272 |
+
"grad_norm": 0.06289546936750412,
|
| 6273 |
+
"learning_rate": 1.340782122905028e-06,
|
| 6274 |
+
"loss": 0.864,
|
| 6275 |
+
"step": 895
|
| 6276 |
+
},
|
| 6277 |
+
{
|
| 6278 |
+
"epoch": 4.977777777777778,
|
| 6279 |
+
"grad_norm": 0.05942307412624359,
|
| 6280 |
+
"learning_rate": 1.11731843575419e-06,
|
| 6281 |
+
"loss": 0.8965,
|
| 6282 |
+
"step": 896
|
| 6283 |
+
},
|
| 6284 |
+
{
|
| 6285 |
+
"epoch": 4.983333333333333,
|
| 6286 |
+
"grad_norm": 0.056906238198280334,
|
| 6287 |
+
"learning_rate": 8.938547486033519e-07,
|
| 6288 |
+
"loss": 0.8629,
|
| 6289 |
+
"step": 897
|
| 6290 |
+
},
|
| 6291 |
+
{
|
| 6292 |
+
"epoch": 4.988888888888889,
|
| 6293 |
+
"grad_norm": 0.06804513931274414,
|
| 6294 |
+
"learning_rate": 6.70391061452514e-07,
|
| 6295 |
+
"loss": 0.7078,
|
| 6296 |
+
"step": 898
|
| 6297 |
+
},
|
| 6298 |
+
{
|
| 6299 |
+
"epoch": 4.9944444444444445,
|
| 6300 |
+
"grad_norm": 0.05733992159366608,
|
| 6301 |
+
"learning_rate": 4.4692737430167597e-07,
|
| 6302 |
+
"loss": 0.882,
|
| 6303 |
+
"step": 899
|
| 6304 |
+
},
|
| 6305 |
+
{
|
| 6306 |
+
"epoch": 5.0,
|
| 6307 |
+
"grad_norm": 0.05951790511608124,
|
| 6308 |
+
"learning_rate": 2.2346368715083798e-07,
|
| 6309 |
+
"loss": 0.8055,
|
| 6310 |
+
"step": 900
|
| 6311 |
}
|
| 6312 |
],
| 6313 |
"logging_steps": 1,
|
|
|
| 6322 |
"should_evaluate": false,
| 6323 |
"should_log": false,
| 6324 |
"should_save": true,
| 6325 |
+"should_training_stop": true
| 6326 |
},
| 6327 |
"attributes": {}
| 6328 |
}
| 6329 |
},
| 6330 |
+"total_flos": 9.924638745821184e+18,
| 6331 |
"train_batch_size": 8,
| 6332 |
"trial_name": null,
| 6333 |
"trial_params": null