Training in progress, step 732, checkpoint
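This commit updates a mid-run training checkpoint: the Git LFS pointers for the model weights, optimizer, scheduler, and RNG state, plus trainer_state.json, which records the logged history up to step 732. As a minimal sketch (assuming last-checkpoint/trainer_state.json from this commit has been downloaded locally), the state file can be inspected directly:

import json

# Minimal sketch: read the checkpoint's trainer state (path as used in this commit).
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"], state["epoch"])  # 732 and ~0.6006 for this checkpoint

# log_history holds one dict per logged step; training entries carry a "loss" key.
losses = [(entry["step"], entry["loss"]) for entry in state["log_history"] if "loss" in entry]
print(losses[-3:])  # the most recently logged training losses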
last-checkpoint/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4ac9db2df3fcec7d21404d7e78a9458f89ac255187bdec9999e86bb6ffd3501b
 size 3582214344
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c082f0202bd71bbc965f23b88a35d3b091aeae31fa71ac7937d425c593e70a94
 size 5116251049
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d569a9c96ff4e89d32dfe8ca0b97952cc2edcd76f89c14caa7e57238c4b46c8a
 size 14645
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:180cffdfabbf3d5cc6612d26a0cdfcdae1ae274eeb15bb76130b59c9aebd0caf
 size 1465
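Each of the four files above is stored through Git LFS, so the diff only touches the pointer text: oid sha256 is the SHA-256 digest of the real file and size is its byte count. A minimal sketch for checking a downloaded file against the pointer recorded in this commit (path and expected hash taken from the model.safetensors diff above):

import hashlib

# Expected digest copied from the +oid line in the model.safetensors pointer above.
expected_oid = "4ac9db2df3fcec7d21404d7e78a9458f89ac255187bdec9999e86bb6ffd3501b"

# Hash the downloaded weights in 1 MiB chunks and compare against the LFS oid.
sha = hashlib.sha256()
with open("last-checkpoint/model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

print(sha.hexdigest() == expected_oid)  # True if the download matches the pointer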
last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
 "best_global_step": null,
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
+"epoch": 0.6006153846153847,
 "eval_steps": 244,
-"global_step":
+"global_step": 732,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -3440,6 +3440,1722 @@
 "eval_samples_per_second": 39.134,
 "eval_steps_per_second": 9.784,
 "step": 488
 }
 ],
 "logging_steps": 1,
@@ -3459,7 +5175,7 @@
 "attributes": {}
 }
 },
-"total_flos": 1.
 "train_batch_size": 4,
 "trial_name": null,
 "trial_params": null
| 3443 |
+
},
|
| 3444 |
+
{
|
| 3445 |
+
"epoch": 0.4012307692307692,
|
| 3446 |
+
"grad_norm": 1.671875,
|
| 3447 |
+
"learning_rate": 6.726573305245926e-06,
|
| 3448 |
+
"loss": 1.1119024753570557,
|
| 3449 |
+
"step": 489
|
| 3450 |
+
},
|
| 3451 |
+
{
|
| 3452 |
+
"epoch": 0.40205128205128204,
|
| 3453 |
+
"grad_norm": 1.7421875,
|
| 3454 |
+
"learning_rate": 6.714220844861011e-06,
|
| 3455 |
+
"loss": 1.0621892213821411,
|
| 3456 |
+
"step": 490
|
| 3457 |
+
},
|
| 3458 |
+
{
|
| 3459 |
+
"epoch": 0.40287179487179486,
|
| 3460 |
+
"grad_norm": 1.984375,
|
| 3461 |
+
"learning_rate": 6.701856517020565e-06,
|
| 3462 |
+
"loss": 1.1611796617507935,
|
| 3463 |
+
"step": 491
|
| 3464 |
+
},
|
| 3465 |
+
{
|
| 3466 |
+
"epoch": 0.4036923076923077,
|
| 3467 |
+
"grad_norm": 1.59375,
|
| 3468 |
+
"learning_rate": 6.689480407322142e-06,
|
| 3469 |
+
"loss": 1.0948337316513062,
|
| 3470 |
+
"step": 492
|
| 3471 |
+
},
|
| 3472 |
+
{
|
| 3473 |
+
"epoch": 0.4045128205128205,
|
| 3474 |
+
"grad_norm": 1.765625,
|
| 3475 |
+
"learning_rate": 6.677092601444858e-06,
|
| 3476 |
+
"loss": 1.0776751041412354,
|
| 3477 |
+
"step": 493
|
| 3478 |
+
},
|
| 3479 |
+
{
|
| 3480 |
+
"epoch": 0.4053333333333333,
|
| 3481 |
+
"grad_norm": 1.65625,
|
| 3482 |
+
"learning_rate": 6.664693185148808e-06,
|
| 3483 |
+
"loss": 1.0744000673294067,
|
| 3484 |
+
"step": 494
|
| 3485 |
+
},
|
| 3486 |
+
{
|
| 3487 |
+
"epoch": 0.40615384615384614,
|
| 3488 |
+
"grad_norm": 1.6484375,
|
| 3489 |
+
"learning_rate": 6.652282244274456e-06,
|
| 3490 |
+
"loss": 1.1028810739517212,
|
| 3491 |
+
"step": 495
|
| 3492 |
+
},
|
| 3493 |
+
{
|
| 3494 |
+
"epoch": 0.40697435897435896,
|
| 3495 |
+
"grad_norm": 1.7109375,
|
| 3496 |
+
"learning_rate": 6.639859864742058e-06,
|
| 3497 |
+
"loss": 1.1998472213745117,
|
| 3498 |
+
"step": 496
|
| 3499 |
+
},
|
| 3500 |
+
{
|
| 3501 |
+
"epoch": 0.4077948717948718,
|
| 3502 |
+
"grad_norm": 1.71875,
|
| 3503 |
+
"learning_rate": 6.627426132551059e-06,
|
| 3504 |
+
"loss": 1.123539686203003,
|
| 3505 |
+
"step": 497
|
| 3506 |
+
},
|
| 3507 |
+
{
|
| 3508 |
+
"epoch": 0.4086153846153846,
|
| 3509 |
+
"grad_norm": 1.546875,
|
| 3510 |
+
"learning_rate": 6.614981133779491e-06,
|
| 3511 |
+
"loss": 1.1190646886825562,
|
| 3512 |
+
"step": 498
|
| 3513 |
+
},
|
| 3514 |
+
{
|
| 3515 |
+
"epoch": 0.4094358974358974,
|
| 3516 |
+
"grad_norm": 1.78125,
|
| 3517 |
+
"learning_rate": 6.602524954583391e-06,
|
| 3518 |
+
"loss": 1.0418647527694702,
|
| 3519 |
+
"step": 499
|
| 3520 |
+
},
|
| 3521 |
+
{
|
| 3522 |
+
"epoch": 0.41025641025641024,
|
| 3523 |
+
"grad_norm": 1.7890625,
|
| 3524 |
+
"learning_rate": 6.590057681196191e-06,
|
| 3525 |
+
"loss": 1.1610432863235474,
|
| 3526 |
+
"step": 500
|
| 3527 |
+
},
|
| 3528 |
+
{
|
| 3529 |
+
"epoch": 0.41107692307692306,
|
| 3530 |
+
"grad_norm": 1.828125,
|
| 3531 |
+
"learning_rate": 6.5775793999281345e-06,
|
| 3532 |
+
"loss": 1.106255054473877,
|
| 3533 |
+
"step": 501
|
| 3534 |
+
},
|
| 3535 |
+
{
|
| 3536 |
+
"epoch": 0.4118974358974359,
|
| 3537 |
+
"grad_norm": 1.75,
|
| 3538 |
+
"learning_rate": 6.565090197165668e-06,
|
| 3539 |
+
"loss": 1.1458969116210938,
|
| 3540 |
+
"step": 502
|
| 3541 |
+
},
|
| 3542 |
+
{
|
| 3543 |
+
"epoch": 0.4127179487179487,
|
| 3544 |
+
"grad_norm": 1.78125,
|
| 3545 |
+
"learning_rate": 6.5525901593708456e-06,
|
| 3546 |
+
"loss": 1.1245545148849487,
|
| 3547 |
+
"step": 503
|
| 3548 |
+
},
|
| 3549 |
+
{
|
| 3550 |
+
"epoch": 0.4135384615384615,
|
| 3551 |
+
"grad_norm": 1.6171875,
|
| 3552 |
+
"learning_rate": 6.540079373080734e-06,
|
| 3553 |
+
"loss": 1.1054561138153076,
|
| 3554 |
+
"step": 504
|
| 3555 |
+
},
|
| 3556 |
+
{
|
| 3557 |
+
"epoch": 0.41435897435897434,
|
| 3558 |
+
"grad_norm": 1.8515625,
|
| 3559 |
+
"learning_rate": 6.527557924906811e-06,
|
| 3560 |
+
"loss": 1.2090294361114502,
|
| 3561 |
+
"step": 505
|
| 3562 |
+
},
|
| 3563 |
+
{
|
| 3564 |
+
"epoch": 0.41517948717948716,
|
| 3565 |
+
"grad_norm": 1.859375,
|
| 3566 |
+
"learning_rate": 6.515025901534364e-06,
|
| 3567 |
+
"loss": 1.1108143329620361,
|
| 3568 |
+
"step": 506
|
| 3569 |
+
},
|
| 3570 |
+
{
|
| 3571 |
+
"epoch": 0.416,
|
| 3572 |
+
"grad_norm": 1.734375,
|
| 3573 |
+
"learning_rate": 6.502483389721897e-06,
|
| 3574 |
+
"loss": 1.099368691444397,
|
| 3575 |
+
"step": 507
|
| 3576 |
+
},
|
| 3577 |
+
{
|
| 3578 |
+
"epoch": 0.4168205128205128,
|
| 3579 |
+
"grad_norm": 1.828125,
|
| 3580 |
+
"learning_rate": 6.489930476300519e-06,
|
| 3581 |
+
"loss": 1.118217945098877,
|
| 3582 |
+
"step": 508
|
| 3583 |
+
},
|
| 3584 |
+
{
|
| 3585 |
+
"epoch": 0.4176410256410256,
|
| 3586 |
+
"grad_norm": 1.7578125,
|
| 3587 |
+
"learning_rate": 6.477367248173352e-06,
|
| 3588 |
+
"loss": 1.1171270608901978,
|
| 3589 |
+
"step": 509
|
| 3590 |
+
},
|
| 3591 |
+
{
|
| 3592 |
+
"epoch": 0.41846153846153844,
|
| 3593 |
+
"grad_norm": 1.84375,
|
| 3594 |
+
"learning_rate": 6.4647937923149225e-06,
|
| 3595 |
+
"loss": 1.1200737953186035,
|
| 3596 |
+
"step": 510
|
| 3597 |
+
},
|
| 3598 |
+
{
|
| 3599 |
+
"epoch": 0.41928205128205126,
|
| 3600 |
+
"grad_norm": 1.859375,
|
| 3601 |
+
"learning_rate": 6.452210195770571e-06,
|
| 3602 |
+
"loss": 1.1528797149658203,
|
| 3603 |
+
"step": 511
|
| 3604 |
+
},
|
| 3605 |
+
{
|
| 3606 |
+
"epoch": 0.4201025641025641,
|
| 3607 |
+
"grad_norm": 1.7578125,
|
| 3608 |
+
"learning_rate": 6.439616545655833e-06,
|
| 3609 |
+
"loss": 1.093102216720581,
|
| 3610 |
+
"step": 512
|
| 3611 |
+
},
|
| 3612 |
+
{
|
| 3613 |
+
"epoch": 0.4209230769230769,
|
| 3614 |
+
"grad_norm": 1.9609375,
|
| 3615 |
+
"learning_rate": 6.427012929155847e-06,
|
| 3616 |
+
"loss": 1.1121059656143188,
|
| 3617 |
+
"step": 513
|
| 3618 |
+
},
|
| 3619 |
+
{
|
| 3620 |
+
"epoch": 0.4217435897435897,
|
| 3621 |
+
"grad_norm": 1.890625,
|
| 3622 |
+
"learning_rate": 6.414399433524752e-06,
|
| 3623 |
+
"loss": 1.1598759889602661,
|
| 3624 |
+
"step": 514
|
| 3625 |
+
},
|
| 3626 |
+
{
|
| 3627 |
+
"epoch": 0.42256410256410254,
|
| 3628 |
+
"grad_norm": 1.6953125,
|
| 3629 |
+
"learning_rate": 6.4017761460850725e-06,
|
| 3630 |
+
"loss": 1.1535965204238892,
|
| 3631 |
+
"step": 515
|
| 3632 |
+
},
|
| 3633 |
+
{
|
| 3634 |
+
"epoch": 0.42338461538461536,
|
| 3635 |
+
"grad_norm": 1.703125,
|
| 3636 |
+
"learning_rate": 6.389143154227128e-06,
|
| 3637 |
+
"loss": 1.1928433179855347,
|
| 3638 |
+
"step": 516
|
| 3639 |
+
},
|
| 3640 |
+
{
|
| 3641 |
+
"epoch": 0.4242051282051282,
|
| 3642 |
+
"grad_norm": 1.640625,
|
| 3643 |
+
"learning_rate": 6.376500545408416e-06,
|
| 3644 |
+
"loss": 1.0732297897338867,
|
| 3645 |
+
"step": 517
|
| 3646 |
+
},
|
| 3647 |
+
{
|
| 3648 |
+
"epoch": 0.425025641025641,
|
| 3649 |
+
"grad_norm": 1.859375,
|
| 3650 |
+
"learning_rate": 6.363848407153017e-06,
|
| 3651 |
+
"loss": 1.1649538278579712,
|
| 3652 |
+
"step": 518
|
| 3653 |
+
},
|
| 3654 |
+
{
|
| 3655 |
+
"epoch": 0.4258461538461538,
|
| 3656 |
+
"grad_norm": 1.6328125,
|
| 3657 |
+
"learning_rate": 6.351186827050977e-06,
|
| 3658 |
+
"loss": 1.0468311309814453,
|
| 3659 |
+
"step": 519
|
| 3660 |
+
},
|
| 3661 |
+
{
|
| 3662 |
+
"epoch": 0.4266666666666667,
|
| 3663 |
+
"grad_norm": 1.6171875,
|
| 3664 |
+
"learning_rate": 6.338515892757712e-06,
|
| 3665 |
+
"loss": 1.0932461023330688,
|
| 3666 |
+
"step": 520
|
| 3667 |
+
},
|
| 3668 |
+
{
|
| 3669 |
+
"epoch": 0.4274871794871795,
|
| 3670 |
+
"grad_norm": 2.015625,
|
| 3671 |
+
"learning_rate": 6.325835691993394e-06,
|
| 3672 |
+
"loss": 1.139062523841858,
|
| 3673 |
+
"step": 521
|
| 3674 |
+
},
|
| 3675 |
+
{
|
| 3676 |
+
"epoch": 0.42830769230769233,
|
| 3677 |
+
"grad_norm": 1.8125,
|
| 3678 |
+
"learning_rate": 6.31314631254235e-06,
|
| 3679 |
+
"loss": 1.0779269933700562,
|
| 3680 |
+
"step": 522
|
| 3681 |
+
},
|
| 3682 |
+
{
|
| 3683 |
+
"epoch": 0.42912820512820515,
|
| 3684 |
+
"grad_norm": 1.8046875,
|
| 3685 |
+
"learning_rate": 6.300447842252445e-06,
|
| 3686 |
+
"loss": 1.1887539625167847,
|
| 3687 |
+
"step": 523
|
| 3688 |
+
},
|
| 3689 |
+
{
|
| 3690 |
+
"epoch": 0.429948717948718,
|
| 3691 |
+
"grad_norm": 1.7421875,
|
| 3692 |
+
"learning_rate": 6.2877403690344854e-06,
|
| 3693 |
+
"loss": 1.1249791383743286,
|
| 3694 |
+
"step": 524
|
| 3695 |
+
},
|
| 3696 |
+
{
|
| 3697 |
+
"epoch": 0.4307692307692308,
|
| 3698 |
+
"grad_norm": 1.859375,
|
| 3699 |
+
"learning_rate": 6.275023980861596e-06,
|
| 3700 |
+
"loss": 1.0031733512878418,
|
| 3701 |
+
"step": 525
|
| 3702 |
+
},
|
| 3703 |
+
{
|
| 3704 |
+
"epoch": 0.4315897435897436,
|
| 3705 |
+
"grad_norm": 1.6875,
|
| 3706 |
+
"learning_rate": 6.2622987657686305e-06,
|
| 3707 |
+
"loss": 1.171457052230835,
|
| 3708 |
+
"step": 526
|
| 3709 |
+
},
|
| 3710 |
+
{
|
| 3711 |
+
"epoch": 0.43241025641025643,
|
| 3712 |
+
"grad_norm": 1.78125,
|
| 3713 |
+
"learning_rate": 6.249564811851543e-06,
|
| 3714 |
+
"loss": 1.0790423154830933,
|
| 3715 |
+
"step": 527
|
| 3716 |
+
},
|
| 3717 |
+
{
|
| 3718 |
+
"epoch": 0.43323076923076925,
|
| 3719 |
+
"grad_norm": 1.671875,
|
| 3720 |
+
"learning_rate": 6.2368222072667865e-06,
|
| 3721 |
+
"loss": 1.0905590057373047,
|
| 3722 |
+
"step": 528
|
| 3723 |
+
},
|
| 3724 |
+
{
|
| 3725 |
+
"epoch": 0.4340512820512821,
|
| 3726 |
+
"grad_norm": 2.0,
|
| 3727 |
+
"learning_rate": 6.224071040230705e-06,
|
| 3728 |
+
"loss": 1.1238864660263062,
|
| 3729 |
+
"step": 529
|
| 3730 |
+
},
|
| 3731 |
+
{
|
| 3732 |
+
"epoch": 0.4348717948717949,
|
| 3733 |
+
"grad_norm": 1.828125,
|
| 3734 |
+
"learning_rate": 6.2113113990189165e-06,
|
| 3735 |
+
"loss": 1.1621829271316528,
|
| 3736 |
+
"step": 530
|
| 3737 |
+
},
|
| 3738 |
+
{
|
| 3739 |
+
"epoch": 0.4356923076923077,
|
| 3740 |
+
"grad_norm": 1.703125,
|
| 3741 |
+
"learning_rate": 6.198543371965711e-06,
|
| 3742 |
+
"loss": 1.1436752080917358,
|
| 3743 |
+
"step": 531
|
| 3744 |
+
},
|
| 3745 |
+
{
|
| 3746 |
+
"epoch": 0.43651282051282053,
|
| 3747 |
+
"grad_norm": 1.8046875,
|
| 3748 |
+
"learning_rate": 6.185767047463426e-06,
|
| 3749 |
+
"loss": 1.0627940893173218,
|
| 3750 |
+
"step": 532
|
| 3751 |
+
},
|
| 3752 |
+
{
|
| 3753 |
+
"epoch": 0.43733333333333335,
|
| 3754 |
+
"grad_norm": 1.640625,
|
| 3755 |
+
"learning_rate": 6.172982513961845e-06,
|
| 3756 |
+
"loss": 1.1423038244247437,
|
| 3757 |
+
"step": 533
|
| 3758 |
+
},
|
| 3759 |
+
{
|
| 3760 |
+
"epoch": 0.43815384615384617,
|
| 3761 |
+
"grad_norm": 1.8828125,
|
| 3762 |
+
"learning_rate": 6.160189859967585e-06,
|
| 3763 |
+
"loss": 1.223064661026001,
|
| 3764 |
+
"step": 534
|
| 3765 |
+
},
|
| 3766 |
+
{
|
| 3767 |
+
"epoch": 0.438974358974359,
|
| 3768 |
+
"grad_norm": 1.703125,
|
| 3769 |
+
"learning_rate": 6.1473891740434745e-06,
|
| 3770 |
+
"loss": 1.1641428470611572,
|
| 3771 |
+
"step": 535
|
| 3772 |
+
},
|
| 3773 |
+
{
|
| 3774 |
+
"epoch": 0.4397948717948718,
|
| 3775 |
+
"grad_norm": 1.7890625,
|
| 3776 |
+
"learning_rate": 6.134580544807951e-06,
|
| 3777 |
+
"loss": 1.0811842679977417,
|
| 3778 |
+
"step": 536
|
| 3779 |
+
},
|
| 3780 |
+
{
|
| 3781 |
+
"epoch": 0.44061538461538463,
|
| 3782 |
+
"grad_norm": 1.859375,
|
| 3783 |
+
"learning_rate": 6.121764060934445e-06,
|
| 3784 |
+
"loss": 1.0985914468765259,
|
| 3785 |
+
"step": 537
|
| 3786 |
+
},
|
| 3787 |
+
{
|
| 3788 |
+
"epoch": 0.44143589743589745,
|
| 3789 |
+
"grad_norm": 1.6171875,
|
| 3790 |
+
"learning_rate": 6.108939811150757e-06,
|
| 3791 |
+
"loss": 1.0501829385757446,
|
| 3792 |
+
"step": 538
|
| 3793 |
+
},
|
| 3794 |
+
{
|
| 3795 |
+
"epoch": 0.44225641025641027,
|
| 3796 |
+
"grad_norm": 1.859375,
|
| 3797 |
+
"learning_rate": 6.096107884238458e-06,
|
| 3798 |
+
"loss": 1.1243979930877686,
|
| 3799 |
+
"step": 539
|
| 3800 |
+
},
|
| 3801 |
+
{
|
| 3802 |
+
"epoch": 0.4430769230769231,
|
| 3803 |
+
"grad_norm": 1.5859375,
|
| 3804 |
+
"learning_rate": 6.0832683690322616e-06,
|
| 3805 |
+
"loss": 1.1091586351394653,
|
| 3806 |
+
"step": 540
|
| 3807 |
+
},
|
| 3808 |
+
{
|
| 3809 |
+
"epoch": 0.4438974358974359,
|
| 3810 |
+
"grad_norm": 1.78125,
|
| 3811 |
+
"learning_rate": 6.070421354419418e-06,
|
| 3812 |
+
"loss": 1.084675669670105,
|
| 3813 |
+
"step": 541
|
| 3814 |
+
},
|
| 3815 |
+
{
|
| 3816 |
+
"epoch": 0.44471794871794873,
|
| 3817 |
+
"grad_norm": 1.671875,
|
| 3818 |
+
"learning_rate": 6.057566929339096e-06,
|
| 3819 |
+
"loss": 1.0464017391204834,
|
| 3820 |
+
"step": 542
|
| 3821 |
+
},
|
| 3822 |
+
{
|
| 3823 |
+
"epoch": 0.44553846153846155,
|
| 3824 |
+
"grad_norm": 1.796875,
|
| 3825 |
+
"learning_rate": 6.0447051827817626e-06,
|
| 3826 |
+
"loss": 1.0745694637298584,
|
| 3827 |
+
"step": 543
|
| 3828 |
+
},
|
| 3829 |
+
{
|
| 3830 |
+
"epoch": 0.44635897435897437,
|
| 3831 |
+
"grad_norm": 1.7578125,
|
| 3832 |
+
"learning_rate": 6.031836203788577e-06,
|
| 3833 |
+
"loss": 1.213564395904541,
|
| 3834 |
+
"step": 544
|
| 3835 |
+
},
|
| 3836 |
+
{
|
| 3837 |
+
"epoch": 0.4471794871794872,
|
| 3838 |
+
"grad_norm": 1.7109375,
|
| 3839 |
+
"learning_rate": 6.018960081450761e-06,
|
| 3840 |
+
"loss": 1.0868102312088013,
|
| 3841 |
+
"step": 545
|
| 3842 |
+
},
|
| 3843 |
+
{
|
| 3844 |
+
"epoch": 0.448,
|
| 3845 |
+
"grad_norm": 1.7578125,
|
| 3846 |
+
"learning_rate": 6.006076904908996e-06,
|
| 3847 |
+
"loss": 1.1616239547729492,
|
| 3848 |
+
"step": 546
|
| 3849 |
+
},
|
| 3850 |
+
{
|
| 3851 |
+
"epoch": 0.44882051282051283,
|
| 3852 |
+
"grad_norm": 1.71875,
|
| 3853 |
+
"learning_rate": 5.993186763352795e-06,
|
| 3854 |
+
"loss": 1.0978038311004639,
|
| 3855 |
+
"step": 547
|
| 3856 |
+
},
|
| 3857 |
+
{
|
| 3858 |
+
"epoch": 0.44964102564102565,
|
| 3859 |
+
"grad_norm": 1.828125,
|
| 3860 |
+
"learning_rate": 5.980289746019892e-06,
|
| 3861 |
+
"loss": 1.094117283821106,
|
| 3862 |
+
"step": 548
|
| 3863 |
+
},
|
| 3864 |
+
{
|
| 3865 |
+
"epoch": 0.45046153846153847,
|
| 3866 |
+
"grad_norm": 1.6484375,
|
| 3867 |
+
"learning_rate": 5.967385942195617e-06,
|
| 3868 |
+
"loss": 1.1996148824691772,
|
| 3869 |
+
"step": 549
|
| 3870 |
+
},
|
| 3871 |
+
{
|
| 3872 |
+
"epoch": 0.4512820512820513,
|
| 3873 |
+
"grad_norm": 1.65625,
|
| 3874 |
+
"learning_rate": 5.954475441212291e-06,
|
| 3875 |
+
"loss": 1.1614571809768677,
|
| 3876 |
+
"step": 550
|
| 3877 |
+
},
|
| 3878 |
+
{
|
| 3879 |
+
"epoch": 0.4521025641025641,
|
| 3880 |
+
"grad_norm": 1.5390625,
|
| 3881 |
+
"learning_rate": 5.9415583324485895e-06,
|
| 3882 |
+
"loss": 1.0338847637176514,
|
| 3883 |
+
"step": 551
|
| 3884 |
+
},
|
| 3885 |
+
{
|
| 3886 |
+
"epoch": 0.45292307692307693,
|
| 3887 |
+
"grad_norm": 1.921875,
|
| 3888 |
+
"learning_rate": 5.928634705328941e-06,
|
| 3889 |
+
"loss": 1.1728886365890503,
|
| 3890 |
+
"step": 552
|
| 3891 |
+
},
|
| 3892 |
+
{
|
| 3893 |
+
"epoch": 0.45374358974358975,
|
| 3894 |
+
"grad_norm": 1.8203125,
|
| 3895 |
+
"learning_rate": 5.915704649322895e-06,
|
| 3896 |
+
"loss": 1.1609104871749878,
|
| 3897 |
+
"step": 553
|
| 3898 |
+
},
|
| 3899 |
+
{
|
| 3900 |
+
"epoch": 0.45456410256410257,
|
| 3901 |
+
"grad_norm": 1.9453125,
|
| 3902 |
+
"learning_rate": 5.902768253944511e-06,
|
| 3903 |
+
"loss": 1.1188726425170898,
|
| 3904 |
+
"step": 554
|
| 3905 |
+
},
|
| 3906 |
+
{
|
| 3907 |
+
"epoch": 0.4553846153846154,
|
| 3908 |
+
"grad_norm": 1.578125,
|
| 3909 |
+
"learning_rate": 5.889825608751735e-06,
|
| 3910 |
+
"loss": 1.1307368278503418,
|
| 3911 |
+
"step": 555
|
| 3912 |
+
},
|
| 3913 |
+
{
|
| 3914 |
+
"epoch": 0.4562051282051282,
|
| 3915 |
+
"grad_norm": 1.9140625,
|
| 3916 |
+
"learning_rate": 5.876876803345777e-06,
|
| 3917 |
+
"loss": 1.1245629787445068,
|
| 3918 |
+
"step": 556
|
| 3919 |
+
},
|
| 3920 |
+
{
|
| 3921 |
+
"epoch": 0.457025641025641,
|
| 3922 |
+
"grad_norm": 1.671875,
|
| 3923 |
+
"learning_rate": 5.8639219273704985e-06,
|
| 3924 |
+
"loss": 1.0736263990402222,
|
| 3925 |
+
"step": 557
|
| 3926 |
+
},
|
| 3927 |
+
{
|
| 3928 |
+
"epoch": 0.45784615384615385,
|
| 3929 |
+
"grad_norm": 1.90625,
|
| 3930 |
+
"learning_rate": 5.850961070511783e-06,
|
| 3931 |
+
"loss": 1.148109793663025,
|
| 3932 |
+
"step": 558
|
| 3933 |
+
},
|
| 3934 |
+
{
|
| 3935 |
+
"epoch": 0.45866666666666667,
|
| 3936 |
+
"grad_norm": 1.796875,
|
| 3937 |
+
"learning_rate": 5.837994322496925e-06,
|
| 3938 |
+
"loss": 1.1444931030273438,
|
| 3939 |
+
"step": 559
|
| 3940 |
+
},
|
| 3941 |
+
{
|
| 3942 |
+
"epoch": 0.4594871794871795,
|
| 3943 |
+
"grad_norm": 1.6953125,
|
| 3944 |
+
"learning_rate": 5.825021773093997e-06,
|
| 3945 |
+
"loss": 1.2212135791778564,
|
| 3946 |
+
"step": 560
|
| 3947 |
+
},
|
| 3948 |
+
{
|
| 3949 |
+
"epoch": 0.4603076923076923,
|
| 3950 |
+
"grad_norm": 1.6640625,
|
| 3951 |
+
"learning_rate": 5.812043512111237e-06,
|
| 3952 |
+
"loss": 1.10701322555542,
|
| 3953 |
+
"step": 561
|
| 3954 |
+
},
|
| 3955 |
+
{
|
| 3956 |
+
"epoch": 0.4611282051282051,
|
| 3957 |
+
"grad_norm": 1.6640625,
|
| 3958 |
+
"learning_rate": 5.799059629396424e-06,
|
| 3959 |
+
"loss": 1.1263543367385864,
|
| 3960 |
+
"step": 562
|
| 3961 |
+
},
|
| 3962 |
+
{
|
| 3963 |
+
"epoch": 0.46194871794871795,
|
| 3964 |
+
"grad_norm": 1.5703125,
|
| 3965 |
+
"learning_rate": 5.786070214836255e-06,
|
| 3966 |
+
"loss": 1.102921485900879,
|
| 3967 |
+
"step": 563
|
| 3968 |
+
},
|
| 3969 |
+
{
|
| 3970 |
+
"epoch": 0.46276923076923077,
|
| 3971 |
+
"grad_norm": 1.6484375,
|
| 3972 |
+
"learning_rate": 5.773075358355724e-06,
|
| 3973 |
+
"loss": 1.0392452478408813,
|
| 3974 |
+
"step": 564
|
| 3975 |
+
},
|
| 3976 |
+
{
|
| 3977 |
+
"epoch": 0.4635897435897436,
|
| 3978 |
+
"grad_norm": 1.734375,
|
| 3979 |
+
"learning_rate": 5.7600751499175015e-06,
|
| 3980 |
+
"loss": 1.1073130369186401,
|
| 3981 |
+
"step": 565
|
| 3982 |
+
},
|
| 3983 |
+
{
|
| 3984 |
+
"epoch": 0.4644102564102564,
|
| 3985 |
+
"grad_norm": 1.875,
|
| 3986 |
+
"learning_rate": 5.747069679521306e-06,
|
| 3987 |
+
"loss": 1.078183889389038,
|
| 3988 |
+
"step": 566
|
| 3989 |
+
},
|
| 3990 |
+
{
|
| 3991 |
+
"epoch": 0.4652307692307692,
|
| 3992 |
+
"grad_norm": 1.6640625,
|
| 3993 |
+
"learning_rate": 5.734059037203285e-06,
|
| 3994 |
+
"loss": 1.1380183696746826,
|
| 3995 |
+
"step": 567
|
| 3996 |
+
},
|
| 3997 |
+
{
|
| 3998 |
+
"epoch": 0.46605128205128205,
|
| 3999 |
+
"grad_norm": 1.71875,
|
| 4000 |
+
"learning_rate": 5.7210433130353906e-06,
|
| 4001 |
+
"loss": 1.040786623954773,
|
| 4002 |
+
"step": 568
|
| 4003 |
+
},
|
| 4004 |
+
{
|
| 4005 |
+
"epoch": 0.46687179487179487,
|
| 4006 |
+
"grad_norm": 1.8828125,
|
| 4007 |
+
"learning_rate": 5.7080225971247586e-06,
|
| 4008 |
+
"loss": 1.1665704250335693,
|
| 4009 |
+
"step": 569
|
| 4010 |
+
},
|
| 4011 |
+
{
|
| 4012 |
+
"epoch": 0.4676923076923077,
|
| 4013 |
+
"grad_norm": 1.796875,
|
| 4014 |
+
"learning_rate": 5.6949969796130814e-06,
|
| 4015 |
+
"loss": 1.097498893737793,
|
| 4016 |
+
"step": 570
|
| 4017 |
+
},
|
| 4018 |
+
{
|
| 4019 |
+
"epoch": 0.4685128205128205,
|
| 4020 |
+
"grad_norm": 1.7734375,
|
| 4021 |
+
"learning_rate": 5.681966550675981e-06,
|
| 4022 |
+
"loss": 1.1670600175857544,
|
| 4023 |
+
"step": 571
|
| 4024 |
+
},
|
| 4025 |
+
{
|
| 4026 |
+
"epoch": 0.4693333333333333,
|
| 4027 |
+
"grad_norm": 1.75,
|
| 4028 |
+
"learning_rate": 5.668931400522396e-06,
|
| 4029 |
+
"loss": 1.1670058965682983,
|
| 4030 |
+
"step": 572
|
| 4031 |
+
},
|
| 4032 |
+
{
|
| 4033 |
+
"epoch": 0.47015384615384614,
|
| 4034 |
+
"grad_norm": 1.640625,
|
| 4035 |
+
"learning_rate": 5.655891619393943e-06,
|
| 4036 |
+
"loss": 1.1546249389648438,
|
| 4037 |
+
"step": 573
|
| 4038 |
+
},
|
| 4039 |
+
{
|
| 4040 |
+
"epoch": 0.47097435897435896,
|
| 4041 |
+
"grad_norm": 1.734375,
|
| 4042 |
+
"learning_rate": 5.642847297564303e-06,
|
| 4043 |
+
"loss": 1.1407477855682373,
|
| 4044 |
+
"step": 574
|
| 4045 |
+
},
|
| 4046 |
+
{
|
| 4047 |
+
"epoch": 0.4717948717948718,
|
| 4048 |
+
"grad_norm": 1.6484375,
|
| 4049 |
+
"learning_rate": 5.629798525338589e-06,
|
| 4050 |
+
"loss": 1.1352155208587646,
|
| 4051 |
+
"step": 575
|
| 4052 |
+
},
|
| 4053 |
+
{
|
| 4054 |
+
"epoch": 0.4726153846153846,
|
| 4055 |
+
"grad_norm": 1.765625,
|
| 4056 |
+
"learning_rate": 5.616745393052725e-06,
|
| 4057 |
+
"loss": 1.0495390892028809,
|
| 4058 |
+
"step": 576
|
| 4059 |
+
},
|
| 4060 |
+
{
|
| 4061 |
+
"epoch": 0.4734358974358974,
|
| 4062 |
+
"grad_norm": 1.6015625,
|
| 4063 |
+
"learning_rate": 5.603687991072821e-06,
|
| 4064 |
+
"loss": 1.1620739698410034,
|
| 4065 |
+
"step": 577
|
| 4066 |
+
},
|
| 4067 |
+
{
|
| 4068 |
+
"epoch": 0.47425641025641024,
|
| 4069 |
+
"grad_norm": 1.7421875,
|
| 4070 |
+
"learning_rate": 5.5906264097945405e-06,
|
| 4071 |
+
"loss": 1.1315065622329712,
|
| 4072 |
+
"step": 578
|
| 4073 |
+
},
|
| 4074 |
+
{
|
| 4075 |
+
"epoch": 0.47507692307692306,
|
| 4076 |
+
"grad_norm": 1.8359375,
|
| 4077 |
+
"learning_rate": 5.577560739642488e-06,
|
| 4078 |
+
"loss": 1.172524333000183,
|
| 4079 |
+
"step": 579
|
| 4080 |
+
},
|
| 4081 |
+
{
|
| 4082 |
+
"epoch": 0.4758974358974359,
|
| 4083 |
+
"grad_norm": 1.71875,
|
| 4084 |
+
"learning_rate": 5.564491071069571e-06,
|
| 4085 |
+
"loss": 1.1895794868469238,
|
| 4086 |
+
"step": 580
|
| 4087 |
+
},
|
| 4088 |
+
{
|
| 4089 |
+
"epoch": 0.4767179487179487,
|
| 4090 |
+
"grad_norm": 1.546875,
|
| 4091 |
+
"learning_rate": 5.551417494556376e-06,
|
| 4092 |
+
"loss": 1.0950872898101807,
|
| 4093 |
+
"step": 581
|
| 4094 |
+
},
|
| 4095 |
+
{
|
| 4096 |
+
"epoch": 0.4775384615384615,
|
| 4097 |
+
"grad_norm": 1.546875,
|
| 4098 |
+
"learning_rate": 5.538340100610547e-06,
|
| 4099 |
+
"loss": 1.13210129737854,
|
| 4100 |
+
"step": 582
|
| 4101 |
+
},
|
| 4102 |
+
{
|
| 4103 |
+
"epoch": 0.47835897435897434,
|
| 4104 |
+
"grad_norm": 1.7890625,
|
| 4105 |
+
"learning_rate": 5.525258979766153e-06,
|
| 4106 |
+
"loss": 1.141761302947998,
|
| 4107 |
+
"step": 583
|
| 4108 |
+
},
|
| 4109 |
+
{
|
| 4110 |
+
"epoch": 0.47917948717948716,
|
| 4111 |
+
"grad_norm": 1.71875,
|
| 4112 |
+
"learning_rate": 5.512174222583066e-06,
|
| 4113 |
+
"loss": 1.1856606006622314,
|
| 4114 |
+
"step": 584
|
| 4115 |
+
},
|
| 4116 |
+
{
|
| 4117 |
+
"epoch": 0.48,
|
| 4118 |
+
"grad_norm": 1.84375,
|
| 4119 |
+
"learning_rate": 5.499085919646335e-06,
|
| 4120 |
+
"loss": 1.179734706878662,
|
| 4121 |
+
"step": 585
|
| 4122 |
+
},
|
| 4123 |
+
{
|
| 4124 |
+
"epoch": 0.4808205128205128,
|
| 4125 |
+
"grad_norm": 1.71875,
|
| 4126 |
+
"learning_rate": 5.4859941615655495e-06,
|
| 4127 |
+
"loss": 1.122536063194275,
|
| 4128 |
+
"step": 586
|
| 4129 |
+
},
|
| 4130 |
+
{
|
| 4131 |
+
"epoch": 0.4816410256410256,
|
| 4132 |
+
"grad_norm": 1.546875,
|
| 4133 |
+
"learning_rate": 5.472899038974225e-06,
|
| 4134 |
+
"loss": 1.022833228111267,
|
| 4135 |
+
"step": 587
|
| 4136 |
+
},
|
| 4137 |
+
{
|
| 4138 |
+
"epoch": 0.48246153846153844,
|
| 4139 |
+
"grad_norm": 1.96875,
|
| 4140 |
+
"learning_rate": 5.459800642529165e-06,
|
| 4141 |
+
"loss": 1.1497764587402344,
|
| 4142 |
+
"step": 588
|
| 4143 |
+
},
|
| 4144 |
+
{
|
| 4145 |
+
"epoch": 0.48328205128205126,
|
| 4146 |
+
"grad_norm": 1.9140625,
|
| 4147 |
+
"learning_rate": 5.44669906290984e-06,
|
| 4148 |
+
"loss": 1.1021367311477661,
|
| 4149 |
+
"step": 589
|
| 4150 |
+
},
|
| 4151 |
+
{
|
| 4152 |
+
"epoch": 0.4841025641025641,
|
| 4153 |
+
"grad_norm": 1.6171875,
|
| 4154 |
+
"learning_rate": 5.433594390817755e-06,
|
| 4155 |
+
"loss": 1.1025938987731934,
|
| 4156 |
+
"step": 590
|
| 4157 |
+
},
|
| 4158 |
+
{
|
| 4159 |
+
"epoch": 0.4849230769230769,
|
| 4160 |
+
"grad_norm": 1.6640625,
|
| 4161 |
+
"learning_rate": 5.4204867169758265e-06,
|
| 4162 |
+
"loss": 1.1344976425170898,
|
| 4163 |
+
"step": 591
|
| 4164 |
+
},
|
| 4165 |
+
{
|
| 4166 |
+
"epoch": 0.4857435897435897,
|
| 4167 |
+
"grad_norm": 1.671875,
|
| 4168 |
+
"learning_rate": 5.407376132127754e-06,
|
| 4169 |
+
"loss": 1.173427939414978,
|
| 4170 |
+
"step": 592
|
| 4171 |
+
},
|
| 4172 |
+
{
|
| 4173 |
+
"epoch": 0.48656410256410254,
|
| 4174 |
+
"grad_norm": 1.6953125,
|
| 4175 |
+
"learning_rate": 5.394262727037382e-06,
|
| 4176 |
+
"loss": 1.0970546007156372,
|
| 4177 |
+
"step": 593
|
| 4178 |
+
},
|
| 4179 |
+
{
|
| 4180 |
+
"epoch": 0.48738461538461536,
|
| 4181 |
+
"grad_norm": 1.71875,
|
| 4182 |
+
"learning_rate": 5.381146592488089e-06,
|
| 4183 |
+
"loss": 1.1332324743270874,
|
| 4184 |
+
"step": 594
|
| 4185 |
+
},
|
| 4186 |
+
{
|
| 4187 |
+
"epoch": 0.4882051282051282,
|
| 4188 |
+
"grad_norm": 1.75,
|
| 4189 |
+
"learning_rate": 5.368027819282144e-06,
|
| 4190 |
+
"loss": 1.0618067979812622,
|
| 4191 |
+
"step": 595
|
| 4192 |
+
},
|
| 4193 |
+
{
|
| 4194 |
+
"epoch": 0.489025641025641,
|
| 4195 |
+
"grad_norm": 1.796875,
|
| 4196 |
+
"learning_rate": 5.35490649824008e-06,
|
| 4197 |
+
"loss": 1.2127296924591064,
|
| 4198 |
+
"step": 596
|
| 4199 |
+
},
|
| 4200 |
+
{
|
| 4201 |
+
"epoch": 0.4898461538461538,
|
| 4202 |
+
"grad_norm": 1.875,
|
| 4203 |
+
"learning_rate": 5.341782720200077e-06,
|
| 4204 |
+
"loss": 1.248301386833191,
|
| 4205 |
+
"step": 597
|
| 4206 |
+
},
|
| 4207 |
+
{
|
| 4208 |
+
"epoch": 0.49066666666666664,
|
| 4209 |
+
"grad_norm": 1.6875,
|
| 4210 |
+
"learning_rate": 5.328656576017317e-06,
|
| 4211 |
+
"loss": 1.1144038438796997,
|
| 4212 |
+
"step": 598
|
| 4213 |
+
},
|
| 4214 |
+
{
|
| 4215 |
+
"epoch": 0.49148717948717946,
|
| 4216 |
+
"grad_norm": 1.7734375,
|
| 4217 |
+
"learning_rate": 5.315528156563368e-06,
|
| 4218 |
+
"loss": 1.2089176177978516,
|
| 4219 |
+
"step": 599
|
| 4220 |
+
},
|
| 4221 |
+
{
|
| 4222 |
+
"epoch": 0.49230769230769234,
|
| 4223 |
+
"grad_norm": 1.96875,
|
| 4224 |
+
"learning_rate": 5.302397552725548e-06,
|
| 4225 |
+
"loss": 1.1805378198623657,
|
| 4226 |
+
"step": 600
|
| 4227 |
+
},
|
| 4228 |
+
{
|
| 4229 |
+
"epoch": 0.49312820512820515,
|
| 4230 |
+
"grad_norm": 1.734375,
|
| 4231 |
+
"learning_rate": 5.289264855406295e-06,
|
| 4232 |
+
"loss": 1.122835636138916,
|
| 4233 |
+
"step": 601
|
| 4234 |
+
},
|
| 4235 |
+
{
|
| 4236 |
+
"epoch": 0.493948717948718,
|
| 4237 |
+
"grad_norm": 1.703125,
|
| 4238 |
+
"learning_rate": 5.276130155522541e-06,
|
| 4239 |
+
"loss": 1.113465428352356,
|
| 4240 |
+
"step": 602
|
| 4241 |
+
},
|
| 4242 |
+
{
|
| 4243 |
+
"epoch": 0.4947692307692308,
|
| 4244 |
+
"grad_norm": 1.8046875,
|
| 4245 |
+
"learning_rate": 5.262993544005086e-06,
|
| 4246 |
+
"loss": 1.1617039442062378,
|
| 4247 |
+
"step": 603
|
| 4248 |
+
},
|
| 4249 |
+
{
|
| 4250 |
+
"epoch": 0.4955897435897436,
|
| 4251 |
+
"grad_norm": 1.8359375,
|
| 4252 |
+
"learning_rate": 5.2498551117979565e-06,
|
| 4253 |
+
"loss": 1.063246488571167,
|
| 4254 |
+
"step": 604
|
| 4255 |
+
},
|
| 4256 |
+
{
|
| 4257 |
+
"epoch": 0.49641025641025643,
|
| 4258 |
+
"grad_norm": 1.9296875,
|
| 4259 |
+
"learning_rate": 5.23671494985779e-06,
|
| 4260 |
+
"loss": 1.1420445442199707,
|
| 4261 |
+
"step": 605
|
| 4262 |
+
},
|
| 4263 |
+
{
|
| 4264 |
+
"epoch": 0.49723076923076925,
|
| 4265 |
+
"grad_norm": 1.7578125,
|
| 4266 |
+
"learning_rate": 5.223573149153197e-06,
|
| 4267 |
+
"loss": 1.167624831199646,
|
| 4268 |
+
"step": 606
|
| 4269 |
+
},
|
| 4270 |
+
{
|
| 4271 |
+
"epoch": 0.4980512820512821,
|
| 4272 |
+
"grad_norm": 1.6171875,
|
| 4273 |
+
"learning_rate": 5.210429800664133e-06,
|
| 4274 |
+
"loss": 1.1366316080093384,
|
| 4275 |
+
"step": 607
|
| 4276 |
+
},
|
| 4277 |
+
{
|
| 4278 |
+
"epoch": 0.4988717948717949,
|
| 4279 |
+
"grad_norm": 1.859375,
|
| 4280 |
+
"learning_rate": 5.197284995381264e-06,
|
| 4281 |
+
"loss": 1.1676878929138184,
|
| 4282 |
+
"step": 608
|
| 4283 |
+
},
|
| 4284 |
+
{
|
| 4285 |
+
"epoch": 0.4996923076923077,
|
| 4286 |
+
"grad_norm": 1.734375,
|
| 4287 |
+
"learning_rate": 5.1841388243053506e-06,
|
| 4288 |
+
"loss": 1.1899088621139526,
|
| 4289 |
+
"step": 609
|
| 4290 |
+
},
|
| 4291 |
+
{
|
| 4292 |
+
"epoch": 0.5005128205128205,
|
| 4293 |
+
"grad_norm": 1.8671875,
|
| 4294 |
+
"learning_rate": 5.1709913784466015e-06,
|
| 4295 |
+
"loss": 1.216658592224121,
|
| 4296 |
+
"step": 610
|
| 4297 |
+
},
|
| 4298 |
+
{
|
| 4299 |
+
"epoch": 0.5013333333333333,
|
| 4300 |
+
"grad_norm": 1.9140625,
|
| 4301 |
+
"learning_rate": 5.157842748824053e-06,
|
| 4302 |
+
"loss": 1.2003830671310425,
|
| 4303 |
+
"step": 611
|
| 4304 |
+
},
|
| 4305 |
+
{
|
| 4306 |
+
"epoch": 0.5021538461538462,
|
| 4307 |
+
"grad_norm": 1.6640625,
|
| 4308 |
+
"learning_rate": 5.144693026464937e-06,
|
| 4309 |
+
"loss": 1.074521541595459,
|
| 4310 |
+
"step": 612
|
| 4311 |
+
},
|
| 4312 |
+
{
|
| 4313 |
+
"epoch": 0.5029743589743589,
|
| 4314 |
+
"grad_norm": 1.6796875,
|
| 4315 |
+
"learning_rate": 5.1315423024040485e-06,
|
| 4316 |
+
"loss": 1.1810098886489868,
|
| 4317 |
+
"step": 613
|
| 4318 |
+
},
|
| 4319 |
+
{
|
| 4320 |
+
"epoch": 0.5037948717948718,
|
| 4321 |
+
"grad_norm": 1.59375,
|
| 4322 |
+
"learning_rate": 5.1183906676831195e-06,
|
| 4323 |
+
"loss": 1.1852682828903198,
|
| 4324 |
+
"step": 614
|
| 4325 |
+
},
|
| 4326 |
+
{
|
| 4327 |
+
"epoch": 0.5046153846153846,
|
| 4328 |
+
"grad_norm": 1.8671875,
|
| 4329 |
+
"learning_rate": 5.105238213350187e-06,
|
| 4330 |
+
"loss": 1.1723006963729858,
|
| 4331 |
+
"step": 615
|
| 4332 |
+
},
|
| 4333 |
+
{
|
| 4334 |
+
"epoch": 0.5054358974358975,
|
| 4335 |
+
"grad_norm": 1.78125,
|
| 4336 |
+
"learning_rate": 5.092085030458957e-06,
|
| 4337 |
+
"loss": 1.0914887189865112,
|
| 4338 |
+
"step": 616
|
| 4339 |
+
},
|
| 4340 |
+
{
|
| 4341 |
+
"epoch": 0.5062564102564102,
|
| 4342 |
+
"grad_norm": 1.734375,
|
| 4343 |
+
"learning_rate": 5.0789312100681854e-06,
|
| 4344 |
+
"loss": 1.122653603553772,
|
| 4345 |
+
"step": 617
|
| 4346 |
+
},
|
| 4347 |
+
{
|
| 4348 |
+
"epoch": 0.5070769230769231,
|
| 4349 |
+
"grad_norm": 1.6796875,
|
| 4350 |
+
"learning_rate": 5.065776843241038e-06,
|
| 4351 |
+
"loss": 1.2329943180084229,
|
| 4352 |
+
"step": 618
|
| 4353 |
+
},
|
| 4354 |
+
{
|
| 4355 |
+
"epoch": 0.5078974358974359,
|
| 4356 |
+
"grad_norm": 1.8984375,
|
| 4357 |
+
"learning_rate": 5.052622021044464e-06,
|
| 4358 |
+
"loss": 1.1077961921691895,
|
| 4359 |
+
"step": 619
|
| 4360 |
+
},
|
| 4361 |
+
{
|
| 4362 |
+
"epoch": 0.5087179487179487,
|
| 4363 |
+
"grad_norm": 1.8046875,
|
| 4364 |
+
"learning_rate": 5.039466834548568e-06,
|
| 4365 |
+
"loss": 1.0798243284225464,
|
| 4366 |
+
"step": 620
|
| 4367 |
+
},
|
| 4368 |
+
{
|
| 4369 |
+
"epoch": 0.5095384615384615,
|
| 4370 |
+
"grad_norm": 1.796875,
|
| 4371 |
+
"learning_rate": 5.026311374825969e-06,
|
| 4372 |
+
"loss": 1.0827723741531372,
|
| 4373 |
+
"step": 621
|
| 4374 |
+
},
|
| 4375 |
+
{
|
| 4376 |
+
"epoch": 0.5103589743589744,
|
| 4377 |
+
"grad_norm": 1.7734375,
|
| 4378 |
+
"learning_rate": 5.01315573295119e-06,
|
| 4379 |
+
"loss": 1.1512647867202759,
|
| 4380 |
+
"step": 622
|
| 4381 |
+
},
|
| 4382 |
+
{
|
| 4383 |
+
"epoch": 0.5111794871794871,
|
| 4384 |
+
"grad_norm": 1.7578125,
|
| 4385 |
+
"learning_rate": 5e-06,
|
| 4386 |
+
"loss": 1.1062994003295898,
|
| 4387 |
+
"step": 623
|
| 4388 |
+
},
|
| 4389 |
+
{
|
| 4390 |
+
"epoch": 0.512,
|
| 4391 |
+
"grad_norm": 1.6171875,
|
| 4392 |
+
"learning_rate": 4.986844267048812e-06,
|
| 4393 |
+
"loss": 1.0597025156021118,
|
| 4394 |
+
"step": 624
|
| 4395 |
+
},
|
| 4396 |
+
{
|
| 4397 |
+
"epoch": 0.5128205128205128,
|
| 4398 |
+
"grad_norm": 1.765625,
|
| 4399 |
+
"learning_rate": 4.973688625174031e-06,
|
| 4400 |
+
"loss": 1.1590633392333984,
|
| 4401 |
+
"step": 625
|
| 4402 |
+
},
|
| 4403 |
+
{
|
| 4404 |
+
"epoch": 0.5136410256410257,
|
| 4405 |
+
"grad_norm": 1.59375,
|
| 4406 |
+
"learning_rate": 4.960533165451435e-06,
|
| 4407 |
+
"loss": 1.08056640625,
|
| 4408 |
+
"step": 626
|
| 4409 |
+
},
|
| 4410 |
+
{
|
| 4411 |
+
"epoch": 0.5144615384615384,
|
| 4412 |
+
"grad_norm": 1.8828125,
|
| 4413 |
+
"learning_rate": 4.947377978955537e-06,
|
| 4414 |
+
"loss": 1.1823807954788208,
|
| 4415 |
+
"step": 627
|
| 4416 |
+
},
|
| 4417 |
+
{
|
| 4418 |
+
"epoch": 0.5152820512820513,
|
| 4419 |
+
"grad_norm": 1.6953125,
|
| 4420 |
+
"learning_rate": 4.934223156758963e-06,
|
| 4421 |
+
"loss": 1.1003363132476807,
|
| 4422 |
+
"step": 628
|
| 4423 |
+
},
|
| 4424 |
+
{
|
| 4425 |
+
"epoch": 0.516102564102564,
|
| 4426 |
+
"grad_norm": 1.6484375,
|
| 4427 |
+
"learning_rate": 4.921068789931816e-06,
|
| 4428 |
+
"loss": 1.088804006576538,
|
| 4429 |
+
"step": 629
|
| 4430 |
+
},
|
| 4431 |
+
{
|
| 4432 |
+
"epoch": 0.5169230769230769,
|
| 4433 |
+
"grad_norm": 1.6953125,
|
| 4434 |
+
"learning_rate": 4.907914969541044e-06,
|
| 4435 |
+
"loss": 1.1383732557296753,
|
| 4436 |
+
"step": 630
|
| 4437 |
+
},
|
| 4438 |
+
{
|
| 4439 |
+
"epoch": 0.5177435897435897,
|
| 4440 |
+
"grad_norm": 1.96875,
|
| 4441 |
+
"learning_rate": 4.894761786649815e-06,
|
| 4442 |
+
"loss": 1.205079197883606,
|
| 4443 |
+
"step": 631
|
| 4444 |
+
},
|
| 4445 |
+
{
|
| 4446 |
+
"epoch": 0.5185641025641026,
|
| 4447 |
+
"grad_norm": 1.78125,
|
| 4448 |
+
"learning_rate": 4.881609332316881e-06,
|
| 4449 |
+
"loss": 1.15359628200531,
|
| 4450 |
+
"step": 632
|
| 4451 |
+
},
|
| 4452 |
+
{
|
| 4453 |
+
"epoch": 0.5193846153846153,
|
| 4454 |
+
"grad_norm": 1.890625,
|
| 4455 |
+
"learning_rate": 4.868457697595952e-06,
|
| 4456 |
+
"loss": 1.0927237272262573,
|
| 4457 |
+
"step": 633
|
| 4458 |
+
},
|
| 4459 |
+
{
|
| 4460 |
+
"epoch": 0.5202051282051282,
|
| 4461 |
+
"grad_norm": 1.7265625,
|
| 4462 |
+
"learning_rate": 4.855306973535064e-06,
|
| 4463 |
+
"loss": 1.1109284162521362,
|
| 4464 |
+
"step": 634
|
| 4465 |
+
},
|
| 4466 |
+
{
|
| 4467 |
+
"epoch": 0.521025641025641,
|
| 4468 |
+
"grad_norm": 1.734375,
|
| 4469 |
+
"learning_rate": 4.842157251175947e-06,
|
| 4470 |
+
"loss": 1.135011911392212,
|
| 4471 |
+
"step": 635
|
| 4472 |
+
},
|
| 4473 |
+
{
|
| 4474 |
+
"epoch": 0.5218461538461538,
|
| 4475 |
+
"grad_norm": 1.7421875,
|
| 4476 |
+
"learning_rate": 4.829008621553401e-06,
|
| 4477 |
+
"loss": 1.2087589502334595,
|
| 4478 |
+
"step": 636
|
| 4479 |
+
},
|
| 4480 |
+
{
|
| 4481 |
+
"epoch": 0.5226666666666666,
|
| 4482 |
+
"grad_norm": 1.78125,
|
| 4483 |
+
"learning_rate": 4.815861175694651e-06,
|
| 4484 |
+
"loss": 1.0901511907577515,
|
| 4485 |
+
"step": 637
|
| 4486 |
+
},
|
| 4487 |
+
{
|
| 4488 |
+
"epoch": 0.5234871794871795,
|
| 4489 |
+
"grad_norm": 1.75,
|
| 4490 |
+
"learning_rate": 4.802715004618737e-06,
|
| 4491 |
+
"loss": 1.1220208406448364,
|
| 4492 |
+
"step": 638
|
| 4493 |
+
},
|
| 4494 |
+
{
|
| 4495 |
+
"epoch": 0.5243076923076923,
|
| 4496 |
+
"grad_norm": 1.765625,
|
| 4497 |
+
"learning_rate": 4.789570199335869e-06,
|
| 4498 |
+
"loss": 1.135562539100647,
|
| 4499 |
+
"step": 639
|
| 4500 |
+
},
|
| 4501 |
+
{
|
| 4502 |
+
"epoch": 0.5251282051282051,
|
| 4503 |
+
"grad_norm": 1.8515625,
|
| 4504 |
+
"learning_rate": 4.776426850846803e-06,
|
| 4505 |
+
"loss": 1.089937448501587,
|
| 4506 |
+
"step": 640
|
| 4507 |
+
},
|
| 4508 |
+
{
|
| 4509 |
+
"epoch": 0.525948717948718,
|
| 4510 |
+
"grad_norm": 1.8359375,
|
| 4511 |
+
"learning_rate": 4.763285050142211e-06,
|
| 4512 |
+
"loss": 1.169063925743103,
|
| 4513 |
+
"step": 641
|
| 4514 |
+
},
|
| 4515 |
+
{
|
| 4516 |
+
"epoch": 0.5267692307692308,
|
| 4517 |
+
"grad_norm": 1.859375,
|
| 4518 |
+
"learning_rate": 4.750144888202045e-06,
|
| 4519 |
+
"loss": 1.1145635843276978,
|
| 4520 |
+
"step": 642
|
| 4521 |
+
},
|
| 4522 |
+
{
|
| 4523 |
+
"epoch": 0.5275897435897436,
|
| 4524 |
+
"grad_norm": 1.8046875,
|
| 4525 |
+
"learning_rate": 4.7370064559949155e-06,
|
| 4526 |
+
"loss": 1.0948618650436401,
|
| 4527 |
+
"step": 643
|
| 4528 |
+
},
|
| 4529 |
+
{
|
| 4530 |
+
"epoch": 0.5284102564102564,
|
| 4531 |
+
"grad_norm": 1.734375,
|
| 4532 |
+
"learning_rate": 4.72386984447746e-06,
|
| 4533 |
+
"loss": 1.1526923179626465,
|
| 4534 |
+
"step": 644
|
| 4535 |
+
},
|
| 4536 |
+
{
|
| 4537 |
+
"epoch": 0.5292307692307693,
|
| 4538 |
+
"grad_norm": 1.625,
|
| 4539 |
+
"learning_rate": 4.710735144593707e-06,
|
| 4540 |
+
"loss": 1.1131905317306519,
|
| 4541 |
+
"step": 645
|
| 4542 |
+
},
|
| 4543 |
+
{
|
| 4544 |
+
"epoch": 0.530051282051282,
|
| 4545 |
+
"grad_norm": 1.828125,
|
| 4546 |
+
"learning_rate": 4.697602447274454e-06,
|
| 4547 |
+
"loss": 1.137904405593872,
|
| 4548 |
+
"step": 646
|
| 4549 |
+
},
|
| 4550 |
+
{
|
| 4551 |
+
"epoch": 0.5308717948717949,
|
| 4552 |
+
"grad_norm": 1.6484375,
|
| 4553 |
+
"learning_rate": 4.684471843436633e-06,
|
| 4554 |
+
"loss": 1.091780662536621,
|
| 4555 |
+
"step": 647
|
| 4556 |
+
},
|
| 4557 |
+
{
|
| 4558 |
+
"epoch": 0.5316923076923077,
|
| 4559 |
+
"grad_norm": 1.828125,
|
| 4560 |
+
"learning_rate": 4.671343423982684e-06,
|
| 4561 |
+
"loss": 1.0692613124847412,
|
| 4562 |
+
"step": 648
|
| 4563 |
+
},
|
| 4564 |
+
{
|
| 4565 |
+
"epoch": 0.5325128205128206,
|
| 4566 |
+
"grad_norm": 1.828125,
|
| 4567 |
+
"learning_rate": 4.658217279799925e-06,
|
| 4568 |
+
"loss": 1.2122803926467896,
|
| 4569 |
+
"step": 649
|
| 4570 |
+
},
|
| 4571 |
+
{
|
| 4572 |
+
"epoch": 0.5333333333333333,
|
| 4573 |
+
"grad_norm": 1.703125,
|
| 4574 |
+
"learning_rate": 4.6450935017599205e-06,
|
| 4575 |
+
"loss": 1.086571455001831,
|
| 4576 |
+
"step": 650
|
| 4577 |
+
},
|
| 4578 |
+
{
|
| 4579 |
+
"epoch": 0.5341538461538462,
|
| 4580 |
+
"grad_norm": 1.90625,
|
| 4581 |
+
"learning_rate": 4.631972180717859e-06,
|
| 4582 |
+
"loss": 1.1877696514129639,
|
| 4583 |
+
"step": 651
|
| 4584 |
+
},
|
| 4585 |
+
{
|
| 4586 |
+
"epoch": 0.534974358974359,
|
| 4587 |
+
"grad_norm": 1.8671875,
|
| 4588 |
+
"learning_rate": 4.618853407511912e-06,
|
| 4589 |
+
"loss": 1.0836175680160522,
|
| 4590 |
+
"step": 652
|
| 4591 |
+
},
|
| 4592 |
+
{
|
| 4593 |
+
"epoch": 0.5357948717948718,
|
| 4594 |
+
"grad_norm": 1.671875,
|
| 4595 |
+
"learning_rate": 4.6057372729626185e-06,
|
| 4596 |
+
"loss": 1.1505670547485352,
|
| 4597 |
+
"step": 653
|
| 4598 |
+
},
|
| 4599 |
+
{
|
| 4600 |
+
"epoch": 0.5366153846153846,
|
| 4601 |
+
"grad_norm": 1.6640625,
|
| 4602 |
+
"learning_rate": 4.592623867872248e-06,
|
| 4603 |
+
"loss": 1.1720384359359741,
|
| 4604 |
+
"step": 654
|
| 4605 |
+
},
|
| 4606 |
+
{
|
| 4607 |
+
"epoch": 0.5374358974358975,
|
| 4608 |
+
"grad_norm": 1.9765625,
|
| 4609 |
+
"learning_rate": 4.579513283024173e-06,
|
| 4610 |
+
"loss": 1.0870492458343506,
|
| 4611 |
+
"step": 655
|
| 4612 |
+
},
|
| 4613 |
+
{
|
| 4614 |
+
"epoch": 0.5382564102564102,
|
| 4615 |
+
"grad_norm": 1.6484375,
|
| 4616 |
+
"learning_rate": 4.566405609182247e-06,
|
| 4617 |
+
"loss": 1.0853990316390991,
|
| 4618 |
+
"step": 656
|
| 4619 |
+
},
|
| 4620 |
+
{
|
| 4621 |
+
"epoch": 0.5390769230769231,
|
| 4622 |
+
"grad_norm": 1.8203125,
|
| 4623 |
+
"learning_rate": 4.553300937090162e-06,
|
| 4624 |
+
"loss": 1.1348083019256592,
|
| 4625 |
+
"step": 657
|
| 4626 |
+
},
|
| 4627 |
+
{
|
| 4628 |
+
"epoch": 0.5398974358974359,
|
| 4629 |
+
"grad_norm": 1.921875,
|
| 4630 |
+
"learning_rate": 4.540199357470836e-06,
|
| 4631 |
+
"loss": 1.0526238679885864,
|
| 4632 |
+
"step": 658
|
| 4633 |
+
},
|
| 4634 |
+
{
|
| 4635 |
+
"epoch": 0.5407179487179488,
|
| 4636 |
+
"grad_norm": 1.7421875,
|
| 4637 |
+
"learning_rate": 4.527100961025776e-06,
|
| 4638 |
+
"loss": 1.1632277965545654,
|
| 4639 |
+
"step": 659
|
| 4640 |
+
},
|
| 4641 |
+
{
|
| 4642 |
+
"epoch": 0.5415384615384615,
|
| 4643 |
+
"grad_norm": 1.6796875,
|
| 4644 |
+
"learning_rate": 4.5140058384344505e-06,
|
| 4645 |
+
"loss": 1.0448962450027466,
|
| 4646 |
+
"step": 660
|
| 4647 |
+
},
|
| 4648 |
+
{
|
| 4649 |
+
"epoch": 0.5423589743589744,
|
| 4650 |
+
"grad_norm": 1.7578125,
|
| 4651 |
+
"learning_rate": 4.500914080353666e-06,
|
| 4652 |
+
"loss": 1.1400508880615234,
|
| 4653 |
+
"step": 661
|
| 4654 |
+
},
|
| 4655 |
+
{
|
| 4656 |
+
"epoch": 0.5431794871794872,
|
| 4657 |
+
"grad_norm": 1.78125,
|
| 4658 |
+
"learning_rate": 4.4878257774169345e-06,
|
| 4659 |
+
"loss": 1.1211259365081787,
|
| 4660 |
+
"step": 662
|
| 4661 |
+
},
|
| 4662 |
+
{
|
| 4663 |
+
"epoch": 0.544,
|
| 4664 |
+
"grad_norm": 1.828125,
|
| 4665 |
+
"learning_rate": 4.474741020233849e-06,
|
| 4666 |
+
"loss": 1.2164945602416992,
|
| 4667 |
+
"step": 663
|
| 4668 |
+
},
|
| 4669 |
+
{
|
| 4670 |
+
"epoch": 0.5448205128205128,
|
| 4671 |
+
"grad_norm": 1.828125,
|
| 4672 |
+
"learning_rate": 4.461659899389455e-06,
|
| 4673 |
+
"loss": 1.1144976615905762,
|
| 4674 |
+
"step": 664
|
| 4675 |
+
},
|
| 4676 |
+
{
|
| 4677 |
+
"epoch": 0.5456410256410257,
|
| 4678 |
+
"grad_norm": 1.796875,
|
| 4679 |
+
"learning_rate": 4.448582505443625e-06,
|
| 4680 |
+
"loss": 1.0874392986297607,
|
| 4681 |
+
"step": 665
|
| 4682 |
+
},
|
| 4683 |
+
{
|
| 4684 |
+
"epoch": 0.5464615384615384,
|
| 4685 |
+
"grad_norm": 1.640625,
|
| 4686 |
+
"learning_rate": 4.435508928930431e-06,
|
| 4687 |
+
"loss": 1.0897243022918701,
|
| 4688 |
+
"step": 666
|
| 4689 |
+
},
|
| 4690 |
+
{
|
| 4691 |
+
"epoch": 0.5472820512820513,
|
| 4692 |
+
"grad_norm": 1.671875,
|
| 4693 |
+
"learning_rate": 4.422439260357513e-06,
|
| 4694 |
+
"loss": 1.1446688175201416,
|
| 4695 |
+
"step": 667
|
| 4696 |
+
},
|
| 4697 |
+
{
|
| 4698 |
+
"epoch": 0.5481025641025641,
|
| 4699 |
+
"grad_norm": 1.7421875,
|
| 4700 |
+
"learning_rate": 4.40937359020546e-06,
|
| 4701 |
+
"loss": 1.0954184532165527,
|
| 4702 |
+
"step": 668
|
| 4703 |
+
},
|
| 4704 |
+
{
|
| 4705 |
+
"epoch": 0.548923076923077,
|
| 4706 |
+
"grad_norm": 1.9921875,
|
| 4707 |
+
"learning_rate": 4.396312008927182e-06,
|
| 4708 |
+
"loss": 1.1281894445419312,
|
| 4709 |
+
"step": 669
|
| 4710 |
+
},
|
| 4711 |
+
{
|
| 4712 |
+
"epoch": 0.5497435897435897,
|
| 4713 |
+
"grad_norm": 1.6640625,
|
| 4714 |
+
"learning_rate": 4.383254606947276e-06,
|
| 4715 |
+
"loss": 1.2130154371261597,
|
| 4716 |
+
"step": 670
|
| 4717 |
+
},
|
| 4718 |
+
{
|
| 4719 |
+
"epoch": 0.5505641025641026,
|
| 4720 |
+
"grad_norm": 1.78125,
|
| 4721 |
+
"learning_rate": 4.3702014746614135e-06,
|
| 4722 |
+
"loss": 1.1461225748062134,
|
| 4723 |
+
"step": 671
|
| 4724 |
+
},
|
| 4725 |
+
{
|
| 4726 |
+
"epoch": 0.5513846153846154,
|
| 4727 |
+
"grad_norm": 1.78125,
|
| 4728 |
+
"learning_rate": 4.357152702435699e-06,
|
| 4729 |
+
"loss": 1.156166672706604,
|
| 4730 |
+
"step": 672
|
| 4731 |
+
},
|
| 4732 |
+
{
|
| 4733 |
+
"epoch": 0.5522051282051282,
|
| 4734 |
+
"grad_norm": 1.796875,
|
| 4735 |
+
"learning_rate": 4.344108380606059e-06,
|
| 4736 |
+
"loss": 1.1747384071350098,
|
| 4737 |
+
"step": 673
|
| 4738 |
+
},
|
| 4739 |
+
{
|
| 4740 |
+
"epoch": 0.553025641025641,
|
| 4741 |
+
"grad_norm": 1.6953125,
|
| 4742 |
+
"learning_rate": 4.3310685994776055e-06,
|
| 4743 |
+
"loss": 1.260206937789917,
|
| 4744 |
+
"step": 674
|
| 4745 |
+
},
|
| 4746 |
+
{
|
| 4747 |
+
"epoch": 0.5538461538461539,
|
| 4748 |
+
"grad_norm": 1.7734375,
|
| 4749 |
+
"learning_rate": 4.318033449324019e-06,
|
| 4750 |
+
"loss": 1.0792930126190186,
|
| 4751 |
+
"step": 675
|
| 4752 |
+
},
|
| 4753 |
+
{
|
| 4754 |
+
"epoch": 0.5546666666666666,
|
| 4755 |
+
"grad_norm": 1.765625,
|
| 4756 |
+
"learning_rate": 4.305003020386922e-06,
|
| 4757 |
+
"loss": 1.0907093286514282,
|
| 4758 |
+
"step": 676
|
| 4759 |
+
},
|
| 4760 |
+
{
|
| 4761 |
+
"epoch": 0.5554871794871795,
|
| 4762 |
+
"grad_norm": 1.8359375,
|
| 4763 |
+
"learning_rate": 4.291977402875244e-06,
|
| 4764 |
+
"loss": 1.1744334697723389,
|
| 4765 |
+
"step": 677
|
| 4766 |
+
},
|
| 4767 |
+
{
|
| 4768 |
+
"epoch": 0.5563076923076923,
|
| 4769 |
+
"grad_norm": 1.7578125,
|
| 4770 |
+
"learning_rate": 4.278956686964611e-06,
|
| 4771 |
+
"loss": 1.1004900932312012,
|
| 4772 |
+
"step": 678
|
| 4773 |
+
},
|
| 4774 |
+
{
|
| 4775 |
+
"epoch": 0.5571282051282052,
|
| 4776 |
+
"grad_norm": 1.703125,
|
| 4777 |
+
"learning_rate": 4.265940962796717e-06,
|
| 4778 |
+
"loss": 1.1095056533813477,
|
| 4779 |
+
"step": 679
|
| 4780 |
+
},
|
| 4781 |
+
{
|
| 4782 |
+
"epoch": 0.5579487179487179,
|
| 4783 |
+
"grad_norm": 1.625,
|
| 4784 |
+
"learning_rate": 4.252930320478695e-06,
|
| 4785 |
+
"loss": 1.1838195323944092,
|
| 4786 |
+
"step": 680
|
| 4787 |
+
},
|
| 4788 |
+
{
|
| 4789 |
+
"epoch": 0.5587692307692308,
|
| 4790 |
+
"grad_norm": 1.71875,
|
| 4791 |
+
"learning_rate": 4.239924850082501e-06,
|
| 4792 |
+
"loss": 1.1503995656967163,
|
| 4793 |
+
"step": 681
|
| 4794 |
+
},
|
| 4795 |
+
{
|
| 4796 |
+
"epoch": 0.5595897435897436,
|
| 4797 |
+
"grad_norm": 1.75,
|
| 4798 |
+
"learning_rate": 4.226924641644277e-06,
|
| 4799 |
+
"loss": 1.2230197191238403,
|
| 4800 |
+
"step": 682
|
| 4801 |
+
},
|
| 4802 |
+
{
|
| 4803 |
+
"epoch": 0.5604102564102564,
|
| 4804 |
+
"grad_norm": 1.8359375,
|
| 4805 |
+
"learning_rate": 4.213929785163747e-06,
|
| 4806 |
+
"loss": 1.164717674255371,
|
| 4807 |
+
"step": 683
|
| 4808 |
+
},
|
| 4809 |
+
{
|
| 4810 |
+
"epoch": 0.5612307692307692,
|
| 4811 |
+
"grad_norm": 1.78125,
|
| 4812 |
+
"learning_rate": 4.2009403706035775e-06,
|
| 4813 |
+
"loss": 1.2025833129882812,
|
| 4814 |
+
"step": 684
|
| 4815 |
+
},
|
| 4816 |
+
{
|
| 4817 |
+
"epoch": 0.5620512820512821,
|
| 4818 |
+
"grad_norm": 1.9140625,
|
| 4819 |
+
"learning_rate": 4.187956487888764e-06,
|
| 4820 |
+
"loss": 1.0614598989486694,
|
| 4821 |
+
"step": 685
|
| 4822 |
+
},
|
| 4823 |
+
{
|
| 4824 |
+
"epoch": 0.5628717948717948,
|
| 4825 |
+
"grad_norm": 1.84375,
|
| 4826 |
+
"learning_rate": 4.1749782269060045e-06,
|
| 4827 |
+
"loss": 1.1531074047088623,
|
| 4828 |
+
"step": 686
|
| 4829 |
+
},
|
| 4830 |
+
{
|
| 4831 |
+
"epoch": 0.5636923076923077,
|
| 4832 |
+
"grad_norm": 1.921875,
|
| 4833 |
+
"learning_rate": 4.162005677503076e-06,
|
| 4834 |
+
"loss": 1.1412378549575806,
|
| 4835 |
+
"step": 687
|
| 4836 |
+
},
|
| 4837 |
+
{
|
| 4838 |
+
"epoch": 0.5645128205128205,
|
| 4839 |
+
"grad_norm": 1.7890625,
|
| 4840 |
+
"learning_rate": 4.149038929488218e-06,
|
| 4841 |
+
"loss": 1.0930665731430054,
|
| 4842 |
+
"step": 688
|
| 4843 |
+
},
|
| 4844 |
+
{
|
| 4845 |
+
"epoch": 0.5653333333333334,
|
| 4846 |
+
"grad_norm": 1.6796875,
|
| 4847 |
+
"learning_rate": 4.136078072629503e-06,
|
| 4848 |
+
"loss": 1.0276111364364624,
|
| 4849 |
+
"step": 689
|
| 4850 |
+
},
|
| 4851 |
+
{
|
| 4852 |
+
"epoch": 0.5661538461538461,
|
| 4853 |
+
"grad_norm": 1.6796875,
|
| 4854 |
+
"learning_rate": 4.123123196654224e-06,
|
| 4855 |
+
"loss": 1.173998236656189,
|
| 4856 |
+
"step": 690
|
| 4857 |
+
},
|
| 4858 |
+
{
|
| 4859 |
+
"epoch": 0.566974358974359,
|
| 4860 |
+
"grad_norm": 1.640625,
|
| 4861 |
+
"learning_rate": 4.110174391248268e-06,
|
| 4862 |
+
"loss": 1.1437534093856812,
|
| 4863 |
+
"step": 691
|
| 4864 |
+
},
|
| 4865 |
+
{
|
| 4866 |
+
"epoch": 0.5677948717948718,
|
| 4867 |
+
"grad_norm": 1.671875,
|
| 4868 |
+
"learning_rate": 4.097231746055491e-06,
|
| 4869 |
+
"loss": 1.099846363067627,
|
| 4870 |
+
"step": 692
|
| 4871 |
+
},
|
| 4872 |
+
{
|
| 4873 |
+
"epoch": 0.5686153846153846,
|
| 4874 |
+
"grad_norm": 1.6015625,
|
| 4875 |
+
"learning_rate": 4.084295350677106e-06,
|
| 4876 |
+
"loss": 1.1319067478179932,
|
| 4877 |
+
"step": 693
|
| 4878 |
+
},
|
| 4879 |
+
{
|
| 4880 |
+
"epoch": 0.5694358974358974,
|
| 4881 |
+
"grad_norm": 1.7265625,
|
| 4882 |
+
"learning_rate": 4.0713652946710595e-06,
|
| 4883 |
+
"loss": 1.1505883932113647,
|
| 4884 |
+
"step": 694
|
| 4885 |
+
},
|
| 4886 |
+
{
|
| 4887 |
+
"epoch": 0.5702564102564103,
|
| 4888 |
+
"grad_norm": 1.625,
|
| 4889 |
+
"learning_rate": 4.0584416675514104e-06,
|
| 4890 |
+
"loss": 1.109766960144043,
|
| 4891 |
+
"step": 695
|
| 4892 |
+
},
|
| 4893 |
+
{
|
| 4894 |
+
"epoch": 0.571076923076923,
|
| 4895 |
+
"grad_norm": 1.8671875,
|
| 4896 |
+
"learning_rate": 4.045524558787712e-06,
|
| 4897 |
+
"loss": 1.1238247156143188,
|
| 4898 |
+
"step": 696
|
| 4899 |
+
},
|
| 4900 |
+
{
|
| 4901 |
+
"epoch": 0.5718974358974359,
|
| 4902 |
+
"grad_norm": 1.6015625,
|
| 4903 |
+
"learning_rate": 4.032614057804385e-06,
|
| 4904 |
+
"loss": 1.0676288604736328,
|
| 4905 |
+
"step": 697
|
| 4906 |
+
},
|
| 4907 |
+
{
|
| 4908 |
+
"epoch": 0.5727179487179487,
|
| 4909 |
+
"grad_norm": 1.6953125,
|
| 4910 |
+
"learning_rate": 4.01971025398011e-06,
|
| 4911 |
+
"loss": 1.05299711227417,
|
| 4912 |
+
"step": 698
|
| 4913 |
+
},
|
| 4914 |
+
{
|
| 4915 |
+
"epoch": 0.5735384615384616,
|
| 4916 |
+
"grad_norm": 1.8984375,
|
| 4917 |
+
"learning_rate": 4.006813236647206e-06,
|
| 4918 |
+
"loss": 1.2232424020767212,
|
| 4919 |
+
"step": 699
|
| 4920 |
+
},
|
| 4921 |
+
{
|
| 4922 |
+
"epoch": 0.5743589743589743,
|
| 4923 |
+
"grad_norm": 1.734375,
|
| 4924 |
+
"learning_rate": 3.993923095091005e-06,
|
| 4925 |
+
"loss": 1.1072072982788086,
|
| 4926 |
+
"step": 700
|
| 4927 |
+
},
|
| 4928 |
+
{
|
| 4929 |
+
"epoch": 0.5751794871794872,
|
| 4930 |
+
"grad_norm": 1.6953125,
|
| 4931 |
+
"learning_rate": 3.9810399185492406e-06,
|
| 4932 |
+
"loss": 1.1366100311279297,
|
| 4933 |
+
"step": 701
|
| 4934 |
+
},
|
| 4935 |
+
{
|
| 4936 |
+
"epoch": 0.576,
|
| 4937 |
+
"grad_norm": 1.734375,
|
| 4938 |
+
"learning_rate": 3.968163796211425e-06,
|
| 4939 |
+
"loss": 1.2044111490249634,
|
| 4940 |
+
"step": 702
|
| 4941 |
+
},
|
| 4942 |
+
{
|
| 4943 |
+
"epoch": 0.5768205128205128,
|
| 4944 |
+
"grad_norm": 1.6640625,
|
| 4945 |
+
"learning_rate": 3.955294817218239e-06,
|
| 4946 |
+
"loss": 1.014880657196045,
|
| 4947 |
+
"step": 703
|
| 4948 |
+
},
|
| 4949 |
+
{
|
| 4950 |
+
"epoch": 0.5776410256410256,
|
| 4951 |
+
"grad_norm": 1.6875,
|
| 4952 |
+
"learning_rate": 3.9424330706609055e-06,
|
| 4953 |
+
"loss": 1.1580930948257446,
|
| 4954 |
+
"step": 704
|
| 4955 |
+
},
|
| 4956 |
+
{
|
| 4957 |
+
"epoch": 0.5784615384615385,
|
| 4958 |
+
"grad_norm": 1.7109375,
|
| 4959 |
+
"learning_rate": 3.929578645580583e-06,
|
| 4960 |
+
"loss": 1.1248693466186523,
|
| 4961 |
+
"step": 705
|
| 4962 |
+
},
|
| 4963 |
+
{
|
| 4964 |
+
"epoch": 0.5792820512820512,
|
| 4965 |
+
"grad_norm": 1.7734375,
|
| 4966 |
+
"learning_rate": 3.916731630967741e-06,
|
| 4967 |
+
"loss": 1.1421297788619995,
|
| 4968 |
+
"step": 706
|
| 4969 |
+
},
|
| 4970 |
+
{
|
| 4971 |
+
"epoch": 0.5801025641025641,
|
| 4972 |
+
"grad_norm": 1.7421875,
|
| 4973 |
+
"learning_rate": 3.903892115761545e-06,
|
| 4974 |
+
"loss": 1.1289352178573608,
|
| 4975 |
+
"step": 707
|
| 4976 |
+
},
|
| 4977 |
+
{
|
| 4978 |
+
"epoch": 0.5809230769230769,
|
| 4979 |
+
"grad_norm": 2.0,
|
| 4980 |
+
"learning_rate": 3.8910601888492444e-06,
|
| 4981 |
+
"loss": 1.2048262357711792,
|
| 4982 |
+
"step": 708
|
| 4983 |
+
},
|
| 4984 |
+
{
|
| 4985 |
+
"epoch": 0.5817435897435898,
|
| 4986 |
+
"grad_norm": 1.7265625,
|
| 4987 |
+
"learning_rate": 3.8782359390655566e-06,
|
| 4988 |
+
"loss": 1.1883631944656372,
|
| 4989 |
+
"step": 709
|
| 4990 |
+
},
|
| 4991 |
+
{
|
| 4992 |
+
"epoch": 0.5825641025641025,
|
| 4993 |
+
"grad_norm": 1.734375,
|
| 4994 |
+
"learning_rate": 3.865419455192049e-06,
|
| 4995 |
+
"loss": 1.1309034824371338,
|
| 4996 |
+
"step": 710
|
| 4997 |
+
},
|
| 4998 |
+
{
|
| 4999 |
+
"epoch": 0.5833846153846154,
|
| 5000 |
+
"grad_norm": 1.7890625,
|
| 5001 |
+
"learning_rate": 3.852610825956529e-06,
|
| 5002 |
+
"loss": 1.1083405017852783,
|
| 5003 |
+
"step": 711
|
| 5004 |
+
},
|
| 5005 |
+
{
|
| 5006 |
+
"epoch": 0.5842051282051282,
|
| 5007 |
+
"grad_norm": 1.75,
|
| 5008 |
+
"learning_rate": 3.8398101400324185e-06,
|
| 5009 |
+
"loss": 1.2035160064697266,
|
| 5010 |
+
"step": 712
|
| 5011 |
+
},
|
| 5012 |
+
{
|
| 5013 |
+
"epoch": 0.585025641025641,
|
| 5014 |
+
"grad_norm": 1.78125,
|
| 5015 |
+
"learning_rate": 3.827017486038157e-06,
|
| 5016 |
+
"loss": 1.1301109790802002,
|
| 5017 |
+
"step": 713
|
| 5018 |
+
},
|
| 5019 |
+
{
|
| 5020 |
+
"epoch": 0.5858461538461538,
|
| 5021 |
+
"grad_norm": 1.6875,
|
| 5022 |
+
"learning_rate": 3.8142329525365763e-06,
|
| 5023 |
+
"loss": 1.1090412139892578,
|
| 5024 |
+
"step": 714
|
| 5025 |
+
},
|
| 5026 |
+
{
|
| 5027 |
+
"epoch": 0.5866666666666667,
|
| 5028 |
+
"grad_norm": 1.7734375,
|
| 5029 |
+
"learning_rate": 3.8014566280342914e-06,
|
| 5030 |
+
"loss": 1.1583330631256104,
|
| 5031 |
+
"step": 715
|
| 5032 |
+
},
|
| 5033 |
+
{
|
| 5034 |
+
"epoch": 0.5874871794871794,
|
| 5035 |
+
"grad_norm": 1.875,
|
| 5036 |
+
"learning_rate": 3.788688600981085e-06,
|
| 5037 |
+
"loss": 1.0629702806472778,
|
| 5038 |
+
"step": 716
|
| 5039 |
+
},
|
| 5040 |
+
{
|
| 5041 |
+
"epoch": 0.5883076923076923,
|
| 5042 |
+
"grad_norm": 1.6171875,
|
| 5043 |
+
"learning_rate": 3.7759289597692984e-06,
|
| 5044 |
+
"loss": 1.0245894193649292,
|
| 5045 |
+
"step": 717
|
| 5046 |
+
},
|
| 5047 |
+
{
|
| 5048 |
+
"epoch": 0.5891282051282051,
|
| 5049 |
+
"grad_norm": 1.7890625,
|
| 5050 |
+
"learning_rate": 3.763177792733216e-06,
|
| 5051 |
+
"loss": 1.0865757465362549,
|
| 5052 |
+
"step": 718
|
| 5053 |
+
},
|
| 5054 |
+
{
|
| 5055 |
+
"epoch": 0.589948717948718,
|
| 5056 |
+
"grad_norm": 1.65625,
|
| 5057 |
+
"learning_rate": 3.750435188148459e-06,
|
| 5058 |
+
"loss": 1.1652759313583374,
|
| 5059 |
+
"step": 719
|
| 5060 |
+
},
|
| 5061 |
+
{
|
| 5062 |
+
"epoch": 0.5907692307692308,
|
| 5063 |
+
"grad_norm": 1.734375,
|
| 5064 |
+
"learning_rate": 3.7377012342313703e-06,
|
| 5065 |
+
"loss": 1.1769644021987915,
|
| 5066 |
+
"step": 720
|
| 5067 |
+
},
|
| 5068 |
+
{
|
| 5069 |
+
"epoch": 0.5915897435897436,
|
| 5070 |
+
"grad_norm": 1.96875,
|
| 5071 |
+
"learning_rate": 3.7249760191384055e-06,
|
| 5072 |
+
"loss": 1.164440631866455,
|
| 5073 |
+
"step": 721
|
| 5074 |
+
},
|
| 5075 |
+
{
|
| 5076 |
+
"epoch": 0.5924102564102565,
|
| 5077 |
+
"grad_norm": 1.859375,
|
| 5078 |
+
"learning_rate": 3.712259630965518e-06,
|
| 5079 |
+
"loss": 1.0393414497375488,
|
| 5080 |
+
"step": 722
|
| 5081 |
+
},
|
| 5082 |
+
{
|
| 5083 |
+
"epoch": 0.5932307692307692,
|
| 5084 |
+
"grad_norm": 1.703125,
|
| 5085 |
+
"learning_rate": 3.6995521577475567e-06,
|
| 5086 |
+
"loss": 1.0958324670791626,
|
| 5087 |
+
"step": 723
|
| 5088 |
+
},
|
| 5089 |
+
{
|
| 5090 |
+
"epoch": 0.5940512820512821,
|
| 5091 |
+
"grad_norm": 1.6953125,
|
| 5092 |
+
"learning_rate": 3.6868536874576515e-06,
|
| 5093 |
+
"loss": 1.1045432090759277,
|
| 5094 |
+
"step": 724
|
| 5095 |
+
},
|
| 5096 |
+
{
|
| 5097 |
+
"epoch": 0.5948717948717949,
|
| 5098 |
+
"grad_norm": 1.625,
|
| 5099 |
+
"learning_rate": 3.6741643080066065e-06,
|
| 5100 |
+
"loss": 1.0855437517166138,
|
| 5101 |
+
"step": 725
|
| 5102 |
+
},
|
| 5103 |
+
{
|
| 5104 |
+
"epoch": 0.5956923076923077,
|
| 5105 |
+
"grad_norm": 1.6875,
|
| 5106 |
+
"learning_rate": 3.6614841072422913e-06,
|
| 5107 |
+
"loss": 1.1596192121505737,
|
| 5108 |
+
"step": 726
|
| 5109 |
+
},
|
| 5110 |
+
{
|
| 5111 |
+
"epoch": 0.5965128205128205,
|
| 5112 |
+
"grad_norm": 1.796875,
|
| 5113 |
+
"learning_rate": 3.648813172949025e-06,
|
| 5114 |
+
"loss": 1.166650414466858,
|
| 5115 |
+
"step": 727
|
| 5116 |
+
},
|
| 5117 |
+
{
|
| 5118 |
+
"epoch": 0.5973333333333334,
|
| 5119 |
+
"grad_norm": 1.7421875,
|
| 5120 |
+
"learning_rate": 3.636151592846985e-06,
|
| 5121 |
+
"loss": 1.070565938949585,
|
| 5122 |
+
"step": 728
|
| 5123 |
+
},
|
| 5124 |
+
{
|
| 5125 |
+
"epoch": 0.5981538461538461,
|
| 5126 |
+
"grad_norm": 1.796875,
|
| 5127 |
+
"learning_rate": 3.6234994545915852e-06,
|
| 5128 |
+
"loss": 1.1748634576797485,
|
| 5129 |
+
"step": 729
|
| 5130 |
+
},
|
| 5131 |
+
{
|
| 5132 |
+
"epoch": 0.598974358974359,
|
| 5133 |
+
"grad_norm": 1.765625,
|
| 5134 |
+
"learning_rate": 3.610856845772873e-06,
|
| 5135 |
+
"loss": 1.1432057619094849,
|
| 5136 |
+
"step": 730
|
| 5137 |
+
},
|
| 5138 |
+
{
|
| 5139 |
+
"epoch": 0.5997948717948718,
|
| 5140 |
+
"grad_norm": 1.6875,
|
| 5141 |
+
"learning_rate": 3.5982238539149287e-06,
|
| 5142 |
+
"loss": 1.050528645515442,
|
| 5143 |
+
"step": 731
|
| 5144 |
+
},
|
| 5145 |
+
{
|
| 5146 |
+
"epoch": 0.6006153846153847,
|
| 5147 |
+
"grad_norm": 1.671875,
|
| 5148 |
+
"learning_rate": 3.585600566475251e-06,
|
| 5149 |
+
"loss": 1.152228832244873,
|
| 5150 |
+
"step": 732
|
| 5151 |
+
},
|
| 5152 |
+
{
|
| 5153 |
+
"epoch": 0.6006153846153847,
|
| 5154 |
+
"eval_loss": 1.1157219409942627,
|
| 5155 |
+
"eval_runtime": 25.5407,
|
| 5156 |
+
"eval_samples_per_second": 39.153,
|
| 5157 |
+
"eval_steps_per_second": 9.788,
|
| 5158 |
+
"step": 732
|
| 5159 |
}
|
| 5160 |
],
|
| 5161 |
"logging_steps": 1,
   ...
       "attributes": {}
     }
   },
+  "total_flos": 1.9064226105770803e+17,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null
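For anyone consuming this checkpoint, a minimal sketch (not part of this commit) of how the logged curves above could be read back out of trainer_state.json. It assumes the entries shown in this hunk sit under the Hugging Face Trainer's usual top-level "log_history" list (that key itself is outside the hunk) and that the file is opened from the checkpoint directory this diff touches.

# Minimal sketch: recover the train/eval loss curves from trainer_state.json.
# Assumption: the log entries live under a top-level "log_history" list, as the
# Trainer normally writes them; training entries carry "loss", eval entries "eval_loss".
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

train_curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_curve = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print("latest train loss:", train_curve[-1])  # at this checkpoint: (732, 1.1522...)
print("latest eval loss:", eval_curve[-1])    # at this checkpoint: (732, 1.1157...)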