Training in progress, step 1517, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7f39286a4f8f787fe815e1e766f2dd3860143a0b91275a914c1c425917b7b664
 size 80792096

last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1db73066a1a52d83ba64180b87d50b1678d2884310a96382775638626e456e54
 size 41460084

last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:261becb69fcbd89f74ba385639b93962c6f221af9df352fc6d3b37b8251a2632
 size 14244

last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0aaacf687b9fb89d72014fd4c36bcdabbeedfe921bb168b55b68dd93007d6077
 size 1064

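Each file above is tracked with Git LFS, so the repository stores only a three-line pointer: the `version` of the LFS spec, the blob's `oid sha256:`, and its `size` in bytes. Rewriting the checkpoint changes the hash while each byte size stays constant. As a minimal sketch of how such a pointer can be checked against a downloaded blob (the function name and paths are illustrative, not part of this repo):

import hashlib

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the oid/size of a Git LFS pointer file."""
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    actual_size = 0
    with open(blob_path, "rb") as f:
        # Hash in 1 MiB chunks so large checkpoints don't need to fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            actual_size += len(chunk)
    return digest.hexdigest() == expected_oid and actual_size == expected_size
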
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
   "eval_steps": 380,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -8019,6 +8019,2645 @@
       "eval_samples_per_second": 32.277,
       "eval_steps_per_second": 16.141,
       "step": 1140
     }
   ],
   "logging_steps": 1,
@@ -8033,12 +10672,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos": 2.
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null

 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 0.1101630296648633,
   "eval_steps": 380,
+  "global_step": 1517,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,

       "eval_samples_per_second": 32.277,
       "eval_steps_per_second": 16.141,
       "step": 1140
+    },
+    {
+      "epoch": 0.08285828401292618,
+      "grad_norm": 0.6884163618087769,
+      "learning_rate": 2.9178852208858143e-05,
+      "loss": 0.1964,
+      "step": 1141
+    },
+    {
+      "epoch": 0.08293090301731963,
+      "grad_norm": 0.9253848791122437,
+      "learning_rate": 2.9031828663667193e-05,
+      "loss": 0.2425,
+      "step": 1142
+    },
+    {
+      "epoch": 0.08300352202171309,
+      "grad_norm": 0.8351463079452515,
+      "learning_rate": 2.888511353433274e-05,
+      "loss": 0.6826,
+      "step": 1143
+    },
+    {
+      "epoch": 0.08307614102610653,
+      "grad_norm": 1.1377897262573242,
+      "learning_rate": 2.873870745845433e-05,
+      "loss": 0.5722,
+      "step": 1144
+    },
+    {
+      "epoch": 0.08314876003049998,
+      "grad_norm": 1.1056997776031494,
+      "learning_rate": 2.8592611072288355e-05,
+      "loss": 0.6941,
+      "step": 1145
+    },
+    {
+      "epoch": 0.08322137903489343,
+      "grad_norm": 2.7529983520507812,
+      "learning_rate": 2.8446825010745425e-05,
+      "loss": 0.5797,
+      "step": 1146
+    },
+    {
+      "epoch": 0.08329399803928689,
+      "grad_norm": 0.8631287813186646,
+      "learning_rate": 2.8301349907387464e-05,
+      "loss": 0.7076,
+      "step": 1147
+    },
+    {
+      "epoch": 0.08336661704368033,
+      "grad_norm": 2.9340662956237793,
+      "learning_rate": 2.8156186394425054e-05,
+      "loss": 0.8671,
+      "step": 1148
+    },
+    {
+      "epoch": 0.08343923604807378,
+      "grad_norm": 1.4846022129058838,
+      "learning_rate": 2.801133510271463e-05,
+      "loss": 0.2737,
+      "step": 1149
+    },
+    {
+      "epoch": 0.08351185505246723,
+      "grad_norm": 0.9685707092285156,
+      "learning_rate": 2.786679666175587e-05,
+      "loss": 0.7686,
+      "step": 1150
+    },
+    {
+      "epoch": 0.08358447405686067,
+      "grad_norm": 1.3316307067871094,
+      "learning_rate": 2.772257169968866e-05,
+      "loss": 1.254,
+      "step": 1151
+    },
+    {
+      "epoch": 0.08365709306125413,
+      "grad_norm": 0.9437965154647827,
+      "learning_rate": 2.7578660843290794e-05,
+      "loss": 0.6208,
+      "step": 1152
+    },
+    {
+      "epoch": 0.08372971206564758,
+      "grad_norm": 1.1192880868911743,
+      "learning_rate": 2.7435064717974758e-05,
+      "loss": 0.8395,
+      "step": 1153
+    },
+    {
+      "epoch": 0.08380233107004104,
+      "grad_norm": 1.6144412755966187,
+      "learning_rate": 2.7291783947785543e-05,
+      "loss": 0.2636,
+      "step": 1154
+    },
+    {
+      "epoch": 0.08387495007443448,
+      "grad_norm": 1.30912446975708,
+      "learning_rate": 2.7148819155397388e-05,
+      "loss": 0.2864,
+      "step": 1155
+    },
+    {
+      "epoch": 0.08394756907882793,
+      "grad_norm": 0.5433367490768433,
+      "learning_rate": 2.70061709621116e-05,
+      "loss": 0.103,
+      "step": 1156
+    },
+    {
+      "epoch": 0.08402018808322138,
+      "grad_norm": 1.2427325248718262,
+      "learning_rate": 2.6863839987853356e-05,
+      "loss": 0.5539,
+      "step": 1157
+    },
+    {
+      "epoch": 0.08409280708761482,
+      "grad_norm": 0.6617911458015442,
+      "learning_rate": 2.6721826851169463e-05,
+      "loss": 0.2216,
+      "step": 1158
+    },
+    {
+      "epoch": 0.08416542609200828,
+      "grad_norm": 1.1403348445892334,
+      "learning_rate": 2.6580132169225335e-05,
+      "loss": 0.5581,
+      "step": 1159
+    },
+    {
+      "epoch": 0.08423804509640173,
+      "grad_norm": 1.0311286449432373,
+      "learning_rate": 2.643875655780248e-05,
+      "loss": 0.7779,
+      "step": 1160
+    },
+    {
+      "epoch": 0.08431066410079517,
+      "grad_norm": 1.0697089433670044,
+      "learning_rate": 2.6297700631295774e-05,
+      "loss": 0.2106,
+      "step": 1161
+    },
+    {
+      "epoch": 0.08438328310518863,
+      "grad_norm": 0.938318133354187,
+      "learning_rate": 2.615696500271072e-05,
+      "loss": 1.1687,
+      "step": 1162
+    },
+    {
+      "epoch": 0.08445590210958208,
+      "grad_norm": 1.4174854755401611,
+      "learning_rate": 2.6016550283661035e-05,
+      "loss": 0.3599,
+      "step": 1163
+    },
+    {
+      "epoch": 0.08452852111397553,
+      "grad_norm": 1.5187783241271973,
+      "learning_rate": 2.587645708436559e-05,
+      "loss": 0.4712,
+      "step": 1164
+    },
+    {
+      "epoch": 0.08460114011836897,
+      "grad_norm": 2.356548547744751,
+      "learning_rate": 2.5736686013646228e-05,
+      "loss": 0.4465,
+      "step": 1165
+    },
+    {
+      "epoch": 0.08467375912276243,
+      "grad_norm": 0.7357969284057617,
+      "learning_rate": 2.5597237678924636e-05,
+      "loss": 0.282,
+      "step": 1166
+    },
+    {
+      "epoch": 0.08474637812715588,
+      "grad_norm": 2.6358189582824707,
+      "learning_rate": 2.5458112686220136e-05,
+      "loss": 0.8925,
+      "step": 1167
+    },
+    {
+      "epoch": 0.08481899713154932,
+      "grad_norm": 0.7531256675720215,
+      "learning_rate": 2.531931164014676e-05,
+      "loss": 1.1206,
+      "step": 1168
+    },
+    {
+      "epoch": 0.08489161613594277,
+      "grad_norm": 1.1901469230651855,
+      "learning_rate": 2.5180835143910732e-05,
+      "loss": 0.9059,
+      "step": 1169
+    },
+    {
+      "epoch": 0.08496423514033623,
+      "grad_norm": 1.9718434810638428,
+      "learning_rate": 2.5042683799307865e-05,
+      "loss": 0.3537,
+      "step": 1170
+    },
+    {
+      "epoch": 0.08503685414472968,
+      "grad_norm": 1.1613426208496094,
+      "learning_rate": 2.4904858206720892e-05,
+      "loss": 0.575,
+      "step": 1171
+    },
+    {
+      "epoch": 0.08510947314912312,
+      "grad_norm": 1.022171974182129,
+      "learning_rate": 2.4767358965116893e-05,
+      "loss": 1.1401,
+      "step": 1172
+    },
+    {
+      "epoch": 0.08518209215351658,
+      "grad_norm": 1.1656763553619385,
+      "learning_rate": 2.4630186672044687e-05,
+      "loss": 0.2715,
+      "step": 1173
+    },
+    {
+      "epoch": 0.08525471115791003,
+      "grad_norm": 1.2858641147613525,
+      "learning_rate": 2.4493341923632197e-05,
+      "loss": 1.1828,
+      "step": 1174
+    },
+    {
+      "epoch": 0.08532733016230347,
+      "grad_norm": 0.7832484245300293,
+      "learning_rate": 2.4356825314583932e-05,
+      "loss": 0.2169,
+      "step": 1175
+    },
+    {
+      "epoch": 0.08539994916669692,
+      "grad_norm": 1.20249342918396,
+      "learning_rate": 2.4220637438178317e-05,
+      "loss": 0.7343,
+      "step": 1176
+    },
+    {
+      "epoch": 0.08547256817109038,
+      "grad_norm": 0.957040548324585,
+      "learning_rate": 2.408477888626527e-05,
+      "loss": 0.6592,
+      "step": 1177
+    },
+    {
+      "epoch": 0.08554518717548383,
+      "grad_norm": 0.5666554570198059,
+      "learning_rate": 2.3949250249263332e-05,
+      "loss": 0.2492,
+      "step": 1178
+    },
+    {
+      "epoch": 0.08561780617987727,
+      "grad_norm": 1.3460291624069214,
+      "learning_rate": 2.3814052116157492e-05,
+      "loss": 0.9453,
+      "step": 1179
+    },
+    {
+      "epoch": 0.08569042518427072,
+      "grad_norm": 0.9850242733955383,
+      "learning_rate": 2.3679185074496247e-05,
+      "loss": 0.5196,
+      "step": 1180
+    },
+    {
+      "epoch": 0.08576304418866418,
+      "grad_norm": 0.9715597629547119,
+      "learning_rate": 2.3544649710389334e-05,
+      "loss": 0.4305,
+      "step": 1181
+    },
+    {
+      "epoch": 0.08583566319305762,
+      "grad_norm": 0.8512092232704163,
+      "learning_rate": 2.3410446608505042e-05,
+      "loss": 0.7163,
+      "step": 1182
+    },
+    {
+      "epoch": 0.08590828219745107,
+      "grad_norm": 1.2743794918060303,
+      "learning_rate": 2.3276576352067724e-05,
+      "loss": 0.8088,
+      "step": 1183
+    },
+    {
+      "epoch": 0.08598090120184453,
+      "grad_norm": 0.8128895163536072,
+      "learning_rate": 2.3143039522855202e-05,
+      "loss": 0.3012,
+      "step": 1184
+    },
+    {
+      "epoch": 0.08605352020623797,
+      "grad_norm": 0.886959969997406,
+      "learning_rate": 2.300983670119632e-05,
+      "loss": 0.686,
+      "step": 1185
+    },
+    {
+      "epoch": 0.08612613921063142,
+      "grad_norm": 0.6363818645477295,
+      "learning_rate": 2.2876968465968353e-05,
+      "loss": 0.3435,
+      "step": 1186
+    },
+    {
+      "epoch": 0.08619875821502487,
+      "grad_norm": 0.7743397355079651,
+      "learning_rate": 2.2744435394594497e-05,
+      "loss": 0.2841,
+      "step": 1187
+    },
+    {
+      "epoch": 0.08627137721941833,
+      "grad_norm": 1.243152141571045,
+      "learning_rate": 2.2612238063041512e-05,
+      "loss": 0.9792,
+      "step": 1188
+    },
+    {
+      "epoch": 0.08634399622381177,
+      "grad_norm": 0.7745580077171326,
+      "learning_rate": 2.248037704581686e-05,
+      "loss": 0.8737,
+      "step": 1189
+    },
+    {
+      "epoch": 0.08641661522820522,
+      "grad_norm": 0.7934034466743469,
+      "learning_rate": 2.2348852915966702e-05,
+      "loss": 0.7298,
+      "step": 1190
+    },
+    {
+      "epoch": 0.08648923423259867,
+      "grad_norm": 1.9116917848587036,
+      "learning_rate": 2.2217666245072865e-05,
+      "loss": 0.8374,
+      "step": 1191
+    },
+    {
+      "epoch": 0.08656185323699211,
+      "grad_norm": 5.782865524291992,
+      "learning_rate": 2.208681760325092e-05,
+      "loss": 0.6376,
+      "step": 1192
+    },
+    {
+      "epoch": 0.08663447224138557,
+      "grad_norm": 1.5687942504882812,
+      "learning_rate": 2.195630755914716e-05,
+      "loss": 1.1558,
+      "step": 1193
+    },
+    {
+      "epoch": 0.08670709124577902,
+      "grad_norm": 1.0691802501678467,
+      "learning_rate": 2.1826136679936606e-05,
+      "loss": 1.0076,
+      "step": 1194
+    },
+    {
+      "epoch": 0.08677971025017248,
+      "grad_norm": 0.8438729643821716,
+      "learning_rate": 2.1696305531320117e-05,
+      "loss": 0.182,
+      "step": 1195
+    },
+    {
+      "epoch": 0.08685232925456592,
+      "grad_norm": 0.797114372253418,
+      "learning_rate": 2.156681467752234e-05,
+      "loss": 0.4186,
+      "step": 1196
+    },
+    {
+      "epoch": 0.08692494825895937,
+      "grad_norm": 1.413577675819397,
+      "learning_rate": 2.1437664681288926e-05,
+      "loss": 0.7252,
+      "step": 1197
+    },
+    {
+      "epoch": 0.08699756726335282,
+      "grad_norm": 1.3715764284133911,
+      "learning_rate": 2.130885610388428e-05,
+      "loss": 0.7877,
+      "step": 1198
+    },
+    {
+      "epoch": 0.08707018626774626,
+      "grad_norm": 0.7809646129608154,
+      "learning_rate": 2.1180389505089004e-05,
+      "loss": 0.6832,
+      "step": 1199
+    },
+    {
+      "epoch": 0.08714280527213972,
+      "grad_norm": 0.6671550869941711,
+      "learning_rate": 2.105226544319756e-05,
+      "loss": 0.5012,
+      "step": 1200
+    },
+    {
+      "epoch": 0.08721542427653317,
+      "grad_norm": 1.0234498977661133,
+      "learning_rate": 2.092448447501578e-05,
+      "loss": 0.2274,
+      "step": 1201
+    },
+    {
+      "epoch": 0.08728804328092662,
+      "grad_norm": 1.8863704204559326,
+      "learning_rate": 2.0797047155858497e-05,
+      "loss": 0.9966,
+      "step": 1202
+    },
+    {
+      "epoch": 0.08736066228532006,
+      "grad_norm": 0.28279760479927063,
+      "learning_rate": 2.0669954039547058e-05,
+      "loss": 0.5131,
+      "step": 1203
+    },
+    {
+      "epoch": 0.08743328128971352,
+      "grad_norm": 1.0728230476379395,
+      "learning_rate": 2.0543205678406997e-05,
+      "loss": 0.5508,
+      "step": 1204
+    },
+    {
+      "epoch": 0.08750590029410697,
+      "grad_norm": 1.0050767660140991,
+      "learning_rate": 2.0416802623265595e-05,
+      "loss": 0.9289,
+      "step": 1205
+    },
+    {
+      "epoch": 0.08757851929850041,
+      "grad_norm": 1.5100905895233154,
+      "learning_rate": 2.0290745423449465e-05,
+      "loss": 0.3662,
+      "step": 1206
+    },
+    {
+      "epoch": 0.08765113830289387,
+      "grad_norm": 1.3642252683639526,
+      "learning_rate": 2.016503462678222e-05,
+      "loss": 0.3912,
+      "step": 1207
+    },
+    {
+      "epoch": 0.08772375730728732,
+      "grad_norm": 1.1979995965957642,
+      "learning_rate": 2.0039670779582066e-05,
+      "loss": 0.4938,
+      "step": 1208
+    },
+    {
+      "epoch": 0.08779637631168076,
+      "grad_norm": 1.4360976219177246,
+      "learning_rate": 1.9914654426659374e-05,
+      "loss": 0.7024,
+      "step": 1209
+    },
+    {
+      "epoch": 0.08786899531607421,
+      "grad_norm": 0.74053555727005,
+      "learning_rate": 1.9789986111314427e-05,
+      "loss": 0.7933,
+      "step": 1210
+    },
+    {
+      "epoch": 0.08794161432046767,
+      "grad_norm": 1.336371898651123,
+      "learning_rate": 1.9665666375334967e-05,
+      "loss": 0.3608,
+      "step": 1211
+    },
+    {
+      "epoch": 0.08801423332486112,
+      "grad_norm": 1.8578364849090576,
+      "learning_rate": 1.954169575899386e-05,
+      "loss": 0.5102,
+      "step": 1212
+    },
+    {
+      "epoch": 0.08808685232925456,
+      "grad_norm": 1.1358493566513062,
+      "learning_rate": 1.9418074801046758e-05,
+      "loss": 0.2065,
+      "step": 1213
+    },
+    {
+      "epoch": 0.08815947133364802,
+      "grad_norm": 1.3235039710998535,
+      "learning_rate": 1.9294804038729743e-05,
+      "loss": 0.3946,
+      "step": 1214
+    },
+    {
+      "epoch": 0.08823209033804147,
+      "grad_norm": 0.5844933986663818,
+      "learning_rate": 1.9171884007757092e-05,
+      "loss": 0.0777,
+      "step": 1215
+    },
+    {
+      "epoch": 0.08830470934243491,
+      "grad_norm": 1.4999133348464966,
+      "learning_rate": 1.90493152423187e-05,
+      "loss": 0.4892,
+      "step": 1216
+    },
+    {
+      "epoch": 0.08837732834682836,
+      "grad_norm": 1.1905450820922852,
+      "learning_rate": 1.8927098275078115e-05,
+      "loss": 1.0155,
+      "step": 1217
+    },
+    {
+      "epoch": 0.08844994735122182,
+      "grad_norm": 1.0663069486618042,
+      "learning_rate": 1.880523363716983e-05,
+      "loss": 0.334,
+      "step": 1218
+    },
+    {
+      "epoch": 0.08852256635561527,
+      "grad_norm": 0.927708089351654,
+      "learning_rate": 1.8683721858197366e-05,
+      "loss": 0.3969,
+      "step": 1219
+    },
+    {
+      "epoch": 0.08859518536000871,
+      "grad_norm": 0.7776638269424438,
+      "learning_rate": 1.8562563466230576e-05,
+      "loss": 0.5098,
+      "step": 1220
+    },
+    {
+      "epoch": 0.08866780436440216,
+      "grad_norm": 1.2193666696548462,
+      "learning_rate": 1.844175898780377e-05,
+      "loss": 0.3793,
+      "step": 1221
+    },
+    {
+      "epoch": 0.08874042336879562,
+      "grad_norm": 0.39878934621810913,
+      "learning_rate": 1.8321308947912995e-05,
+      "loss": 0.2213,
+      "step": 1222
+    },
+    {
+      "epoch": 0.08881304237318906,
+      "grad_norm": 1.232279896736145,
+      "learning_rate": 1.8201213870014168e-05,
+      "loss": 0.7453,
+      "step": 1223
+    },
+    {
+      "epoch": 0.08888566137758251,
+      "grad_norm": 1.45534086227417,
+      "learning_rate": 1.8081474276020406e-05,
+      "loss": 0.352,
+      "step": 1224
+    },
+    {
+      "epoch": 0.08895828038197597,
+      "grad_norm": 0.7231876254081726,
+      "learning_rate": 1.7962090686300115e-05,
+      "loss": 0.6815,
+      "step": 1225
+    },
+    {
+      "epoch": 0.08903089938636942,
+      "grad_norm": 1.189754605293274,
+      "learning_rate": 1.7843063619674472e-05,
+      "loss": 0.5027,
+      "step": 1226
+    },
+    {
+      "epoch": 0.08910351839076286,
+      "grad_norm": 1.812227487564087,
+      "learning_rate": 1.77243935934153e-05,
+      "loss": 0.4439,
+      "step": 1227
+    },
+    {
+      "epoch": 0.08917613739515631,
+      "grad_norm": 1.3484034538269043,
+      "learning_rate": 1.7606081123242778e-05,
+      "loss": 0.7234,
+      "step": 1228
+    },
+    {
+      "epoch": 0.08924875639954977,
+      "grad_norm": 1.3264414072036743,
+      "learning_rate": 1.7488126723323183e-05,
+      "loss": 1.5874,
+      "step": 1229
+    },
+    {
+      "epoch": 0.0893213754039432,
+      "grad_norm": 2.067748785018921,
+      "learning_rate": 1.7370530906266725e-05,
+      "loss": 0.9159,
+      "step": 1230
+    },
+    {
+      "epoch": 0.08939399440833666,
+      "grad_norm": 0.28518739342689514,
+      "learning_rate": 1.7253294183125223e-05,
+      "loss": 0.0617,
+      "step": 1231
+    },
+    {
+      "epoch": 0.08946661341273011,
+      "grad_norm": 3.28539776802063,
+      "learning_rate": 1.7136417063389964e-05,
+      "loss": 0.2846,
+      "step": 1232
+    },
+    {
+      "epoch": 0.08953923241712355,
+      "grad_norm": 1.326446533203125,
+      "learning_rate": 1.7019900054989446e-05,
+      "loss": 0.2148,
+      "step": 1233
+    },
+    {
+      "epoch": 0.08961185142151701,
+      "grad_norm": 0.41123178601264954,
+      "learning_rate": 1.6903743664287198e-05,
+      "loss": 0.4878,
+      "step": 1234
+    },
+    {
+      "epoch": 0.08968447042591046,
+      "grad_norm": 2.5448060035705566,
+      "learning_rate": 1.6787948396079545e-05,
+      "loss": 0.4526,
+      "step": 1235
+    },
+    {
+      "epoch": 0.08975708943030392,
+      "grad_norm": 0.6389909982681274,
+      "learning_rate": 1.6672514753593438e-05,
+      "loss": 0.1463,
+      "step": 1236
+    },
+    {
+      "epoch": 0.08982970843469736,
+      "grad_norm": 1.3070532083511353,
+      "learning_rate": 1.6557443238484284e-05,
+      "loss": 0.277,
+      "step": 1237
+    },
+    {
+      "epoch": 0.08990232743909081,
+      "grad_norm": 0.25405797362327576,
+      "learning_rate": 1.6442734350833722e-05,
+      "loss": 0.1553,
+      "step": 1238
+    },
+    {
+      "epoch": 0.08997494644348426,
+      "grad_norm": 1.1293165683746338,
+      "learning_rate": 1.632838858914747e-05,
+      "loss": 0.3383,
+      "step": 1239
+    },
+    {
+      "epoch": 0.0900475654478777,
+      "grad_norm": 1.3277782201766968,
+      "learning_rate": 1.6214406450353183e-05,
+      "loss": 0.1202,
+      "step": 1240
+    },
+    {
+      "epoch": 0.09012018445227116,
+      "grad_norm": 0.8462746143341064,
+      "learning_rate": 1.610078842979824e-05,
+      "loss": 0.3038,
+      "step": 1241
+    },
+    {
+      "epoch": 0.09019280345666461,
+      "grad_norm": 1.0028167963027954,
+      "learning_rate": 1.5987535021247667e-05,
+      "loss": 0.3235,
+      "step": 1242
+    },
+    {
+      "epoch": 0.09026542246105806,
+      "grad_norm": 1.2284901142120361,
+      "learning_rate": 1.587464671688187e-05,
+      "loss": 0.3864,
+      "step": 1243
+    },
+    {
+      "epoch": 0.0903380414654515,
+      "grad_norm": 0.5665420889854431,
+      "learning_rate": 1.576212400729472e-05,
+      "loss": 0.1293,
+      "step": 1244
+    },
+    {
+      "epoch": 0.09041066046984496,
+      "grad_norm": 0.7567048072814941,
+      "learning_rate": 1.564996738149106e-05,
+      "loss": 0.2426,
+      "step": 1245
+    },
+    {
+      "epoch": 0.09048327947423841,
+      "grad_norm": 1.367139458656311,
+      "learning_rate": 1.5538177326885027e-05,
+      "loss": 0.8348,
+      "step": 1246
+    },
+    {
+      "epoch": 0.09055589847863185,
+      "grad_norm": 0.7689369320869446,
+      "learning_rate": 1.5426754329297476e-05,
+      "loss": 0.494,
+      "step": 1247
+    },
+    {
+      "epoch": 0.0906285174830253,
+      "grad_norm": 0.5409727096557617,
+      "learning_rate": 1.5315698872954298e-05,
+      "loss": 0.2656,
+      "step": 1248
+    },
+    {
+      "epoch": 0.09070113648741876,
+      "grad_norm": 1.1950929164886475,
+      "learning_rate": 1.5205011440483929e-05,
+      "loss": 0.285,
+      "step": 1249
+    },
+    {
+      "epoch": 0.09077375549181221,
+      "grad_norm": 1.4262182712554932,
+      "learning_rate": 1.509469251291562e-05,
+      "loss": 0.3933,
+      "step": 1250
+    },
+    {
+      "epoch": 0.09084637449620565,
+      "grad_norm": 0.5352879762649536,
+      "learning_rate": 1.4984742569676958e-05,
+      "loss": 0.2197,
+      "step": 1251
+    },
+    {
+      "epoch": 0.09091899350059911,
+      "grad_norm": 0.2516248822212219,
+      "learning_rate": 1.4875162088592154e-05,
+      "loss": 0.0626,
+      "step": 1252
+    },
+    {
+      "epoch": 0.09099161250499256,
+      "grad_norm": 0.8240694999694824,
+      "learning_rate": 1.476595154587973e-05,
+      "loss": 0.3354,
+      "step": 1253
+    },
+    {
+      "epoch": 0.091064231509386,
+      "grad_norm": 0.5404168963432312,
+      "learning_rate": 1.4657111416150505e-05,
+      "loss": 0.3098,
+      "step": 1254
+    },
+    {
+      "epoch": 0.09113685051377945,
+      "grad_norm": 0.7454471588134766,
+      "learning_rate": 1.4548642172405569e-05,
+      "loss": 0.2495,
+      "step": 1255
+    },
+    {
+      "epoch": 0.09120946951817291,
+      "grad_norm": 0.9600757360458374,
+      "learning_rate": 1.4440544286034185e-05,
+      "loss": 0.2985,
+      "step": 1256
+    },
+    {
+      "epoch": 0.09128208852256636,
+      "grad_norm": 1.1442216634750366,
+      "learning_rate": 1.4332818226811807e-05,
+      "loss": 0.7581,
+      "step": 1257
+    },
+    {
+      "epoch": 0.0913547075269598,
+      "grad_norm": 1.5297975540161133,
+      "learning_rate": 1.4225464462897876e-05,
+      "loss": 1.0912,
+      "step": 1258
+    },
+    {
+      "epoch": 0.09142732653135326,
+      "grad_norm": 2.290416955947876,
+      "learning_rate": 1.4118483460834064e-05,
+      "loss": 0.4993,
+      "step": 1259
+    },
+    {
+      "epoch": 0.09149994553574671,
+      "grad_norm": 2.1155903339385986,
+      "learning_rate": 1.4011875685541898e-05,
+      "loss": 0.6808,
+      "step": 1260
+    },
+    {
+      "epoch": 0.09157256454014015,
+      "grad_norm": 1.0581969022750854,
+      "learning_rate": 1.3905641600321128e-05,
+      "loss": 0.4101,
+      "step": 1261
+    },
+    {
+      "epoch": 0.0916451835445336,
+      "grad_norm": 0.7325427532196045,
+      "learning_rate": 1.37997816668473e-05,
+      "loss": 0.2992,
+      "step": 1262
+    },
+    {
+      "epoch": 0.09171780254892706,
+      "grad_norm": 1.0724326372146606,
+      "learning_rate": 1.3694296345170122e-05,
+      "loss": 1.1187,
+      "step": 1263
+    },
+    {
+      "epoch": 0.0917904215533205,
+      "grad_norm": 1.883070707321167,
+      "learning_rate": 1.3589186093711226e-05,
+      "loss": 0.568,
+      "step": 1264
+    },
+    {
+      "epoch": 0.09186304055771395,
+      "grad_norm": 1.4329851865768433,
+      "learning_rate": 1.3484451369262252e-05,
+      "loss": 0.6215,
+      "step": 1265
+    },
+    {
+      "epoch": 0.0919356595621074,
+      "grad_norm": 2.0400278568267822,
+      "learning_rate": 1.3380092626982887e-05,
+      "loss": 1.1056,
+      "step": 1266
+    },
+    {
+      "epoch": 0.09200827856650086,
+      "grad_norm": 0.49863314628601074,
+      "learning_rate": 1.327611032039885e-05,
+      "loss": 0.2798,
+      "step": 1267
+    },
+    {
+      "epoch": 0.0920808975708943,
+      "grad_norm": 2.3871212005615234,
+      "learning_rate": 1.3172504901399919e-05,
+      "loss": 0.5128,
+      "step": 1268
+    },
+    {
+      "epoch": 0.09215351657528775,
+      "grad_norm": 1.496303915977478,
+      "learning_rate": 1.3069276820237997e-05,
+      "loss": 1.0884,
+      "step": 1269
+    },
+    {
+      "epoch": 0.0922261355796812,
+      "grad_norm": 0.7066709399223328,
+      "learning_rate": 1.2966426525525144e-05,
+      "loss": 0.445,
+      "step": 1270
+    },
+    {
+      "epoch": 0.09229875458407465,
+      "grad_norm": 1.0572446584701538,
+      "learning_rate": 1.286395446423162e-05,
+      "loss": 0.4805,
+      "step": 1271
+    },
+    {
+      "epoch": 0.0923713735884681,
+      "grad_norm": 0.8293216824531555,
+      "learning_rate": 1.2761861081683913e-05,
+      "loss": 0.1842,
+      "step": 1272
+    },
+    {
+      "epoch": 0.09244399259286155,
+      "grad_norm": 1.265493392944336,
+      "learning_rate": 1.2660146821562934e-05,
+      "loss": 0.4421,
+      "step": 1273
+    },
+    {
+      "epoch": 0.09251661159725501,
+      "grad_norm": 1.175404667854309,
+      "learning_rate": 1.255881212590183e-05,
+      "loss": 0.2995,
+      "step": 1274
+    },
+    {
+      "epoch": 0.09258923060164845,
+      "grad_norm": 1.3074171543121338,
+      "learning_rate": 1.2457857435084408e-05,
+      "loss": 0.6294,
+      "step": 1275
+    },
+    {
+      "epoch": 0.0926618496060419,
+      "grad_norm": 2.1092562675476074,
+      "learning_rate": 1.2357283187842873e-05,
+      "loss": 0.6154,
+      "step": 1276
+    },
+    {
+      "epoch": 0.09273446861043536,
+      "grad_norm": 1.2864784002304077,
+      "learning_rate": 1.2257089821256196e-05,
+      "loss": 0.9862,
+      "step": 1277
+    },
+    {
+      "epoch": 0.0928070876148288,
+      "grad_norm": 1.4146101474761963,
+      "learning_rate": 1.2157277770748044e-05,
+      "loss": 0.6061,
+      "step": 1278
+    },
+    {
+      "epoch": 0.09287970661922225,
+      "grad_norm": 0.9301030039787292,
+      "learning_rate": 1.2057847470084993e-05,
+      "loss": 0.8097,
+      "step": 1279
+    },
+    {
+      "epoch": 0.0929523256236157,
+      "grad_norm": 0.821770966053009,
+      "learning_rate": 1.195879935137455e-05,
+      "loss": 0.6524,
+      "step": 1280
+    },
+    {
+      "epoch": 0.09302494462800916,
+      "grad_norm": 0.928987443447113,
+      "learning_rate": 1.1860133845063349e-05,
+      "loss": 0.8068,
+      "step": 1281
+    },
+    {
+      "epoch": 0.0930975636324026,
+      "grad_norm": 1.009806513786316,
+      "learning_rate": 1.1761851379935306e-05,
+      "loss": 0.1709,
+      "step": 1282
+    },
+    {
+      "epoch": 0.09317018263679605,
+      "grad_norm": 1.4650064706802368,
+      "learning_rate": 1.1663952383109567e-05,
+      "loss": 0.5689,
+      "step": 1283
+    },
+    {
+      "epoch": 0.0932428016411895,
+      "grad_norm": 0.8833450675010681,
+      "learning_rate": 1.1566437280038955e-05,
+      "loss": 0.3229,
+      "step": 1284
+    },
+    {
+      "epoch": 0.09331542064558294,
+      "grad_norm": 0.7067506313323975,
+      "learning_rate": 1.1469306494507793e-05,
+      "loss": 0.3529,
+      "step": 1285
+    },
+    {
+      "epoch": 0.0933880396499764,
+      "grad_norm": 1.1283503770828247,
+      "learning_rate": 1.1372560448630376e-05,
+      "loss": 0.9246,
+      "step": 1286
+    },
+    {
+      "epoch": 0.09346065865436985,
+      "grad_norm": 1.1356388330459595,
+      "learning_rate": 1.1276199562848844e-05,
+      "loss": 0.819,
+      "step": 1287
+    },
+    {
+      "epoch": 0.09353327765876329,
+      "grad_norm": 1.6198071241378784,
+      "learning_rate": 1.1180224255931626e-05,
+      "loss": 0.247,
+      "step": 1288
+    },
+    {
+      "epoch": 0.09360589666315675,
+      "grad_norm": 1.1830090284347534,
+      "learning_rate": 1.108463494497135e-05,
+      "loss": 0.5414,
+      "step": 1289
+    },
+    {
+      "epoch": 0.0936785156675502,
+      "grad_norm": 1.3371515274047852,
+      "learning_rate": 1.0989432045383318e-05,
+      "loss": 0.4783,
+      "step": 1290
+    },
+    {
+      "epoch": 0.09375113467194365,
+      "grad_norm": 0.21214567124843597,
+      "learning_rate": 1.0894615970903454e-05,
+      "loss": 0.0568,
+      "step": 1291
+    },
+    {
+      "epoch": 0.0938237536763371,
+      "grad_norm": 0.9080774188041687,
+      "learning_rate": 1.0800187133586626e-05,
+      "loss": 0.1972,
+      "step": 1292
+    },
+    {
+      "epoch": 0.09389637268073055,
+      "grad_norm": 1.6248502731323242,
+      "learning_rate": 1.0706145943804846e-05,
+      "loss": 0.5891,
+      "step": 1293
+    },
+    {
+      "epoch": 0.093968991685124,
+      "grad_norm": 1.8062894344329834,
+      "learning_rate": 1.0612492810245466e-05,
+      "loss": 1.0032,
+      "step": 1294
+    },
+    {
+      "epoch": 0.09404161068951744,
+      "grad_norm": 1.8059346675872803,
+      "learning_rate": 1.0519228139909398e-05,
+      "loss": 0.7242,
+      "step": 1295
+    },
+    {
+      "epoch": 0.0941142296939109,
+      "grad_norm": 0.9609408378601074,
+      "learning_rate": 1.042635233810939e-05,
+      "loss": 0.3979,
+      "step": 1296
+    },
+    {
+      "epoch": 0.09418684869830435,
+      "grad_norm": 1.0131604671478271,
+      "learning_rate": 1.0333865808468202e-05,
+      "loss": 0.3565,
+      "step": 1297
+    },
+    {
+      "epoch": 0.0942594677026978,
+      "grad_norm": 0.8987993597984314,
+      "learning_rate": 1.0241768952916897e-05,
+      "loss": 0.1929,
+      "step": 1298
+    },
+    {
+      "epoch": 0.09433208670709124,
+      "grad_norm": 1.3016245365142822,
+      "learning_rate": 1.0150062171693076e-05,
+      "loss": 1.1127,
+      "step": 1299
+    },
+    {
+      "epoch": 0.0944047057114847,
+      "grad_norm": 0.77074134349823,
+      "learning_rate": 1.0058745863339137e-05,
+      "loss": 0.1527,
+      "step": 1300
+    },
+    {
+      "epoch": 0.09447732471587815,
+      "grad_norm": 1.5303739309310913,
+      "learning_rate": 9.967820424700547e-06,
+      "loss": 0.738,
+      "step": 1301
+    },
+    {
+      "epoch": 0.09454994372027159,
+      "grad_norm": 0.9401659965515137,
+      "learning_rate": 9.877286250924145e-06,
+      "loss": 1.9041,
+      "step": 1302
+    },
+    {
+      "epoch": 0.09462256272466504,
+      "grad_norm": 0.4381430149078369,
+      "learning_rate": 9.78714373545635e-06,
+      "loss": 0.0697,
+      "step": 1303
+    },
+    {
+      "epoch": 0.0946951817290585,
+      "grad_norm": 1.3570393323898315,
+      "learning_rate": 9.697393270041533e-06,
+      "loss": 0.3859,
+      "step": 1304
+    },
+    {
+      "epoch": 0.09476780073345195,
+      "grad_norm": 1.3089454174041748,
+      "learning_rate": 9.608035244720293e-06,
+      "loss": 0.3144,
+      "step": 1305
+    },
+    {
+      "epoch": 0.09484041973784539,
+      "grad_norm": 1.2595561742782593,
+      "learning_rate": 9.519070047827705e-06,
+      "loss": 0.1809,
+      "step": 1306
+    },
+    {
+      "epoch": 0.09491303874223884,
+      "grad_norm": 1.9086363315582275,
+      "learning_rate": 9.43049806599171e-06,
+      "loss": 1.1982,
+      "step": 1307
+    },
+    {
+      "epoch": 0.0949856577466323,
+      "grad_norm": 1.0663625001907349,
+      "learning_rate": 9.342319684131395e-06,
+      "loss": 0.427,
+      "step": 1308
+    },
+    {
+      "epoch": 0.09505827675102574,
+      "grad_norm": 0.7126550674438477,
+      "learning_rate": 9.254535285455334e-06,
+      "loss": 0.2263,
+      "step": 1309
+    },
+    {
+      "epoch": 0.09513089575541919,
+      "grad_norm": 1.6234203577041626,
+      "learning_rate": 9.167145251459874e-06,
+      "loss": 0.882,
+      "step": 1310
+    },
+    {
+      "epoch": 0.09520351475981265,
+      "grad_norm": 1.5689666271209717,
+      "learning_rate": 9.08014996192762e-06,
+      "loss": 0.5279,
+      "step": 1311
+    },
+    {
+      "epoch": 0.09527613376420609,
+      "grad_norm": 0.9089229106903076,
+      "learning_rate": 8.993549794925537e-06,
+      "loss": 0.2662,
+      "step": 1312
+    },
+    {
+      "epoch": 0.09534875276859954,
+      "grad_norm": 1.274556279182434,
+      "learning_rate": 8.907345126803602e-06,
+      "loss": 0.7342,
+      "step": 1313
+    },
+    {
+      "epoch": 0.095421371772993,
+      "grad_norm": 0.8675146102905273,
+      "learning_rate": 8.821536332192881e-06,
+      "loss": 0.5404,
+      "step": 1314
+    },
+    {
+      "epoch": 0.09549399077738645,
+      "grad_norm": 1.2549453973770142,
+      "learning_rate": 8.73612378400418e-06,
+      "loss": 0.4228,
+      "step": 1315
+    },
+    {
+      "epoch": 0.09556660978177989,
+      "grad_norm": 0.8200691938400269,
+      "learning_rate": 8.651107853426144e-06,
+      "loss": 1.197,
+      "step": 1316
+    },
+    {
+      "epoch": 0.09563922878617334,
+      "grad_norm": 0.6289176940917969,
+      "learning_rate": 8.56648890992392e-06,
+      "loss": 0.1982,
+      "step": 1317
+    },
+    {
+      "epoch": 0.0957118477905668,
+      "grad_norm": 1.0129482746124268,
+      "learning_rate": 8.48226732123728e-06,
+      "loss": 0.2332,
+      "step": 1318
+    },
+    {
+      "epoch": 0.09578446679496024,
+      "grad_norm": 0.4745711088180542,
+      "learning_rate": 8.398443453379267e-06,
+      "loss": 0.0462,
+      "step": 1319
+    },
+    {
+      "epoch": 0.09585708579935369,
+      "grad_norm": 1.2555979490280151,
+      "learning_rate": 8.31501767063445e-06,
+      "loss": 0.5048,
+      "step": 1320
+    },
+    {
+      "epoch": 0.09592970480374714,
+      "grad_norm": 0.7158645391464233,
+      "learning_rate": 8.231990335557382e-06,
+      "loss": 0.6655,
+      "step": 1321
+    },
+    {
+      "epoch": 0.0960023238081406,
+      "grad_norm": 1.7039415836334229,
+      "learning_rate": 8.149361808971046e-06,
+      "loss": 0.9697,
+      "step": 1322
+    },
+    {
+      "epoch": 0.09607494281253404,
+      "grad_norm": 2.8836214542388916,
+      "learning_rate": 8.067132449965254e-06,
+      "loss": 1.3421,
+      "step": 1323
+    },
+    {
+      "epoch": 0.09614756181692749,
+      "grad_norm": 0.9210156202316284,
+      "learning_rate": 7.985302615895118e-06,
+      "loss": 0.8105,
+      "step": 1324
|
| 9310 |
+
},
|
| 9311 |
+
{
|
| 9312 |
+
"epoch": 0.09622018082132094,
|
| 9313 |
+
"grad_norm": 1.320299744606018,
|
| 9314 |
+
"learning_rate": 7.903872662379452e-06,
|
| 9315 |
+
"loss": 0.3165,
|
| 9316 |
+
"step": 1325
|
| 9317 |
+
},
|
| 9318 |
+
{
|
| 9319 |
+
"epoch": 0.09629279982571438,
|
| 9320 |
+
"grad_norm": 1.4190196990966797,
|
| 9321 |
+
"learning_rate": 7.822842943299291e-06,
|
| 9322 |
+
"loss": 1.4083,
|
| 9323 |
+
"step": 1326
|
| 9324 |
+
},
|
| 9325 |
+
{
|
| 9326 |
+
"epoch": 0.09636541883010784,
|
| 9327 |
+
"grad_norm": 0.8127224445343018,
|
| 9328 |
+
"learning_rate": 7.742213810796306e-06,
|
| 9329 |
+
"loss": 0.2264,
|
| 9330 |
+
"step": 1327
|
| 9331 |
+
},
|
| 9332 |
+
{
|
| 9333 |
+
"epoch": 0.09643803783450129,
|
| 9334 |
+
"grad_norm": 1.4018210172653198,
|
| 9335 |
+
"learning_rate": 7.661985615271283e-06,
|
| 9336 |
+
"loss": 0.3922,
|
| 9337 |
+
"step": 1328
|
| 9338 |
+
},
|
| 9339 |
+
{
|
| 9340 |
+
"epoch": 0.09651065683889475,
|
| 9341 |
+
"grad_norm": 1.673879623413086,
|
| 9342 |
+
"learning_rate": 7.582158705382581e-06,
|
| 9343 |
+
"loss": 0.4776,
|
| 9344 |
+
"step": 1329
|
| 9345 |
+
},
|
| 9346 |
+
{
|
| 9347 |
+
"epoch": 0.09658327584328819,
|
| 9348 |
+
"grad_norm": 1.472786545753479,
|
| 9349 |
+
"learning_rate": 7.502733428044683e-06,
|
| 9350 |
+
"loss": 0.4255,
|
| 9351 |
+
"step": 1330
|
| 9352 |
+
},
|
| 9353 |
+
{
|
| 9354 |
+
"epoch": 0.09665589484768164,
|
| 9355 |
+
"grad_norm": 1.442921757698059,
|
| 9356 |
+
"learning_rate": 7.423710128426608e-06,
|
| 9357 |
+
"loss": 0.5593,
|
| 9358 |
+
"step": 1331
|
| 9359 |
+
},
|
| 9360 |
+
{
|
| 9361 |
+
"epoch": 0.0967285138520751,
|
| 9362 |
+
"grad_norm": 1.4609953165054321,
|
| 9363 |
+
"learning_rate": 7.345089149950468e-06,
|
| 9364 |
+
"loss": 0.3764,
|
| 9365 |
+
"step": 1332
|
| 9366 |
+
},
|
| 9367 |
+
{
|
| 9368 |
+
"epoch": 0.09680113285646853,
|
| 9369 |
+
"grad_norm": 0.38055500388145447,
|
| 9370 |
+
"learning_rate": 7.26687083428993e-06,
|
| 9371 |
+
"loss": 0.1146,
|
| 9372 |
+
"step": 1333
|
| 9373 |
+
},
|
| 9374 |
+
{
|
| 9375 |
+
"epoch": 0.09687375186086199,
|
| 9376 |
+
"grad_norm": 1.7576863765716553,
|
| 9377 |
+
"learning_rate": 7.1890555213687925e-06,
|
| 9378 |
+
"loss": 0.7101,
|
| 9379 |
+
"step": 1334
|
| 9380 |
+
},
|
| 9381 |
+
{
|
| 9382 |
+
"epoch": 0.09694637086525544,
|
| 9383 |
+
"grad_norm": 1.7637895345687866,
|
| 9384 |
+
"learning_rate": 7.111643549359426e-06,
|
| 9385 |
+
"loss": 0.8437,
|
| 9386 |
+
"step": 1335
|
| 9387 |
+
},
|
| 9388 |
+
{
|
| 9389 |
+
"epoch": 0.09701898986964888,
|
| 9390 |
+
"grad_norm": 1.0466723442077637,
|
| 9391 |
+
"learning_rate": 7.034635254681388e-06,
|
| 9392 |
+
"loss": 0.3565,
|
| 9393 |
+
"step": 1336
|
| 9394 |
+
},
|
| 9395 |
+
{
|
| 9396 |
+
"epoch": 0.09709160887404233,
|
| 9397 |
+
"grad_norm": 1.7058753967285156,
|
| 9398 |
+
"learning_rate": 6.958030971999885e-06,
|
| 9399 |
+
"loss": 0.8995,
|
| 9400 |
+
"step": 1337
|
| 9401 |
+
},
|
| 9402 |
+
{
|
| 9403 |
+
"epoch": 0.09716422787843579,
|
| 9404 |
+
"grad_norm": 0.8441840410232544,
|
| 9405 |
+
"learning_rate": 6.881831034224418e-06,
|
| 9406 |
+
"loss": 0.6087,
|
| 9407 |
+
"step": 1338
|
| 9408 |
+
},
|
| 9409 |
+
{
|
| 9410 |
+
"epoch": 0.09723684688282924,
|
| 9411 |
+
"grad_norm": 0.9947230815887451,
|
| 9412 |
+
"learning_rate": 6.806035772507169e-06,
|
| 9413 |
+
"loss": 0.6672,
|
| 9414 |
+
"step": 1339
|
| 9415 |
+
},
|
| 9416 |
+
{
|
| 9417 |
+
"epoch": 0.09730946588722268,
|
| 9418 |
+
"grad_norm": 1.3739858865737915,
|
| 9419 |
+
"learning_rate": 6.730645516241785e-06,
|
| 9420 |
+
"loss": 1.2249,
|
| 9421 |
+
"step": 1340
|
| 9422 |
+
},
|
| 9423 |
+
{
|
| 9424 |
+
"epoch": 0.09738208489161614,
|
| 9425 |
+
"grad_norm": 1.043587327003479,
|
| 9426 |
+
"learning_rate": 6.655660593061719e-06,
|
| 9427 |
+
"loss": 0.6203,
|
| 9428 |
+
"step": 1341
|
| 9429 |
+
},
|
| 9430 |
+
{
|
| 9431 |
+
"epoch": 0.09745470389600959,
|
| 9432 |
+
"grad_norm": 1.4369832277297974,
|
| 9433 |
+
"learning_rate": 6.581081328839012e-06,
|
| 9434 |
+
"loss": 0.3429,
|
| 9435 |
+
"step": 1342
|
| 9436 |
+
},
|
| 9437 |
+
{
|
| 9438 |
+
"epoch": 0.09752732290040303,
|
| 9439 |
+
"grad_norm": 1.2933294773101807,
|
| 9440 |
+
"learning_rate": 6.506908047682669e-06,
|
| 9441 |
+
"loss": 0.8206,
|
| 9442 |
+
"step": 1343
|
| 9443 |
+
},
|
| 9444 |
+
{
|
| 9445 |
+
"epoch": 0.09759994190479648,
|
| 9446 |
+
"grad_norm": 1.2275322675704956,
|
| 9447 |
+
"learning_rate": 6.4331410719374765e-06,
|
| 9448 |
+
"loss": 0.6467,
|
| 9449 |
+
"step": 1344
|
| 9450 |
+
},
|
| 9451 |
+
{
|
| 9452 |
+
"epoch": 0.09767256090918994,
|
| 9453 |
+
"grad_norm": 1.4707872867584229,
|
| 9454 |
+
"learning_rate": 6.359780722182374e-06,
|
| 9455 |
+
"loss": 1.2502,
|
| 9456 |
+
"step": 1345
|
| 9457 |
+
},
|
| 9458 |
+
{
|
| 9459 |
+
"epoch": 0.09774517991358339,
|
| 9460 |
+
"grad_norm": 1.0052251815795898,
|
| 9461 |
+
"learning_rate": 6.286827317229294e-06,
|
| 9462 |
+
"loss": 1.0535,
|
| 9463 |
+
"step": 1346
|
| 9464 |
+
},
|
| 9465 |
+
{
|
| 9466 |
+
"epoch": 0.09781779891797683,
|
| 9467 |
+
"grad_norm": 0.7107351422309875,
|
| 9468 |
+
"learning_rate": 6.214281174121517e-06,
|
| 9469 |
+
"loss": 0.2857,
|
| 9470 |
+
"step": 1347
|
| 9471 |
+
},
|
| 9472 |
+
{
|
| 9473 |
+
"epoch": 0.09789041792237028,
|
| 9474 |
+
"grad_norm": 0.9116036295890808,
|
| 9475 |
+
"learning_rate": 6.142142608132551e-06,
|
| 9476 |
+
"loss": 0.4234,
|
| 9477 |
+
"step": 1348
|
| 9478 |
+
},
|
| 9479 |
+
{
|
| 9480 |
+
"epoch": 0.09796303692676374,
|
| 9481 |
+
"grad_norm": 1.8742185831069946,
|
| 9482 |
+
"learning_rate": 6.070411932764586e-06,
|
| 9483 |
+
"loss": 0.2691,
|
| 9484 |
+
"step": 1349
|
| 9485 |
+
},
|
| 9486 |
+
{
|
| 9487 |
+
"epoch": 0.09803565593115718,
|
| 9488 |
+
"grad_norm": 1.347491979598999,
|
| 9489 |
+
"learning_rate": 5.999089459747176e-06,
|
| 9490 |
+
"loss": 1.0205,
|
| 9491 |
+
"step": 1350
|
| 9492 |
+
},
|
| 9493 |
+
{
|
| 9494 |
+
"epoch": 0.09810827493555063,
|
| 9495 |
+
"grad_norm": 0.9007847905158997,
|
| 9496 |
+
"learning_rate": 5.9281754990359265e-06,
|
| 9497 |
+
"loss": 0.7101,
|
| 9498 |
+
"step": 1351
|
| 9499 |
+
},
|
| 9500 |
+
{
|
| 9501 |
+
"epoch": 0.09818089393994409,
|
| 9502 |
+
"grad_norm": 1.1569616794586182,
|
| 9503 |
+
"learning_rate": 5.857670358811096e-06,
|
| 9504 |
+
"loss": 1.2471,
|
| 9505 |
+
"step": 1352
|
| 9506 |
+
},
|
| 9507 |
+
{
|
| 9508 |
+
"epoch": 0.09825351294433754,
|
| 9509 |
+
"grad_norm": 1.3289388418197632,
|
| 9510 |
+
"learning_rate": 5.787574345476298e-06,
|
| 9511 |
+
"loss": 0.8682,
|
| 9512 |
+
"step": 1353
|
| 9513 |
+
},
|
| 9514 |
+
{
|
| 9515 |
+
"epoch": 0.09832613194873098,
|
| 9516 |
+
"grad_norm": 0.8255273699760437,
|
| 9517 |
+
"learning_rate": 5.717887763657115e-06,
|
| 9518 |
+
"loss": 1.0531,
|
| 9519 |
+
"step": 1354
|
| 9520 |
+
},
|
| 9521 |
+
{
|
| 9522 |
+
"epoch": 0.09839875095312443,
|
| 9523 |
+
"grad_norm": 0.6249293684959412,
|
| 9524 |
+
"learning_rate": 5.64861091619987e-06,
|
| 9525 |
+
"loss": 0.4606,
|
| 9526 |
+
"step": 1355
|
| 9527 |
+
},
|
| 9528 |
+
{
|
| 9529 |
+
"epoch": 0.09847136995751789,
|
| 9530 |
+
"grad_norm": 1.3531060218811035,
|
| 9531 |
+
"learning_rate": 5.579744104170137e-06,
|
| 9532 |
+
"loss": 1.2068,
|
| 9533 |
+
"step": 1356
|
| 9534 |
+
},
|
| 9535 |
+
{
|
| 9536 |
+
"epoch": 0.09854398896191133,
|
| 9537 |
+
"grad_norm": 0.8313332796096802,
|
| 9538 |
+
"learning_rate": 5.51128762685168e-06,
|
| 9539 |
+
"loss": 1.2926,
|
| 9540 |
+
"step": 1357
|
| 9541 |
+
},
|
| 9542 |
+
{
|
| 9543 |
+
"epoch": 0.09861660796630478,
|
| 9544 |
+
"grad_norm": 1.6587263345718384,
|
| 9545 |
+
"learning_rate": 5.443241781744924e-06,
|
| 9546 |
+
"loss": 0.3437,
|
| 9547 |
+
"step": 1358
|
| 9548 |
+
},
|
| 9549 |
+
{
|
| 9550 |
+
"epoch": 0.09868922697069824,
|
| 9551 |
+
"grad_norm": 1.6625611782073975,
|
| 9552 |
+
"learning_rate": 5.375606864565785e-06,
|
| 9553 |
+
"loss": 0.509,
|
| 9554 |
+
"step": 1359
|
| 9555 |
+
},
|
| 9556 |
+
{
|
| 9557 |
+
"epoch": 0.09876184597509167,
|
| 9558 |
+
"grad_norm": 1.2663472890853882,
|
| 9559 |
+
"learning_rate": 5.308383169244346e-06,
|
| 9560 |
+
"loss": 0.5083,
|
| 9561 |
+
"step": 1360
|
| 9562 |
+
},
|
| 9563 |
+
{
|
| 9564 |
+
"epoch": 0.09883446497948513,
|
| 9565 |
+
"grad_norm": 1.4353704452514648,
|
| 9566 |
+
"learning_rate": 5.241570987923616e-06,
|
| 9567 |
+
"loss": 0.6536,
|
| 9568 |
+
"step": 1361
|
| 9569 |
+
},
|
| 9570 |
+
{
|
| 9571 |
+
"epoch": 0.09890708398387858,
|
| 9572 |
+
"grad_norm": 1.2752748727798462,
|
| 9573 |
+
"learning_rate": 5.17517061095818e-06,
|
| 9574 |
+
"loss": 0.9895,
|
| 9575 |
+
"step": 1362
|
| 9576 |
+
},
|
| 9577 |
+
{
|
| 9578 |
+
"epoch": 0.09897970298827204,
|
| 9579 |
+
"grad_norm": 0.9998459219932556,
|
| 9580 |
+
"learning_rate": 5.109182326913054e-06,
|
| 9581 |
+
"loss": 0.9546,
|
| 9582 |
+
"step": 1363
|
| 9583 |
+
},
|
| 9584 |
+
{
|
| 9585 |
+
"epoch": 0.09905232199266548,
|
| 9586 |
+
"grad_norm": 1.1655049324035645,
|
| 9587 |
+
"learning_rate": 5.043606422562308e-06,
|
| 9588 |
+
"loss": 0.3596,
|
| 9589 |
+
"step": 1364
|
| 9590 |
+
},
|
| 9591 |
+
{
|
| 9592 |
+
"epoch": 0.09912494099705893,
|
| 9593 |
+
"grad_norm": 0.8313685655593872,
|
| 9594 |
+
"learning_rate": 4.978443182887904e-06,
|
| 9595 |
+
"loss": 0.3254,
|
| 9596 |
+
"step": 1365
|
| 9597 |
+
},
|
| 9598 |
+
{
|
| 9599 |
+
"epoch": 0.09919756000145238,
|
| 9600 |
+
"grad_norm": 1.1655441522598267,
|
| 9601 |
+
"learning_rate": 4.913692891078437e-06,
|
| 9602 |
+
"loss": 0.0872,
|
| 9603 |
+
"step": 1366
|
| 9604 |
+
},
|
| 9605 |
+
{
|
| 9606 |
+
"epoch": 0.09927017900584582,
|
| 9607 |
+
"grad_norm": 1.414899468421936,
|
| 9608 |
+
"learning_rate": 4.849355828527924e-06,
|
| 9609 |
+
"loss": 0.8065,
|
| 9610 |
+
"step": 1367
|
| 9611 |
+
},
|
| 9612 |
+
{
|
| 9613 |
+
"epoch": 0.09934279801023928,
|
| 9614 |
+
"grad_norm": 4.115533828735352,
|
| 9615 |
+
"learning_rate": 4.785432274834467e-06,
|
| 9616 |
+
"loss": 0.7248,
|
| 9617 |
+
"step": 1368
|
| 9618 |
+
},
|
| 9619 |
+
{
|
| 9620 |
+
"epoch": 0.09941541701463273,
|
| 9621 |
+
"grad_norm": 0.9328259229660034,
|
| 9622 |
+
"learning_rate": 4.721922507799248e-06,
|
| 9623 |
+
"loss": 0.3552,
|
| 9624 |
+
"step": 1369
|
| 9625 |
+
},
|
| 9626 |
+
{
|
| 9627 |
+
"epoch": 0.09948803601902619,
|
| 9628 |
+
"grad_norm": 2.7959179878234863,
|
| 9629 |
+
"learning_rate": 4.658826803425087e-06,
|
| 9630 |
+
"loss": 1.154,
|
| 9631 |
+
"step": 1370
|
| 9632 |
+
},
|
| 9633 |
+
{
|
| 9634 |
+
"epoch": 0.09956065502341963,
|
| 9635 |
+
"grad_norm": 0.8752519488334656,
|
| 9636 |
+
"learning_rate": 4.596145435915467e-06,
|
| 9637 |
+
"loss": 0.2589,
|
| 9638 |
+
"step": 1371
|
| 9639 |
+
},
|
| 9640 |
+
{
|
| 9641 |
+
"epoch": 0.09963327402781308,
|
| 9642 |
+
"grad_norm": 1.2420629262924194,
|
| 9643 |
+
"learning_rate": 4.5338786776731315e-06,
|
| 9644 |
+
"loss": 0.8256,
|
| 9645 |
+
"step": 1372
|
| 9646 |
+
},
|
| 9647 |
+
{
|
| 9648 |
+
"epoch": 0.09970589303220653,
|
| 9649 |
+
"grad_norm": 1.3992401361465454,
|
| 9650 |
+
"learning_rate": 4.472026799299045e-06,
|
| 9651 |
+
"loss": 0.8573,
|
| 9652 |
+
"step": 1373
|
| 9653 |
+
},
|
| 9654 |
+
{
|
| 9655 |
+
"epoch": 0.09977851203659997,
|
| 9656 |
+
"grad_norm": 2.5388810634613037,
|
| 9657 |
+
"learning_rate": 4.410590069591192e-06,
|
| 9658 |
+
"loss": 0.802,
|
| 9659 |
+
"step": 1374
|
| 9660 |
+
},
|
| 9661 |
+
{
|
| 9662 |
+
"epoch": 0.09985113104099343,
|
| 9663 |
+
"grad_norm": 0.8144094347953796,
|
| 9664 |
+
"learning_rate": 4.349568755543354e-06,
|
| 9665 |
+
"loss": 0.2611,
|
| 9666 |
+
"step": 1375
|
| 9667 |
+
},
|
| 9668 |
+
{
|
| 9669 |
+
"epoch": 0.09992375004538688,
|
| 9670 |
+
"grad_norm": 0.7593192458152771,
|
| 9671 |
+
"learning_rate": 4.288963122344026e-06,
|
| 9672 |
+
"loss": 0.2862,
|
| 9673 |
+
"step": 1376
|
| 9674 |
+
},
|
| 9675 |
+
{
|
| 9676 |
+
"epoch": 0.09999636904978033,
|
| 9677 |
+
"grad_norm": 1.6110453605651855,
|
| 9678 |
+
"learning_rate": 4.228773433375166e-06,
|
| 9679 |
+
"loss": 1.4914,
|
| 9680 |
+
"step": 1377
|
| 9681 |
+
},
|
| 9682 |
+
{
|
| 9683 |
+
"epoch": 0.10006898805417377,
|
| 9684 |
+
"grad_norm": 1.573285698890686,
|
| 9685 |
+
"learning_rate": 4.168999950211183e-06,
|
| 9686 |
+
"loss": 1.0053,
|
| 9687 |
+
"step": 1378
|
| 9688 |
+
},
|
| 9689 |
+
{
|
| 9690 |
+
"epoch": 0.10014160705856723,
|
| 9691 |
+
"grad_norm": 0.8916841745376587,
|
| 9692 |
+
"learning_rate": 4.10964293261763e-06,
|
| 9693 |
+
"loss": 0.3282,
|
| 9694 |
+
"step": 1379
|
| 9695 |
+
},
|
| 9696 |
+
{
|
| 9697 |
+
"epoch": 0.10021422606296068,
|
| 9698 |
+
"grad_norm": 0.7363986372947693,
|
| 9699 |
+
"learning_rate": 4.050702638550275e-06,
|
| 9700 |
+
"loss": 0.728,
|
| 9701 |
+
"step": 1380
|
| 9702 |
+
},
|
| 9703 |
+
{
|
| 9704 |
+
"epoch": 0.10028684506735412,
|
| 9705 |
+
"grad_norm": 1.3437317609786987,
|
| 9706 |
+
"learning_rate": 3.992179324153767e-06,
|
| 9707 |
+
"loss": 0.8321,
|
| 9708 |
+
"step": 1381
|
| 9709 |
+
},
|
| 9710 |
+
{
|
| 9711 |
+
"epoch": 0.10035946407174758,
|
| 9712 |
+
"grad_norm": 1.2932305335998535,
|
| 9713 |
+
"learning_rate": 3.93407324376075e-06,
|
| 9714 |
+
"loss": 0.705,
|
| 9715 |
+
"step": 1382
|
| 9716 |
+
},
|
| 9717 |
+
{
|
| 9718 |
+
"epoch": 0.10043208307614103,
|
| 9719 |
+
"grad_norm": 0.7181903719902039,
|
| 9720 |
+
"learning_rate": 3.876384649890519e-06,
|
| 9721 |
+
"loss": 0.8422,
|
| 9722 |
+
"step": 1383
|
| 9723 |
+
},
|
| 9724 |
+
{
|
| 9725 |
+
"epoch": 0.10050470208053447,
|
| 9726 |
+
"grad_norm": 0.4048106372356415,
|
| 9727 |
+
"learning_rate": 3.819113793248142e-06,
|
| 9728 |
+
"loss": 0.1317,
|
| 9729 |
+
"step": 1384
|
| 9730 |
+
},
|
| 9731 |
+
{
|
| 9732 |
+
"epoch": 0.10057732108492792,
|
| 9733 |
+
"grad_norm": 0.6735765933990479,
|
| 9734 |
+
"learning_rate": 3.7622609227231818e-06,
|
| 9735 |
+
"loss": 0.5035,
|
| 9736 |
+
"step": 1385
|
| 9737 |
+
},
|
| 9738 |
+
{
|
| 9739 |
+
"epoch": 0.10064994008932138,
|
| 9740 |
+
"grad_norm": 1.0079914331436157,
|
| 9741 |
+
"learning_rate": 3.7058262853887806e-06,
|
| 9742 |
+
"loss": 0.7891,
|
| 9743 |
+
"step": 1386
|
| 9744 |
+
},
|
| 9745 |
+
{
|
| 9746 |
+
"epoch": 0.10072255909371483,
|
| 9747 |
+
"grad_norm": 6.7523956298828125,
|
| 9748 |
+
"learning_rate": 3.6498101265004748e-06,
|
| 9749 |
+
"loss": 0.9884,
|
| 9750 |
+
"step": 1387
|
| 9751 |
+
},
|
| 9752 |
+
{
|
| 9753 |
+
"epoch": 0.10079517809810827,
|
| 9754 |
+
"grad_norm": 1.5442874431610107,
|
| 9755 |
+
"learning_rate": 3.5942126894951467e-06,
|
| 9756 |
+
"loss": 1.1627,
|
| 9757 |
+
"step": 1388
|
| 9758 |
+
},
|
| 9759 |
+
{
|
| 9760 |
+
"epoch": 0.10086779710250172,
|
| 9761 |
+
"grad_norm": 2.2174668312072754,
|
| 9762 |
+
"learning_rate": 3.5390342159900223e-06,
|
| 9763 |
+
"loss": 0.8084,
|
| 9764 |
+
"step": 1389
|
| 9765 |
+
},
|
| 9766 |
+
{
|
| 9767 |
+
"epoch": 0.10094041610689518,
|
| 9768 |
+
"grad_norm": 1.7226437330245972,
|
| 9769 |
+
"learning_rate": 3.4842749457815402e-06,
|
| 9770 |
+
"loss": 0.2909,
|
| 9771 |
+
"step": 1390
|
| 9772 |
+
},
|
| 9773 |
+
{
|
| 9774 |
+
"epoch": 0.10101303511128862,
|
| 9775 |
+
"grad_norm": 0.942451536655426,
|
| 9776 |
+
"learning_rate": 3.429935116844374e-06,
|
| 9777 |
+
"loss": 0.623,
|
| 9778 |
+
"step": 1391
|
| 9779 |
+
},
|
| 9780 |
+
{
|
| 9781 |
+
"epoch": 0.10108565411568207,
|
| 9782 |
+
"grad_norm": 1.5388950109481812,
|
| 9783 |
+
"learning_rate": 3.3760149653303872e-06,
|
| 9784 |
+
"loss": 0.8913,
|
| 9785 |
+
"step": 1392
|
| 9786 |
+
},
|
| 9787 |
+
{
|
| 9788 |
+
"epoch": 0.10115827312007553,
|
| 9789 |
+
"grad_norm": 0.85237056016922,
|
| 9790 |
+
"learning_rate": 3.3225147255675717e-06,
|
| 9791 |
+
"loss": 0.6387,
|
| 9792 |
+
"step": 1393
|
| 9793 |
+
},
|
| 9794 |
+
{
|
| 9795 |
+
"epoch": 0.10123089212446898,
|
| 9796 |
+
"grad_norm": 1.4029381275177002,
|
| 9797 |
+
"learning_rate": 3.269434630059054e-06,
|
| 9798 |
+
"loss": 0.4124,
|
| 9799 |
+
"step": 1394
|
| 9800 |
+
},
|
| 9801 |
+
{
|
| 9802 |
+
"epoch": 0.10130351112886242,
|
| 9803 |
+
"grad_norm": 1.4189517498016357,
|
| 9804 |
+
"learning_rate": 3.216774909482101e-06,
|
| 9805 |
+
"loss": 1.1734,
|
| 9806 |
+
"step": 1395
|
| 9807 |
+
},
|
| 9808 |
+
{
|
| 9809 |
+
"epoch": 0.10137613013325587,
|
| 9810 |
+
"grad_norm": 0.7705766558647156,
|
| 9811 |
+
"learning_rate": 3.1645357926870955e-06,
|
| 9812 |
+
"loss": 0.1378,
|
| 9813 |
+
"step": 1396
|
| 9814 |
+
},
|
| 9815 |
+
{
|
| 9816 |
+
"epoch": 0.10144874913764933,
|
| 9817 |
+
"grad_norm": 0.8672094345092773,
|
| 9818 |
+
"learning_rate": 3.1127175066965275e-06,
|
| 9819 |
+
"loss": 0.7838,
|
| 9820 |
+
"step": 1397
|
| 9821 |
+
},
|
| 9822 |
+
{
|
| 9823 |
+
"epoch": 0.10152136814204277,
|
| 9824 |
+
"grad_norm": 1.111504316329956,
|
| 9825 |
+
"learning_rate": 3.0613202767040492e-06,
|
| 9826 |
+
"loss": 0.471,
|
| 9827 |
+
"step": 1398
|
| 9828 |
+
},
|
| 9829 |
+
{
|
| 9830 |
+
"epoch": 0.10159398714643622,
|
| 9831 |
+
"grad_norm": 0.9333434700965881,
|
| 9832 |
+
"learning_rate": 3.0103443260734554e-06,
|
| 9833 |
+
"loss": 0.389,
|
| 9834 |
+
"step": 1399
|
| 9835 |
+
},
|
| 9836 |
+
{
|
| 9837 |
+
"epoch": 0.10166660615082967,
|
| 9838 |
+
"grad_norm": 0.958042562007904,
|
| 9839 |
+
"learning_rate": 2.9597898763377596e-06,
|
| 9840 |
+
"loss": 0.3355,
|
| 9841 |
+
"step": 1400
|
| 9842 |
+
},
|
| 9843 |
+
{
|
| 9844 |
+
"epoch": 0.10173922515522313,
|
| 9845 |
+
"grad_norm": 1.1177756786346436,
|
| 9846 |
+
"learning_rate": 2.9096571471981637e-06,
|
| 9847 |
+
"loss": 0.701,
|
| 9848 |
+
"step": 1401
|
| 9849 |
+
},
|
| 9850 |
+
{
|
| 9851 |
+
"epoch": 0.10181184415961657,
|
| 9852 |
+
"grad_norm": 1.34513521194458,
|
| 9853 |
+
"learning_rate": 2.8599463565231686e-06,
|
| 9854 |
+
"loss": 0.6273,
|
| 9855 |
+
"step": 1402
|
| 9856 |
+
},
|
| 9857 |
+
{
|
| 9858 |
+
"epoch": 0.10188446316401002,
|
| 9859 |
+
"grad_norm": 3.7566659450531006,
|
| 9860 |
+
"learning_rate": 2.810657720347587e-06,
|
| 9861 |
+
"loss": 0.5501,
|
| 9862 |
+
"step": 1403
|
| 9863 |
+
},
|
| 9864 |
+
{
|
| 9865 |
+
"epoch": 0.10195708216840348,
|
| 9866 |
+
"grad_norm": 0.7235520482063293,
|
| 9867 |
+
"learning_rate": 2.7617914528716315e-06,
|
| 9868 |
+
"loss": 0.6673,
|
| 9869 |
+
"step": 1404
|
| 9870 |
+
},
|
| 9871 |
+
{
|
| 9872 |
+
"epoch": 0.10202970117279692,
|
| 9873 |
+
"grad_norm": 0.687924861907959,
|
| 9874 |
+
"learning_rate": 2.713347766459984e-06,
|
| 9875 |
+
"loss": 0.387,
|
| 9876 |
+
"step": 1405
|
| 9877 |
+
},
|
| 9878 |
+
{
|
| 9879 |
+
"epoch": 0.10210232017719037,
|
| 9880 |
+
"grad_norm": 0.8283138871192932,
|
| 9881 |
+
"learning_rate": 2.6653268716407943e-06,
|
| 9882 |
+
"loss": 0.2871,
|
| 9883 |
+
"step": 1406
|
| 9884 |
+
},
|
| 9885 |
+
{
|
| 9886 |
+
"epoch": 0.10217493918158382,
|
| 9887 |
+
"grad_norm": 1.2459734678268433,
|
| 9888 |
+
"learning_rate": 2.6177289771049274e-06,
|
| 9889 |
+
"loss": 0.4239,
|
| 9890 |
+
"step": 1407
|
| 9891 |
+
},
|
| 9892 |
+
{
|
| 9893 |
+
"epoch": 0.10224755818597726,
|
| 9894 |
+
"grad_norm": 0.5479968786239624,
|
| 9895 |
+
"learning_rate": 2.570554289704863e-06,
|
| 9896 |
+
"loss": 0.4076,
|
| 9897 |
+
"step": 1408
|
| 9898 |
+
},
|
| 9899 |
+
{
|
| 9900 |
+
"epoch": 0.10232017719037072,
|
| 9901 |
+
"grad_norm": 1.2013438940048218,
|
| 9902 |
+
"learning_rate": 2.5238030144539737e-06,
|
| 9903 |
+
"loss": 1.9661,
|
| 9904 |
+
"step": 1409
|
| 9905 |
+
},
|
| 9906 |
+
{
|
| 9907 |
+
"epoch": 0.10239279619476417,
|
| 9908 |
+
"grad_norm": 0.9463850855827332,
|
| 9909 |
+
"learning_rate": 2.4774753545254935e-06,
|
| 9910 |
+
"loss": 0.6736,
|
| 9911 |
+
"step": 1410
|
| 9912 |
+
},
|
| 9913 |
+
{
|
| 9914 |
+
"epoch": 0.10246541519915763,
|
| 9915 |
+
"grad_norm": 1.0171250104904175,
|
| 9916 |
+
"learning_rate": 2.431571511251729e-06,
|
| 9917 |
+
"loss": 0.3619,
|
| 9918 |
+
"step": 1411
|
| 9919 |
+
},
|
| 9920 |
+
{
|
| 9921 |
+
"epoch": 0.10253803420355107,
|
| 9922 |
+
"grad_norm": 1.3767976760864258,
|
| 9923 |
+
"learning_rate": 2.3860916841231262e-06,
|
| 9924 |
+
"loss": 0.6396,
|
| 9925 |
+
"step": 1412
|
| 9926 |
+
},
|
| 9927 |
+
{
|
| 9928 |
+
"epoch": 0.10261065320794452,
|
| 9929 |
+
"grad_norm": 0.7756595611572266,
|
| 9930 |
+
"learning_rate": 2.3410360707874724e-06,
|
| 9931 |
+
"loss": 0.6802,
|
| 9932 |
+
"step": 1413
|
| 9933 |
+
},
|
| 9934 |
+
{
|
| 9935 |
+
"epoch": 0.10268327221233797,
|
| 9936 |
+
"grad_norm": 1.1990317106246948,
|
| 9937 |
+
"learning_rate": 2.296404867048929e-06,
|
| 9938 |
+
"loss": 0.4044,
|
| 9939 |
+
"step": 1414
|
| 9940 |
+
},
|
| 9941 |
+
{
|
| 9942 |
+
"epoch": 0.10275589121673141,
|
| 9943 |
+
"grad_norm": 0.7122053503990173,
|
| 9944 |
+
"learning_rate": 2.2521982668673004e-06,
|
| 9945 |
+
"loss": 0.109,
|
| 9946 |
+
"step": 1415
|
| 9947 |
+
},
|
| 9948 |
+
{
|
| 9949 |
+
"epoch": 0.10282851022112487,
|
| 9950 |
+
"grad_norm": 1.1803028583526611,
|
| 9951 |
+
"learning_rate": 2.208416462357088e-06,
|
| 9952 |
+
"loss": 0.575,
|
| 9953 |
+
"step": 1416
|
| 9954 |
+
},
|
| 9955 |
+
{
|
| 9956 |
+
"epoch": 0.10290112922551832,
|
| 9957 |
+
"grad_norm": 0.6144981384277344,
|
| 9958 |
+
"learning_rate": 2.165059643786749e-06,
|
| 9959 |
+
"loss": 0.2328,
|
| 9960 |
+
"step": 1417
|
| 9961 |
+
},
|
| 9962 |
+
{
|
| 9963 |
+
"epoch": 0.10297374822991177,
|
| 9964 |
+
"grad_norm": 1.3563295602798462,
|
| 9965 |
+
"learning_rate": 2.122127999577783e-06,
|
| 9966 |
+
"loss": 0.2736,
|
| 9967 |
+
"step": 1418
|
| 9968 |
+
},
|
| 9969 |
+
{
|
| 9970 |
+
"epoch": 0.10304636723430521,
|
| 9971 |
+
"grad_norm": 1.507043480873108,
|
| 9972 |
+
"learning_rate": 2.079621716303959e-06,
|
| 9973 |
+
"loss": 0.4774,
|
| 9974 |
+
"step": 1419
|
| 9975 |
+
},
|
| 9976 |
+
{
|
| 9977 |
+
"epoch": 0.10311898623869867,
|
| 9978 |
+
"grad_norm": 1.695013165473938,
|
| 9979 |
+
"learning_rate": 2.037540978690533e-06,
|
| 9980 |
+
"loss": 0.6716,
|
| 9981 |
+
"step": 1420
|
| 9982 |
+
},
|
| 9983 |
+
{
|
| 9984 |
+
"epoch": 0.10319160524309212,
|
| 9985 |
+
"grad_norm": 0.9312344193458557,
|
| 9986 |
+
"learning_rate": 1.9958859696133646e-06,
|
| 9987 |
+
"loss": 0.3224,
|
| 9988 |
+
"step": 1421
|
| 9989 |
+
},
|
| 9990 |
+
{
|
| 9991 |
+
"epoch": 0.10326422424748556,
|
| 9992 |
+
"grad_norm": 1.0863949060440063,
|
| 9993 |
+
"learning_rate": 1.954656870098193e-06,
|
| 9994 |
+
"loss": 1.3522,
|
| 9995 |
+
"step": 1422
|
| 9996 |
+
},
|
| 9997 |
+
{
|
| 9998 |
+
"epoch": 0.10333684325187902,
|
| 9999 |
+
"grad_norm": 1.090690016746521,
|
| 10000 |
+
"learning_rate": 1.9138538593198143e-06,
|
| 10001 |
+
"loss": 0.3509,
|
| 10002 |
+
"step": 1423
|
| 10003 |
+
},
|
| 10004 |
+
{
|
| 10005 |
+
"epoch": 0.10340946225627247,
|
| 10006 |
+
"grad_norm": 3.5825984477996826,
|
| 10007 |
+
"learning_rate": 1.8734771146013298e-06,
|
| 10008 |
+
"loss": 0.6386,
|
| 10009 |
+
"step": 1424
|
| 10010 |
+
},
|
| 10011 |
+
{
|
| 10012 |
+
"epoch": 0.10348208126066592,
|
| 10013 |
+
"grad_norm": 1.6424721479415894,
|
| 10014 |
+
"learning_rate": 1.833526811413344e-06,
|
| 10015 |
+
"loss": 0.6693,
|
| 10016 |
+
"step": 1425
|
| 10017 |
+
},
|
| 10018 |
+
{
|
| 10019 |
+
"epoch": 0.10355470026505936,
|
| 10020 |
+
"grad_norm": 1.9946939945220947,
|
| 10021 |
+
"learning_rate": 1.7940031233732223e-06,
|
| 10022 |
+
"loss": 0.5905,
|
| 10023 |
+
"step": 1426
|
| 10024 |
+
},
|
| 10025 |
+
{
|
| 10026 |
+
"epoch": 0.10362731926945282,
|
| 10027 |
+
"grad_norm": 1.213773488998413,
|
| 10028 |
+
"learning_rate": 1.7549062222443347e-06,
|
| 10029 |
+
"loss": 0.5926,
|
| 10030 |
+
"step": 1427
|
| 10031 |
+
},
|
| 10032 |
+
{
|
| 10033 |
+
"epoch": 0.10369993827384627,
|
| 10034 |
+
"grad_norm": 1.069018006324768,
|
| 10035 |
+
"learning_rate": 1.7162362779353014e-06,
|
| 10036 |
+
"loss": 0.2761,
|
| 10037 |
+
"step": 1428
|
| 10038 |
+
},
|
| 10039 |
+
{
|
| 10040 |
+
"epoch": 0.10377255727823971,
|
| 10041 |
+
"grad_norm": 1.6366422176361084,
|
| 10042 |
+
"learning_rate": 1.6779934584992718e-06,
|
| 10043 |
+
"loss": 0.8403,
|
| 10044 |
+
"step": 1429
|
| 10045 |
+
},
|
| 10046 |
+
{
|
| 10047 |
+
"epoch": 0.10384517628263316,
|
| 10048 |
+
"grad_norm": 1.2078701257705688,
|
| 10049 |
+
"learning_rate": 1.6401779301331466e-06,
|
| 10050 |
+
"loss": 1.3504,
|
| 10051 |
+
"step": 1430
|
| 10052 |
+
},
|
| 10053 |
+
{
|
| 10054 |
+
"epoch": 0.10391779528702662,
|
| 10055 |
+
"grad_norm": 1.0665812492370605,
|
| 10056 |
+
"learning_rate": 1.602789857176945e-06,
|
| 10057 |
+
"loss": 0.3522,
|
| 10058 |
+
"step": 1431
|
| 10059 |
+
},
|
| 10060 |
+
{
|
| 10061 |
+
"epoch": 0.10399041429142006,
|
| 10062 |
+
"grad_norm": 0.5771770477294922,
|
| 10063 |
+
"learning_rate": 1.5658294021129949e-06,
|
| 10064 |
+
"loss": 0.0827,
|
| 10065 |
+
"step": 1432
|
| 10066 |
+
},
|
| 10067 |
+
{
|
| 10068 |
+
"epoch": 0.10406303329581351,
|
| 10069 |
+
"grad_norm": 1.295080304145813,
|
| 10070 |
+
"learning_rate": 1.5292967255652657e-06,
|
| 10071 |
+
"loss": 0.8225,
|
| 10072 |
+
"step": 1433
|
| 10073 |
+
},
|
| 10074 |
+
{
|
| 10075 |
+
"epoch": 0.10413565230020697,
|
| 10076 |
+
"grad_norm": 0.6174052953720093,
|
| 10077 |
+
"learning_rate": 1.4931919862987143e-06,
|
| 10078 |
+
"loss": 0.1961,
|
| 10079 |
+
"step": 1434
|
| 10080 |
+
},
|
| 10081 |
+
{
|
| 10082 |
+
"epoch": 0.10420827130460042,
|
| 10083 |
+
"grad_norm": 1.3631885051727295,
|
| 10084 |
+
"learning_rate": 1.4575153412185073e-06,
|
| 10085 |
+
"loss": 0.73,
|
| 10086 |
+
"step": 1435
|
| 10087 |
+
},
|
| 10088 |
+
{
|
| 10089 |
+
"epoch": 0.10428089030899386,
|
| 10090 |
+
"grad_norm": 1.9511936902999878,
|
| 10091 |
+
"learning_rate": 1.4222669453694215e-06,
|
| 10092 |
+
"loss": 1.0947,
|
| 10093 |
+
"step": 1436
|
| 10094 |
+
},
|
| 10095 |
+
{
|
| 10096 |
+
"epoch": 0.10435350931338731,
|
| 10097 |
+
"grad_norm": 1.1087265014648438,
|
| 10098 |
+
"learning_rate": 1.3874469519350896e-06,
|
| 10099 |
+
"loss": 0.4215,
|
| 10100 |
+
"step": 1437
|
| 10101 |
+
},
|
| 10102 |
+
{
|
| 10103 |
+
"epoch": 0.10442612831778077,
|
| 10104 |
+
"grad_norm": 0.5323362350463867,
|
| 10105 |
+
"learning_rate": 1.3530555122374333e-06,
|
| 10106 |
+
"loss": 0.1444,
|
| 10107 |
+
"step": 1438
|
| 10108 |
+
},
|
| 10109 |
+
{
|
| 10110 |
+
"epoch": 0.1044987473221742,
|
| 10111 |
+
"grad_norm": 1.2896819114685059,
|
| 10112 |
+
"learning_rate": 1.3190927757358973e-06,
|
| 10113 |
+
"loss": 0.3816,
|
| 10114 |
+
"step": 1439
|
| 10115 |
+
},
|
| 10116 |
+
{
|
| 10117 |
+
"epoch": 0.10457136632656766,
|
| 10118 |
+
"grad_norm": 0.6727457642555237,
|
| 10119 |
+
"learning_rate": 1.2855588900269056e-06,
|
| 10120 |
+
"loss": 0.1802,
|
| 10121 |
+
"step": 1440
|
| 10122 |
+
},
|
| 10123 |
+
{
|
| 10124 |
+
"epoch": 0.10464398533096111,
|
| 10125 |
+
"grad_norm": 1.690926194190979,
|
| 10126 |
+
"learning_rate": 1.2524540008431174e-06,
|
| 10127 |
+
"loss": 0.1092,
|
| 10128 |
+
"step": 1441
|
| 10129 |
+
},
|
| 10130 |
+
{
|
| 10131 |
+
"epoch": 0.10471660433535457,
|
| 10132 |
+
"grad_norm": 1.0676002502441406,
|
| 10133 |
+
"learning_rate": 1.2197782520528833e-06,
|
| 10134 |
+
"loss": 1.1674,
|
| 10135 |
+
"step": 1442
|
| 10136 |
+
},
|
| 10137 |
+
{
|
| 10138 |
+
"epoch": 0.10478922333974801,
|
| 10139 |
+
"grad_norm": 1.1495492458343506,
|
| 10140 |
+
"learning_rate": 1.187531785659568e-06,
|
| 10141 |
+
"loss": 0.8279,
|
| 10142 |
+
"step": 1443
|
| 10143 |
+
},
|
| 10144 |
+
{
|
| 10145 |
+
"epoch": 0.10486184234414146,
|
| 10146 |
+
"grad_norm": 0.8466813564300537,
|
| 10147 |
+
"learning_rate": 1.1557147418009395e-06,
|
| 10148 |
+
"loss": 0.2328,
|
| 10149 |
+
"step": 1444
|
| 10150 |
+
},
|
| 10151 |
+
{
|
| 10152 |
+
"epoch": 0.10493446134853492,
|
| 10153 |
+
"grad_norm": 1.8412026166915894,
|
| 10154 |
+
"learning_rate": 1.124327258748581e-06,
|
| 10155 |
+
"loss": 0.2167,
|
| 10156 |
+
"step": 1445
|
| 10157 |
+
},
|
| 10158 |
+
{
|
| 10159 |
+
"epoch": 0.10500708035292836,
|
| 10160 |
+
"grad_norm": 0.9924756288528442,
|
| 10161 |
+
"learning_rate": 1.0933694729072686e-06,
|
| 10162 |
+
"loss": 0.1925,
|
| 10163 |
+
"step": 1446
|
| 10164 |
+
},
|
| 10165 |
+
{
|
| 10166 |
+
"epoch": 0.10507969935732181,
|
| 10167 |
+
"grad_norm": 1.585096001625061,
|
| 10168 |
+
"learning_rate": 1.0628415188143725e-06,
|
| 10169 |
+
"loss": 0.7968,
|
| 10170 |
+
"step": 1447
|
| 10171 |
+
},
|
| 10172 |
+
{
|
| 10173 |
+
"epoch": 0.10515231836171526,
|
| 10174 |
+
"grad_norm": 1.964674949645996,
|
| 10175 |
+
"learning_rate": 1.0327435291393017e-06,
|
| 10176 |
+
"loss": 0.6543,
|
| 10177 |
+
"step": 1448
|
| 10178 |
+
},
|
| 10179 |
+
{
|
| 10180 |
+
"epoch": 0.10522493736610872,
|
| 10181 |
+
"grad_norm": 0.8059599995613098,
|
| 10182 |
+
"learning_rate": 1.0030756346829151e-06,
|
| 10183 |
+
"loss": 0.4649,
|
| 10184 |
+
"step": 1449
|
| 10185 |
+
},
|
| 10186 |
+
{
|
| 10187 |
+
"epoch": 0.10529755637050216,
|
| 10188 |
+
"grad_norm": 1.5001825094223022,
|
| 10189 |
+
"learning_rate": 9.738379643769447e-07,
|
| 10190 |
+
"loss": 0.9363,
|
| 10191 |
+
"step": 1450
|
| 10192 |
+
},
|
| 10193 |
+
{
|
| 10194 |
+
"epoch": 0.10537017537489561,
|
| 10195 |
+
"grad_norm": 0.5860260725021362,
|
| 10196 |
+
"learning_rate": 9.450306452834179e-07,
|
| 10197 |
+
"loss": 0.3263,
|
| 10198 |
+
"step": 1451
|
| 10199 |
+
},
|
| 10200 |
+
{
|
| 10201 |
+
"epoch": 0.10544279437928906,
|
| 10202 |
+
"grad_norm": 0.9756802916526794,
|
| 10203 |
+
"learning_rate": 9.166538025941695e-07,
|
| 10204 |
+
"loss": 0.6314,
|
| 10205 |
+
"step": 1452
|
| 10206 |
+
},
|
| 10207 |
+
{
|
| 10208 |
+
"epoch": 0.1055154133836825,
|
| 10209 |
+
"grad_norm": 1.65675950050354,
|
| 10210 |
+
"learning_rate": 8.887075596302197e-07,
|
| 10211 |
+
"loss": 0.3653,
|
| 10212 |
+
"step": 1453
|
| 10213 |
+
},
|
| 10214 |
+
{
|
| 10215 |
+
"epoch": 0.10558803238807596,
|
| 10216 |
+
"grad_norm": 1.782583475112915,
|
| 10217 |
+
"learning_rate": 8.611920378412963e-07,
|
| 10218 |
+
"loss": 0.3469,
|
| 10219 |
+
"step": 1454
|
| 10220 |
+
},
|
| 10221 |
+
{
|
| 10222 |
+
"epoch": 0.10566065139246941,
|
| 10223 |
+
"grad_norm": 1.1538509130477905,
|
| 10224 |
+
"learning_rate": 8.341073568052805e-07,
|
| 10225 |
+
"loss": 1.101,
|
| 10226 |
+
"step": 1455
|
| 10227 |
+
},
|
| 10228 |
+
{
|
| 10229 |
+
"epoch": 0.10573327039686285,
|
| 10230 |
+
"grad_norm": 2.3746232986450195,
|
| 10231 |
+
"learning_rate": 8.074536342276618e-07,
|
| 10232 |
+
"loss": 0.726,
|
| 10233 |
+
"step": 1456
|
| 10234 |
+
},
|
| 10235 |
+
{
|
| 10236 |
+
"epoch": 0.1058058894012563,
|
| 10237 |
+
"grad_norm": 1.1129348278045654,
|
| 10238 |
+
"learning_rate": 7.81230985941106e-07,
|
| 10239 |
+
"loss": 0.4706,
|
| 10240 |
+
"step": 1457
|
| 10241 |
+
},
|
| 10242 |
+
{
|
| 10243 |
+
"epoch": 0.10587850840564976,
|
| 10244 |
+
"grad_norm": 0.40323030948638916,
|
| 10245 |
+
"learning_rate": 7.554395259048664e-07,
|
| 10246 |
+
"loss": 0.1263,
|
| 10247 |
+
"step": 1458
|
| 10248 |
+
},
|
| 10249 |
+
{
|
| 10250 |
+
"epoch": 0.10595112741004321,
|
| 10251 |
+
"grad_norm": 2.152052640914917,
|
| 10252 |
+
"learning_rate": 7.300793662043282e-07,
|
| 10253 |
+
"loss": 0.3132,
|
| 10254 |
+
"step": 1459
|
| 10255 |
+
},
|
| 10256 |
+
{
|
| 10257 |
+
"epoch": 0.10602374641443665,
|
| 10258 |
+
"grad_norm": 0.731158971786499,
|
| 10259 |
+
"learning_rate": 7.051506170505317e-07,
|
| 10260 |
+
"loss": 0.3027,
|
| 10261 |
+
"step": 1460
|
| 10262 |
+
},
|
| 10263 |
+
{
|
| 10264 |
+
"epoch": 0.10609636541883011,
|
| 10265 |
+
"grad_norm": 1.8550822734832764,
|
| 10266 |
+
"learning_rate": 6.806533867796728e-07,
|
| 10267 |
+
"loss": 1.2689,
|
| 10268 |
+
"step": 1461
|
| 10269 |
+
},
|
| 10270 |
+
{
|
| 10271 |
+
"epoch": 0.10616898442322356,
|
| 10272 |
+
"grad_norm": 0.8325512409210205,
|
| 10273 |
+
"learning_rate": 6.565877818526245e-07,
|
| 10274 |
+
"loss": 0.2715,
|
| 10275 |
+
"step": 1462
|
| 10276 |
+
},
|
| 10277 |
+
{
|
| 10278 |
+
"epoch": 0.106241603427617,
|
| 10279 |
+
"grad_norm": 2.1284432411193848,
|
| 10280 |
+
"learning_rate": 6.329539068545054e-07,
|
| 10281 |
+
"loss": 0.4751,
|
| 10282 |
+
"step": 1463
|
| 10283 |
+
},
|
| 10284 |
+
{
|
| 10285 |
+
"epoch": 0.10631422243201046,
|
| 10286 |
+
"grad_norm": 1.1094380617141724,
|
| 10287 |
+
"learning_rate": 6.097518644942013e-07,
|
| 10288 |
+
"loss": 0.4427,
|
| 10289 |
+
"step": 1464
|
| 10290 |
+
},
|
| 10291 |
+
{
|
| 10292 |
+
"epoch": 0.10638684143640391,
|
| 10293 |
+
"grad_norm": 0.9907796382904053,
|
| 10294 |
+
"learning_rate": 5.869817556039325e-07,
|
| 10295 |
+
"loss": 0.818,
|
| 10296 |
+
"step": 1465
|
| 10297 |
+
},
|
| 10298 |
+
{
|
| 10299 |
+
"epoch": 0.10645946044079736,
|
| 10300 |
+
"grad_norm": 1.0505033731460571,
|
| 10301 |
+
"learning_rate": 5.646436791387766e-07,
|
| 10302 |
+
"loss": 0.2598,
|
| 10303 |
+
"step": 1466
|
| 10304 |
+
},
|
| 10305 |
+
{
|
| 10306 |
+
"epoch": 0.1065320794451908,
|
| 10307 |
+
"grad_norm": 1.700927972793579,
|
| 10308 |
+
"learning_rate": 5.427377321763239e-07,
|
| 10309 |
+
"loss": 0.7721,
|
| 10310 |
+
"step": 1467
|
| 10311 |
+
},
|
| 10312 |
+
{
|
| 10313 |
+
"epoch": 0.10660469844958426,
|
| 10314 |
+
"grad_norm": 1.0606749057769775,
|
| 10315 |
+
"learning_rate": 5.212640099161559e-07,
|
| 10316 |
+
"loss": 0.2177,
|
| 10317 |
+
"step": 1468
|
| 10318 |
+
},
|
| 10319 |
+
{
|
| 10320 |
+
"epoch": 0.10667731745397771,
|
| 10321 |
+
"grad_norm": 1.0155739784240723,
|
| 10322 |
+
"learning_rate": 5.002226056795123e-07,
|
| 10323 |
+
"loss": 0.3887,
|
| 10324 |
+
"step": 1469
|
| 10325 |
+
},
|
| 10326 |
+
{
|
| 10327 |
+
"epoch": 0.10674993645837115,
|
| 10328 |
+
"grad_norm": 1.0859016180038452,
|
| 10329 |
+
"learning_rate": 4.796136109088023e-07,
|
| 10330 |
+
"loss": 0.7546,
|
| 10331 |
+
"step": 1470
|
| 10332 |
+
},
|
| 10333 |
+
{
|
| 10334 |
+
"epoch": 0.1068225554627646,
|
| 10335 |
+
"grad_norm": 1.2910510301589966,
|
| 10336 |
+
"learning_rate": 4.594371151673049e-07,
|
| 10337 |
+
"loss": 0.3414,
|
| 10338 |
+
"step": 1471
|
| 10339 |
+
},
|
| 10340 |
+
{
|
| 10341 |
+
"epoch": 0.10689517446715806,
|
| 10342 |
+
"grad_norm": 1.174484372138977,
|
| 10343 |
+
"learning_rate": 4.396932061387138e-07,
|
| 10344 |
+
"loss": 1.0507,
|
| 10345 |
+
"step": 1472
|
| 10346 |
+
},
|
| 10347 |
+
{
|
| 10348 |
+
"epoch": 0.10696779347155151,
|
| 10349 |
+
"grad_norm": 1.205222487449646,
|
| 10350 |
+
"learning_rate": 4.203819696267486e-07,
|
| 10351 |
+
"loss": 0.3548,
|
| 10352 |
+
"step": 1473
|
| 10353 |
+
},
|
| 10354 |
+
{
|
| 10355 |
+
"epoch": 0.10704041247594495,
|
| 10356 |
+
"grad_norm": 1.3310534954071045,
|
| 10357 |
+
"learning_rate": 4.015034895548664e-07,
|
| 10358 |
+
"loss": 0.779,
|
| 10359 |
+
"step": 1474
|
| 10360 |
+
},
|
| 10361 |
+
{
|
| 10362 |
+
"epoch": 0.1071130314803384,
|
| 10363 |
+
"grad_norm": 0.3511712849140167,
|
| 10364 |
+
"learning_rate": 3.8305784796576205e-07,
|
| 10365 |
+
"loss": 0.0755,
|
| 10366 |
+
"step": 1475
|
| 10367 |
+
},
|
| 10368 |
+
{
|
| 10369 |
+
"epoch": 0.10718565048473186,
|
| 10370 |
+
"grad_norm": 1.7754662036895752,
|
| 10371 |
+
"learning_rate": 3.6504512502113506e-07,
|
| 10372 |
+
"loss": 0.5768,
|
| 10373 |
+
"step": 1476
|
| 10374 |
+
},
|
| 10375 |
+
{
|
| 10376 |
+
"epoch": 0.1072582694891253,
|
| 10377 |
+
"grad_norm": 2.750741958618164,
|
| 10378 |
+
"learning_rate": 3.4746539900128994e-07,
|
| 10379 |
+
"loss": 0.4875,
|
| 10380 |
+
"step": 1477
|
| 10381 |
+
},
|
| 10382 |
+
{
|
| 10383 |
+
"epoch": 0.10733088849351875,
|
| 10384 |
+
"grad_norm": 1.190103530883789,
|
| 10385 |
+
"learning_rate": 3.303187463047808e-07,
|
| 10386 |
+
"loss": 0.6942,
|
| 10387 |
+
"step": 1478
|
| 10388 |
+
},
|
| 10389 |
+
{
|
| 10390 |
+
"epoch": 0.1074035074979122,
|
| 10391 |
+
"grad_norm": 2.258249521255493,
|
| 10392 |
+
"learning_rate": 3.1360524144810055e-07,
|
| 10393 |
+
"loss": 1.2607,
|
| 10394 |
+
"step": 1479
|
| 10395 |
+
},
|
| 10396 |
+
{
|
| 10397 |
+
"epoch": 0.10747612650230565,
|
| 10398 |
+
"grad_norm": 0.3821629583835602,
|
| 10399 |
+
"learning_rate": 2.9732495706535913e-07,
|
| 10400 |
+
"loss": 0.1124,
|
| 10401 |
+
"step": 1480
|
| 10402 |
+
},
|
| 10403 |
+
{
|
| 10404 |
+
"epoch": 0.1075487455066991,
|
| 10405 |
+
"grad_norm": 0.7363593578338623,
|
| 10406 |
+
"learning_rate": 2.8147796390797233e-07,
|
| 10407 |
+
"loss": 0.6482,
|
| 10408 |
+
"step": 1481
|
| 10409 |
+
},
|
| 10410 |
+
{
|
| 10411 |
+
"epoch": 0.10762136451109255,
|
| 10412 |
+
"grad_norm": 0.8737814426422119,
|
| 10413 |
+
"learning_rate": 2.660643308443289e-07,
|
| 10414 |
+
"loss": 0.3523,
|
| 10415 |
+
"step": 1482
|
| 10416 |
+
},
|
| 10417 |
+
{
|
| 10418 |
+
"epoch": 0.10769398351548601,
|
| 10419 |
+
"grad_norm": 0.19959469139575958,
|
| 10420 |
+
"learning_rate": 2.5108412485951305e-07,
|
| 10421 |
+
"loss": 0.121,
|
| 10422 |
+
"step": 1483
|
| 10423 |
+
},
|
| 10424 |
+
{
|
| 10425 |
+
"epoch": 0.10776660251987945,
|
| 10426 |
+
"grad_norm": 1.2553465366363525,
|
| 10427 |
+
"learning_rate": 2.3653741105499338e-07,
|
| 10428 |
+
"loss": 0.3897,
|
| 10429 |
+
"step": 1484
|
| 10430 |
+
},
|
| 10431 |
+
{
|
| 10432 |
+
"epoch": 0.1078392215242729,
|
| 10433 |
+
"grad_norm": 1.7560654878616333,
|
| 10434 |
+
"learning_rate": 2.2242425264838995e-07,
|
| 10435 |
+
"loss": 0.7474,
|
| 10436 |
+
"step": 1485
|
| 10437 |
+
},
|
| 10438 |
+
{
|
| 10439 |
+
"epoch": 0.10791184052866636,
|
| 10440 |
+
"grad_norm": 2.0694751739501953,
|
| 10441 |
+
"learning_rate": 2.0874471097311887e-07,
|
| 10442 |
+
"loss": 1.1434,
|
| 10443 |
+
"step": 1486
|
| 10444 |
+
},
|
| 10445 |
+
{
|
| 10446 |
+
"epoch": 0.1079844595330598,
|
| 10447 |
+
"grad_norm": 1.2232213020324707,
|
| 10448 |
+
"learning_rate": 1.9549884547819252e-07,
|
| 10449 |
+
"loss": 0.3334,
|
| 10450 |
+
"step": 1487
|
| 10451 |
+
},
|
| 10452 |
+
{
|
| 10453 |
+
"epoch": 0.10805707853745325,
|
| 10454 |
+
"grad_norm": 1.35491144657135,
|
| 10455 |
+
"learning_rate": 1.8268671372794199e-07,
|
| 10456 |
+
"loss": 0.6651,
|
| 10457 |
+
"step": 1488
|
| 10458 |
+
},
|
| 10459 |
+
{
|
| 10460 |
+
"epoch": 0.1081296975418467,
|
| 10461 |
+
"grad_norm": 0.6830072999000549,
|
| 10462 |
+
"learning_rate": 1.703083714017617e-07,
|
| 10463 |
+
"loss": 0.1716,
|
| 10464 |
+
"step": 1489
|
| 10465 |
+
},
|
| 10466 |
+
{
|
| 10467 |
+
"epoch": 0.10820231654624016,
|
| 10468 |
+
"grad_norm": 0.6254525780677795,
|
| 10469 |
+
"learning_rate": 1.5836387229386518e-07,
|
| 10470 |
+
"loss": 0.3798,
|
| 10471 |
+
"step": 1490
|
| 10472 |
+
},
|
| 10473 |
+
{
|
| 10474 |
+
"epoch": 0.1082749355506336,
|
| 10475 |
+
"grad_norm": 0.839185357093811,
|
| 10476 |
+
"learning_rate": 1.468532683130519e-07,
|
| 10477 |
+
"loss": 0.2766,
|
| 10478 |
+
"step": 1491
|
| 10479 |
+
},
|
| 10480 |
+
{
|
| 10481 |
+
"epoch": 0.10834755455502705,
|
| 10482 |
+
"grad_norm": 1.8218133449554443,
|
| 10483 |
+
"learning_rate": 1.3577660948249638e-07,
|
| 10484 |
+
"loss": 1.5886,
|
| 10485 |
+
"step": 1492
|
| 10486 |
+
},
|
| 10487 |
+
{
|
| 10488 |
+
"epoch": 0.1084201735594205,
|
| 10489 |
+
"grad_norm": 1.228155255317688,
|
| 10490 |
+
"learning_rate": 1.2513394393950384e-07,
|
| 10491 |
+
"loss": 0.3416,
|
| 10492 |
+
"step": 1493
|
| 10493 |
+
},
|
| 10494 |
+
{
|
| 10495 |
+
"epoch": 0.10849279256381394,
|
| 10496 |
+
"grad_norm": 0.7999048233032227,
|
| 10497 |
+
"learning_rate": 1.1492531793534379e-07,
|
| 10498 |
+
"loss": 0.372,
|
| 10499 |
+
"step": 1494
|
| 10500 |
+
},
|
| 10501 |
+
{
|
| 10502 |
+
"epoch": 0.1085654115682074,
|
| 10503 |
+
"grad_norm": 1.254194974899292,
|
| 10504 |
+
"learning_rate": 1.0515077583498344e-07,
|
| 10505 |
+
"loss": 0.6192,
|
| 10506 |
+
"step": 1495
|
| 10507 |
+
},
|
| 10508 |
+
{
|
| 10509 |
+
"epoch": 0.10863803057260085,
|
| 10510 |
+
"grad_norm": 0.6873809695243835,
|
| 10511 |
+
"learning_rate": 9.581036011696575e-08,
|
| 10512 |
+
"loss": 0.2073,
|
| 10513 |
+
"step": 1496
|
| 10514 |
+
},
|
| 10515 |
+
{
|
| 10516 |
+
"epoch": 0.1087106495769943,
|
| 10517 |
+
"grad_norm": 1.7734991312026978,
|
| 10518 |
+
"learning_rate": 8.690411137318722e-08,
|
| 10519 |
+
"loss": 1.1703,
|
| 10520 |
+
"step": 1497
|
| 10521 |
+
},
|
| 10522 |
+
{
|
| 10523 |
+
"epoch": 0.10878326858138775,
|
| 10524 |
+
"grad_norm": 1.6045475006103516,
|
| 10525 |
+
"learning_rate": 7.84320683087203e-08,
|
| 10526 |
+
"loss": 0.9832,
|
| 10527 |
+
"step": 1498
|
| 10528 |
+
},
|
| 10529 |
+
{
|
| 10530 |
+
"epoch": 0.1088558875857812,
|
| 10531 |
+
"grad_norm": 1.1792231798171997,
|
| 10532 |
+
"learning_rate": 7.039426774164693e-08,
|
| 10533 |
+
"loss": 0.2163,
|
| 10534 |
+
"step": 1499
|
| 10535 |
+
},
|
| 10536 |
+
{
|
| 10537 |
+
"epoch": 0.10892850659017465,
|
| 10538 |
+
"grad_norm": 1.6975489854812622,
|
| 10539 |
+
"learning_rate": 6.279074460291412e-08,
|
| 10540 |
+
"loss": 0.3715,
|
| 10541 |
+
"step": 1500
|
| 10542 |
+
},
|
| 10543 |
+
{
|
| 10544 |
+
"epoch": 0.1090011255945681,
|
| 10545 |
+
"grad_norm": 1.123024582862854,
|
| 10546 |
+
"learning_rate": 5.562153193616748e-08,
|
| 10547 |
+
"loss": 0.5485,
|
| 10548 |
+
"step": 1501
|
| 10549 |
+
},
|
| 10550 |
+
{
|
| 10551 |
+
"epoch": 0.10907374459896155,
|
| 10552 |
+
"grad_norm": 0.5258827209472656,
|
| 10553 |
+
"learning_rate": 4.888666089761795e-08,
|
| 10554 |
+
"loss": 0.3644,
|
| 10555 |
+
"step": 1502
|
| 10556 |
+
},
|
| 10557 |
+
{
|
| 10558 |
+
"epoch": 0.109146363603355,
|
| 10559 |
+
"grad_norm": 1.9915560483932495,
|
| 10560 |
+
"learning_rate": 4.25861607558864e-08,
|
| 10561 |
+
"loss": 0.6705,
|
| 10562 |
+
"step": 1503
|
| 10563 |
+
},
|
| 10564 |
+
{
|
| 10565 |
+
"epoch": 0.10921898260774844,
|
| 10566 |
+
"grad_norm": 1.1695542335510254,
|
| 10567 |
+
"learning_rate": 3.672005889189256e-08,
|
| 10568 |
+
"loss": 1.0088,
|
| 10569 |
+
"step": 1504
|
| 10570 |
+
},
|
| 10571 |
+
{
|
| 10572 |
+
"epoch": 0.1092916016121419,
|
| 10573 |
+
"grad_norm": 0.6647590398788452,
|
| 10574 |
+
"learning_rate": 3.128838079874408e-08,
|
| 10575 |
+
"loss": 0.2886,
|
| 10576 |
+
"step": 1505
|
| 10577 |
+
},
|
| 10578 |
+
{
|
| 10579 |
+
"epoch": 0.10936422061653535,
|
| 10580 |
+
"grad_norm": 0.9463073015213013,
|
| 10581 |
+
"learning_rate": 2.6291150081603212e-08,
|
| 10582 |
+
"loss": 0.5844,
|
| 10583 |
+
"step": 1506
|
| 10584 |
+
},
|
| 10585 |
+
{
|
| 10586 |
+
"epoch": 0.1094368396209288,
|
| 10587 |
+
"grad_norm": 2.3922953605651855,
|
| 10588 |
+
"learning_rate": 2.1728388457620262e-08,
|
| 10589 |
+
"loss": 1.256,
|
| 10590 |
+
"step": 1507
|
| 10591 |
+
},
|
| 10592 |
+
{
|
| 10593 |
+
"epoch": 0.10950945862532224,
|
| 10594 |
+
"grad_norm": 1.4629487991333008,
|
| 10595 |
+
"learning_rate": 1.7600115755789237e-08,
|
| 10596 |
+
"loss": 0.7184,
|
| 10597 |
+
"step": 1508
|
| 10598 |
+
},
|
| 10599 |
+
{
|
| 10600 |
+
"epoch": 0.1095820776297157,
|
| 10601 |
+
"grad_norm": 0.9099496006965637,
|
| 10602 |
+
"learning_rate": 1.3906349916881222e-08,
|
| 10603 |
+
"loss": 0.4936,
|
| 10604 |
+
"step": 1509
|
| 10605 |
+
},
|
| 10606 |
+
{
|
| 10607 |
+
"epoch": 0.10965469663410915,
|
| 10608 |
+
"grad_norm": 2.127716064453125,
|
| 10609 |
+
"learning_rate": 1.0647106993411093e-08,
|
| 10610 |
+
"loss": 0.9896,
|
| 10611 |
+
"step": 1510
|
| 10612 |
+
},
|
| 10613 |
+
{
|
| 10614 |
+
"epoch": 0.10972731563850259,
|
| 10615 |
+
"grad_norm": 1.0680584907531738,
|
| 10616 |
+
"learning_rate": 7.822401149482073e-09,
|
| 10617 |
+
"loss": 0.3718,
|
| 10618 |
+
"step": 1511
|
| 10619 |
+
},
|
| 10620 |
+
{
|
| 10621 |
+
"epoch": 0.10979993464289604,
|
| 10622 |
+
"grad_norm": 0.38920649886131287,
|
| 10623 |
+
"learning_rate": 5.4322446608190415e-09,
|
| 10624 |
+
"loss": 0.1009,
|
| 10625 |
+
"step": 1512
|
| 10626 |
+
},
|
| 10627 |
+
{
|
| 10628 |
+
"epoch": 0.1098725536472895,
|
| 10629 |
+
"grad_norm": 0.7909975647926331,
|
| 10630 |
+
"learning_rate": 3.4766479146242093e-09,
|
| 10631 |
+
"loss": 0.2062,
|
| 10632 |
+
"step": 1513
|
| 10633 |
+
},
|
| 10634 |
+
{
|
| 10635 |
+
"epoch": 0.10994517265168295,
|
| 10636 |
+
"grad_norm": 0.774316132068634,
|
| 10637 |
+
"learning_rate": 1.9556194096104207e-09,
|
| 10638 |
+
"loss": 0.2668,
|
| 10639 |
+
"step": 1514
|
| 10640 |
+
},
|
| 10641 |
+
{
|
| 10642 |
+
"epoch": 0.11001779165607639,
|
| 10643 |
+
"grad_norm": 1.4189250469207764,
|
| 10644 |
+
"learning_rate": 8.691657559012356e-10,
|
| 10645 |
+
"loss": 1.1424,
|
| 10646 |
+
"step": 1515
|
| 10647 |
+
},
|
| 10648 |
+
{
|
| 10649 |
+
"epoch": 0.11009041066046985,
|
| 10650 |
+
"grad_norm": 0.8423942923545837,
|
| 10651 |
+
"learning_rate": 2.1729167505313286e-10,
|
| 10652 |
+
"loss": 1.034,
|
| 10653 |
+
"step": 1516
|
| 10654 |
+
},
|
| 10655 |
+
{
|
| 10656 |
+
"epoch": 0.1101630296648633,
|
| 10657 |
+
"grad_norm": 1.5990933179855347,
|
| 10658 |
+
"learning_rate": 0.0,
|
| 10659 |
+
"loss": 0.7355,
|
| 10660 |
+
"step": 1517
|
| 10661 |
}
|
| 10662 |
],
|
| 10663 |
"logging_steps": 1,
…
"should_evaluate": false,
"should_log": false,
"should_save": true,
+"should_training_stop": true
},
"attributes": {}
}
},
+"total_flos": 2.8797060661641216e+17,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
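For readers who want to inspect this checkpoint's log history programmatically, the minimal sketch below (an illustrative aside, not part of the commit) loads trainer_state.json with Python's standard json module and pulls the per-step loss values recorded above; the relative path is an assumption and should point at your checkpoint directory.

import json

# Minimal sketch: read the checkpoint's trainer state (path assumed).
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Each log_history record mirrors the diff above:
# epoch, grad_norm, learning_rate, loss, step.
losses = [(rec["step"], rec["loss"]) for rec in state["log_history"] if "loss" in rec]
print(len(losses), "logged steps; final loss", losses[-1][1], "at step", losses[-1][0])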