Training in progress, step 1588, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f89918e77e25205c04536ba8120a65905f973cfb554fd612720570d24d59505b
 size 125040
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8f50649205e80f2c2b85621e32cd0c3f94476e20893bf5484f0b0bb0af74af7e
 size 162868
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:889782e930af6b4f5a6e9aacae72875b3bcbcdc8ce7c3e07fb4fdc102280abe8
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:defeb9ef8cdd685910c10cc6564d4128ee785f1cb589efa38e65694d45921bec
 size 1064
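All four files above are stored through Git LFS, so the commit only changes their pointer files: a spec-version line, the `oid sha256:` of the stored blob, and its `size` in bytes. A downloaded checkpoint can be checked against its pointer by re-hashing the local file. The following is a minimal sketch using only the Python standard library; the pointer text is the scheduler.pt entry from this commit, while the local file path is an assumption for illustration.

```python
import hashlib
import os

def parse_lfs_pointer(text: str) -> dict:
    """Parse a Git LFS pointer file: one 'key value' pair per line."""
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

def matches_pointer(pointer_text: str, local_path: str) -> bool:
    """True if the file at local_path has the oid and size the pointer claims."""
    fields = parse_lfs_pointer(pointer_text)
    algo, _, expected = fields["oid"].partition(":")
    if algo != "sha256":
        raise ValueError(f"unsupported hash algorithm: {algo}")
    digest = hashlib.sha256()
    with open(local_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return (digest.hexdigest() == expected
            and os.path.getsize(local_path) == int(fields["size"]))

# Pointer contents as committed for scheduler.pt; the local path is hypothetical.
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:defeb9ef8cdd685910c10cc6564d4128ee785f1cb589efa38e65694d45921bec
size 1064"""
print(matches_pointer(pointer, "last-checkpoint/scheduler.pt"))
```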
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
 "eval_steps": 397,
-"global_step":
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -8376,6 +8376,2793 @@
 "eval_samples_per_second": 380.066,
 "eval_steps_per_second": 190.084,
 "step": 1191
 }
 ],
 "logging_steps": 1,
@@ -8390,12 +11177,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
+"epoch": 0.17932864684791508,
 "eval_steps": 397,
+"global_step": 1588,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -8376,6 +8376,2793 @@
 "eval_samples_per_second": 380.066,
 "eval_steps_per_second": 190.084,
 "step": 1191
+},
+{ "epoch": 0.13460941249541233, "grad_norm": 2.2192535400390625, "learning_rate": 2.9500800504489022e-05, "loss": 41.7099, "step": 1192 },
+{ "epoch": 0.13472233985488835, "grad_norm": 1.7846317291259766, "learning_rate": 2.9359744782243302e-05, "loss": 41.3066, "step": 1193 },
+{ "epoch": 0.13483526721436437, "grad_norm": 2.3239450454711914, "learning_rate": 2.921896904699539e-05, "loss": 41.698, "step": 1194 },
+{ "epoch": 0.13494819457384039, "grad_norm": 1.9225789308547974, "learning_rate": 2.9078473856718636e-05, "loss": 41.6897, "step": 1195 },
+{ "epoch": 0.1350611219333164, "grad_norm": 1.7814782857894897, "learning_rate": 2.8938259768274355e-05, "loss": 42.0508, "step": 1196 },
+{ "epoch": 0.13517404929279242, "grad_norm": 1.543961524963379, "learning_rate": 2.8798327337409658e-05, "loss": 42.1157, "step": 1197 },
+{ "epoch": 0.13528697665226844, "grad_norm": 1.8937549591064453, "learning_rate": 2.8658677118755382e-05, "loss": 40.9603, "step": 1198 },
+{ "epoch": 0.13539990401174445, "grad_norm": 2.110856056213379, "learning_rate": 2.8519309665823734e-05, "loss": 41.3589, "step": 1199 },
+{ "epoch": 0.13551283137122047, "grad_norm": 1.5677906274795532, "learning_rate": 2.8380225531006233e-05, "loss": 42.3213, "step": 1200 },
+{ "epoch": 0.1356257587306965, "grad_norm": 2.013374090194702, "learning_rate": 2.824142526557142e-05, "loss": 41.4299, "step": 1201 },
+{ "epoch": 0.1357386860901725, "grad_norm": 1.624967098236084, "learning_rate": 2.8102909419662616e-05, "loss": 40.7646, "step": 1202 },
+{ "epoch": 0.13585161344964852, "grad_norm": 1.9957424402236938, "learning_rate": 2.796467854229594e-05, "loss": 41.2351, "step": 1203 },
+{ "epoch": 0.13596454080912454, "grad_norm": 1.9364780187606812, "learning_rate": 2.7826733181357932e-05, "loss": 41.0952, "step": 1204 },
+{ "epoch": 0.13607746816860056, "grad_norm": 1.9321202039718628, "learning_rate": 2.76890738836036e-05, "loss": 41.3162, "step": 1205 },
+{ "epoch": 0.13619039552807657, "grad_norm": 1.7783724069595337, "learning_rate": 2.755170119465402e-05, "loss": 41.4027, "step": 1206 },
+{ "epoch": 0.1363033228875526, "grad_norm": 1.8329112529754639, "learning_rate": 2.741461565899426e-05, "loss": 41.2073, "step": 1207 },
+{ "epoch": 0.1364162502470286, "grad_norm": 1.6131526231765747, "learning_rate": 2.7277817819971242e-05, "loss": 41.7681, "step": 1208 },
+{ "epoch": 0.13652917760650463, "grad_norm": 1.927401065826416, "learning_rate": 2.7141308219791706e-05, "loss": 41.2627, "step": 1209 },
+{ "epoch": 0.13664210496598064, "grad_norm": 1.968011498451233, "learning_rate": 2.7005087399519835e-05, "loss": 40.8323, "step": 1210 },
+{ "epoch": 0.13675503232545666, "grad_norm": 2.1835312843322754, "learning_rate": 2.6869155899075184e-05, "loss": 41.6933, "step": 1211 },
+{ "epoch": 0.13686795968493268, "grad_norm": 1.6554856300354004, "learning_rate": 2.673351425723064e-05, "loss": 42.2046, "step": 1212 },
+{ "epoch": 0.1369808870444087, "grad_norm": 2.0315005779266357, "learning_rate": 2.6598163011610177e-05, "loss": 41.2969, "step": 1213 },
+{ "epoch": 0.1370938144038847, "grad_norm": 1.7202644348144531, "learning_rate": 2.6463102698686825e-05, "loss": 41.2584, "step": 1214 },
+{ "epoch": 0.13720674176336073, "grad_norm": 1.9241304397583008, "learning_rate": 2.6328333853780453e-05, "loss": 40.564, "step": 1215 },
+{ "epoch": 0.13731966912283675, "grad_norm": 2.0288655757904053, "learning_rate": 2.6193857011055622e-05, "loss": 41.1175, "step": 1216 },
+{ "epoch": 0.13743259648231276, "grad_norm": 1.9641456604003906, "learning_rate": 2.605967270351959e-05, "loss": 41.4986, "step": 1217 },
+{ "epoch": 0.13754552384178878, "grad_norm": 1.8553285598754883, "learning_rate": 2.592578146302008e-05, "loss": 40.9583, "step": 1218 },
+{ "epoch": 0.1376584512012648, "grad_norm": 1.9806032180786133, "learning_rate": 2.5792183820243332e-05, "loss": 41.3963, "step": 1219 },
+{ "epoch": 0.13777137856074081, "grad_norm": 2.5576655864715576, "learning_rate": 2.565888030471183e-05, "loss": 41.7427, "step": 1220 },
+{ "epoch": 0.13788430592021683, "grad_norm": 2.0658464431762695, "learning_rate": 2.5525871444782177e-05, "loss": 41.257, "step": 1221 },
+{ "epoch": 0.13799723327969285, "grad_norm": 1.7856922149658203, "learning_rate": 2.5393157767643228e-05, "loss": 41.6337, "step": 1222 },
+{ "epoch": 0.13811016063916887, "grad_norm": 1.9582910537719727, "learning_rate": 2.526073979931376e-05, "loss": 42.2887, "step": 1223 },
+{ "epoch": 0.13822308799864488, "grad_norm": 1.9009538888931274, "learning_rate": 2.512861806464063e-05, "loss": 41.4537, "step": 1224 },
+{ "epoch": 0.1383360153581209, "grad_norm": 1.7697234153747559, "learning_rate": 2.499679308729639e-05, "loss": 42.1161, "step": 1225 },
+{ "epoch": 0.13844894271759692, "grad_norm": 1.847658395767212, "learning_rate": 2.486526538977745e-05, "loss": 41.2714, "step": 1226 },
+{ "epoch": 0.13856187007707294, "grad_norm": 2.263278007507324, "learning_rate": 2.4734035493401953e-05, "loss": 41.0284, "step": 1227 },
+{ "epoch": 0.13867479743654895, "grad_norm": 1.989181399345398, "learning_rate": 2.4603103918307625e-05, "loss": 40.9727, "step": 1228 },
+{ "epoch": 0.13878772479602497, "grad_norm": 1.9599343538284302, "learning_rate": 2.447247118344992e-05, "loss": 41.8633, "step": 1229 },
+{ "epoch": 0.138900652155501, "grad_norm": 1.748854160308838, "learning_rate": 2.434213780659962e-05, "loss": 41.2352, "step": 1230 },
+{ "epoch": 0.139013579514977, "grad_norm": 1.909290075302124, "learning_rate": 2.4212104304341122e-05, "loss": 42.0541, "step": 1231 },
+{ "epoch": 0.13912650687445302, "grad_norm": 1.9810950756072998, "learning_rate": 2.408237119207022e-05, "loss": 40.9978, "step": 1232 },
+{ "epoch": 0.13923943423392904, "grad_norm": 1.996480107307434, "learning_rate": 2.3952938983992055e-05, "loss": 41.9435, "step": 1233 },
+{ "epoch": 0.13935236159340506, "grad_norm": 1.9617669582366943, "learning_rate": 2.3823808193119178e-05, "loss": 41.3043, "step": 1234 },
+{ "epoch": 0.13946528895288107, "grad_norm": 1.9474812746047974, "learning_rate": 2.3694979331269417e-05, "loss": 41.2342, "step": 1235 },
+{ "epoch": 0.1395782163123571, "grad_norm": 1.8163639307022095, "learning_rate": 2.3566452909063885e-05, "loss": 41.5509, "step": 1236 },
+{ "epoch": 0.1396911436718331, "grad_norm": 1.628935694694519, "learning_rate": 2.3438229435924952e-05, "loss": 41.3588, "step": 1237 },
+{ "epoch": 0.13980407103130912, "grad_norm": 2.0755550861358643, "learning_rate": 2.3310309420074305e-05, "loss": 41.3944, "step": 1238 },
+{ "epoch": 0.13991699839078514, "grad_norm": 2.1491308212280273, "learning_rate": 2.3182693368530718e-05, "loss": 42.4515, "step": 1239 },
+{ "epoch": 0.14002992575026116, "grad_norm": 1.7521034479141235, "learning_rate": 2.305538178710831e-05, "loss": 41.1641, "step": 1240 },
+{ "epoch": 0.14014285310973715, "grad_norm": 1.7531728744506836, "learning_rate": 2.2928375180414342e-05, "loss": 41.3065, "step": 1241 },
+{ "epoch": 0.14025578046921316, "grad_norm": 1.8612360954284668, "learning_rate": 2.2801674051847298e-05, "loss": 41.1855, "step": 1242 },
+{ "epoch": 0.14036870782868918, "grad_norm": 1.685853123664856, "learning_rate": 2.2675278903594988e-05, "loss": 41.1763, "step": 1243 },
+{ "epoch": 0.1404816351881652, "grad_norm": 1.7316287755966187, "learning_rate": 2.2549190236632268e-05, "loss": 41.5232, "step": 1244 },
+{ "epoch": 0.14059456254764122, "grad_norm": 2.064657211303711, "learning_rate": 2.242340855071935e-05, "loss": 41.1393, "step": 1245 },
+{ "epoch": 0.14070748990711723, "grad_norm": 1.5233075618743896, "learning_rate": 2.2297934344399695e-05, "loss": 41.5075, "step": 1246 },
+{ "epoch": 0.14082041726659325, "grad_norm": 2.008065700531006, "learning_rate": 2.2172768114998022e-05, "loss": 41.4531, "step": 1247 },
+{ "epoch": 0.14093334462606927, "grad_norm": 2.0605196952819824, "learning_rate": 2.2047910358618405e-05, "loss": 41.6452, "step": 1248 },
+{ "epoch": 0.14104627198554529, "grad_norm": 1.735588550567627, "learning_rate": 2.192336157014223e-05, "loss": 40.9965, "step": 1249 },
+{ "epoch": 0.1411591993450213, "grad_norm": 1.7823458909988403, "learning_rate": 2.179912224322629e-05, "loss": 41.3997, "step": 1250 },
+{ "epoch": 0.14127212670449732, "grad_norm": 2.0419623851776123, "learning_rate": 2.1675192870300797e-05, "loss": 41.5207, "step": 1251 },
+{ "epoch": 0.14138505406397334, "grad_norm": 2.0400092601776123, "learning_rate": 2.155157394256745e-05, "loss": 41.3544, "step": 1252 },
+{ "epoch": 0.14149798142344935, "grad_norm": 1.7465420961380005, "learning_rate": 2.1428265949997463e-05, "loss": 41.4991, "step": 1253 },
+{ "epoch": 0.14161090878292537, "grad_norm": 1.8099271059036255, "learning_rate": 2.130526938132966e-05, "loss": 42.0492, "step": 1254 },
+{ "epoch": 0.1417238361424014, "grad_norm": 2.0546629428863525, "learning_rate": 2.118258472406851e-05, "loss": 41.6128, "step": 1255 },
+{ "epoch": 0.1418367635018774, "grad_norm": 1.825598955154419, "learning_rate": 2.1060212464482197e-05, "loss": 41.5949, "step": 1256 },
+{ "epoch": 0.14194969086135342, "grad_norm": 2.2495925426483154, "learning_rate": 2.093815308760071e-05, "loss": 41.919, "step": 1257 },
+{ "epoch": 0.14206261822082944, "grad_norm": 1.8565434217453003, "learning_rate": 2.0816407077213896e-05, "loss": 41.8396, "step": 1258 },
+{ "epoch": 0.14217554558030546, "grad_norm": 1.871829628944397, "learning_rate": 2.0694974915869536e-05, "loss": 41.4325, "step": 1259 },
+{ "epoch": 0.14228847293978147, "grad_norm": 1.8556500673294067, "learning_rate": 2.0573857084871507e-05, "loss": 41.2221, "step": 1260 },
+{ "epoch": 0.1424014002992575, "grad_norm": 1.7879801988601685, "learning_rate": 2.0453054064277756e-05, "loss": 41.4486, "step": 1261 },
+{ "epoch": 0.1425143276587335, "grad_norm": 1.9902695417404175, "learning_rate": 2.0332566332898507e-05, "loss": 41.9137, "step": 1262 },
+{ "epoch": 0.14262725501820953, "grad_norm": 2.0161569118499756, "learning_rate": 2.0212394368294286e-05, "loss": 41.5892, "step": 1263 },
+{ "epoch": 0.14274018237768554, "grad_norm": 1.9097905158996582, "learning_rate": 2.0092538646774072e-05, "loss": 41.7971, "step": 1264 },
+{ "epoch": 0.14285310973716156, "grad_norm": 2.0001113414764404, "learning_rate": 1.9972999643393386e-05, "loss": 41.3731, "step": 1265 },
+{ "epoch": 0.14296603709663758, "grad_norm": 1.767954707145691, "learning_rate": 1.9853777831952437e-05, "loss": 42.0009, "step": 1266 },
+{ "epoch": 0.1430789644561136, "grad_norm": 2.012709140777588, "learning_rate": 1.9734873684994205e-05, "loss": 41.5928, "step": 1267 },
+{ "epoch": 0.1431918918155896, "grad_norm": 1.8496637344360352, "learning_rate": 1.9616287673802568e-05, "loss": 41.5993, "step": 1268 },
+{ "epoch": 0.14330481917506563, "grad_norm": 1.6901943683624268, "learning_rate": 1.949802026840052e-05, "loss": 41.3855, "step": 1269 },
+{ "epoch": 0.14341774653454165, "grad_norm": 1.8437304496765137, "learning_rate": 1.938007193754816e-05, "loss": 41.9748, "step": 1270 },
+{ "epoch": 0.14353067389401766, "grad_norm": 1.9288281202316284, "learning_rate": 1.9262443148740983e-05, "loss": 41.4448, "step": 1271 },
+{ "epoch": 0.14364360125349368, "grad_norm": 1.9183334112167358, "learning_rate": 1.9145134368207916e-05, "loss": 41.684, "step": 1272 },
+{ "epoch": 0.1437565286129697, "grad_norm": 1.8592298030853271, "learning_rate": 1.9028146060909523e-05, "loss": 41.3654, "step": 1273 },
+{ "epoch": 0.14386945597244571, "grad_norm": 2.0269813537597656, "learning_rate": 1.8911478690536177e-05, "loss": 41.7985, "step": 1274 },
+{ "epoch": 0.14398238333192173, "grad_norm": 2.056584358215332, "learning_rate": 1.879513271950616e-05, "loss": 42.1038, "step": 1275 },
+{ "epoch": 0.14409531069139775, "grad_norm": 1.9595996141433716, "learning_rate": 1.8679108608963903e-05, "loss": 41.2163, "step": 1276 },
+{ "epoch": 0.14420823805087377, "grad_norm": 1.8805480003356934, "learning_rate": 1.8563406818778085e-05, "loss": 41.9976, "step": 1277 },
+{ "epoch": 0.14432116541034978, "grad_norm": 1.6963530778884888, "learning_rate": 1.844802780753989e-05, "loss": 41.146, "step": 1278 },
+{ "epoch": 0.1444340927698258, "grad_norm": 1.84190034866333, "learning_rate": 1.8332972032561124e-05, "loss": 41.3635, "step": 1279 },
+{ "epoch": 0.14454702012930182, "grad_norm": 1.915888786315918, "learning_rate": 1.8218239949872428e-05, "loss": 41.7947, "step": 1280 },
+{ "epoch": 0.14465994748877783, "grad_norm": 1.8974988460540771, "learning_rate": 1.8103832014221468e-05, "loss": 41.2438, "step": 1281 },
+{ "epoch": 0.14477287484825385, "grad_norm": 1.9894120693206787, "learning_rate": 1.7989748679071138e-05, "loss": 41.7931, "step": 1282 },
+{ "epoch": 0.14488580220772987, "grad_norm": 1.9796873331069946, "learning_rate": 1.7875990396597752e-05, "loss": 41.4015, "step": 1283 },
+{ "epoch": 0.1449987295672059, "grad_norm": 2.064821720123291, "learning_rate": 1.7762557617689267e-05, "loss": 41.3168, "step": 1284 },
+{ "epoch": 0.1451116569266819, "grad_norm": 1.9183483123779297, "learning_rate": 1.764945079194349e-05, "loss": 41.627, "step": 1285 },
+{ "epoch": 0.14522458428615792, "grad_norm": 2.0172388553619385, "learning_rate": 1.7536670367666264e-05, "loss": 41.6548, "step": 1286 },
+{ "epoch": 0.14533751164563394, "grad_norm": 1.9096386432647705, "learning_rate": 1.7424216791869763e-05, "loss": 41.0315, "step": 1287 },
+{ "epoch": 0.14545043900510995, "grad_norm": 1.959307074546814, "learning_rate": 1.7312090510270627e-05, "loss": 41.0755, "step": 1288 },
+{ "epoch": 0.14556336636458597, "grad_norm": 1.9752603769302368, "learning_rate": 1.7200291967288296e-05, "loss": 41.674, "step": 1289 },
+{ "epoch": 0.145676293724062, "grad_norm": 2.136742353439331, "learning_rate": 1.7088821606043148e-05, "loss": 41.6617, "step": 1290 },
+{ "epoch": 0.145789221083538, "grad_norm": 1.8313026428222656, "learning_rate": 1.6977679868354844e-05, "loss": 41.8354, "step": 1291 },
+{ "epoch": 0.14590214844301402, "grad_norm": 1.9043419361114502, "learning_rate": 1.68668671947405e-05, "loss": 41.2256, "step": 1292 },
+{ "epoch": 0.14601507580249004, "grad_norm": 2.2997522354125977, "learning_rate": 1.675638402441294e-05, "loss": 41.4976, "step": 1293 },
+{ "epoch": 0.14612800316196606, "grad_norm": 2.0536930561065674, "learning_rate": 1.6646230795279026e-05, "loss": 41.8953, "step": 1294 },
+{ "epoch": 0.14624093052144208, "grad_norm": 2.1142418384552, "learning_rate": 1.653640794393785e-05, "loss": 41.0377, "step": 1295 },
+{ "epoch": 0.1463538578809181, "grad_norm": 2.0244696140289307, "learning_rate": 1.6426915905679053e-05, "loss": 41.4281, "step": 1296 },
+{ "epoch": 0.1464667852403941, "grad_norm": 1.9640631675720215, "learning_rate": 1.6317755114481058e-05, "loss": 41.4047, "step": 1297 },
+{ "epoch": 0.14657971259987013, "grad_norm": 1.6655915975570679, "learning_rate": 1.6208926003009394e-05, "loss": 41.3211, "step": 1298 },
+{ "epoch": 0.14669263995934614, "grad_norm": 1.6902838945388794, "learning_rate": 1.610042900261488e-05, "loss": 41.9537, "step": 1299 },
+{ "epoch": 0.14680556731882216, "grad_norm": 1.6776202917099, "learning_rate": 1.5992264543332124e-05, "loss": 41.4585, "step": 1300 },
+{ "epoch": 0.14691849467829818, "grad_norm": 2.098252773284912, "learning_rate": 1.588443305387759e-05, "loss": 41.5836, "step": 1301 },
+{ "epoch": 0.1470314220377742, "grad_norm": 1.992134928703308, "learning_rate": 1.5776934961648005e-05, "loss": 41.386, "step": 1302 },
+{ "epoch": 0.1471443493972502, "grad_norm": 1.8798073530197144, "learning_rate": 1.566977069271872e-05, "loss": 41.1442, "step": 1303 },
+{ "epoch": 0.14725727675672623, "grad_norm": 1.8953096866607666, "learning_rate": 1.556294067184182e-05, "loss": 41.9309, "step": 1304 },
+{ "epoch": 0.14737020411620225, "grad_norm": 2.006721258163452, "learning_rate": 1.5456445322444745e-05, "loss": 41.2013, "step": 1305 },
+{ "epoch": 0.14748313147567826, "grad_norm": 1.963657021522522, "learning_rate": 1.5350285066628343e-05, "loss": 42.0917, "step": 1306 },
+{ "epoch": 0.14759605883515428, "grad_norm": 1.8562333583831787, "learning_rate": 1.5244460325165311e-05, "loss": 41.6725, "step": 1307 },
+{ "epoch": 0.1477089861946303, "grad_norm": 1.8185181617736816, "learning_rate": 1.5138971517498524e-05, "loss": 40.9255, "step": 1308 },
+{ "epoch": 0.14782191355410632, "grad_norm": 1.9565984010696411, "learning_rate": 1.5033819061739373e-05, "loss": 41.4162, "step": 1309 },
+{ "epoch": 0.14793484091358233, "grad_norm": 1.6867700815200806, "learning_rate": 1.4929003374666073e-05, "loss": 41.4384, "step": 1310 },
+{ "epoch": 0.14804776827305835, "grad_norm": 1.9899442195892334, "learning_rate": 1.4824524871722046e-05, "loss": 41.8768, "step": 1311 },
+{ "epoch": 0.14816069563253437, "grad_norm": 2.0754857063293457, "learning_rate": 1.4720383967014306e-05, "loss": 41.4493, "step": 1312 },
+{ "epoch": 0.14827362299201038, "grad_norm": 1.9243446588516235, "learning_rate": 1.4616581073311663e-05, "loss": 41.7055, "step": 1313 },
+{ "epoch": 0.1483865503514864, "grad_norm": 1.9794071912765503, "learning_rate": 1.451311660204333e-05, "loss": 41.4, "step": 1314 },
+{ "epoch": 0.14849947771096242, "grad_norm": 2.126321315765381, "learning_rate": 1.4409990963297093e-05, "loss": 41.4146, "step": 1315 },
+{ "epoch": 0.14861240507043844, "grad_norm": 1.9163738489151, "learning_rate": 1.4307204565817755e-05, "loss": 41.6847, "step": 1316 },
+{ "epoch": 0.14872533242991445, "grad_norm": 1.858212947845459, "learning_rate": 1.4204757817005566e-05, "loss": 41.9409, "step": 1317 },
+{ "epoch": 0.14883825978939047, "grad_norm": 1.9574315547943115, "learning_rate": 1.4102651122914434e-05, "loss": 41.7698, "step": 1318 },
+{ "epoch": 0.1489511871488665, "grad_norm": 1.8031094074249268, "learning_rate": 1.4000884888250598e-05, "loss": 41.7422, "step": 1319 },
+{ "epoch": 0.1490641145083425, "grad_norm": 2.0078442096710205, "learning_rate": 1.3899459516370772e-05, "loss": 40.9253, "step": 1320 },
+{ "epoch": 0.14917704186781852, "grad_norm": 1.8266435861587524, "learning_rate": 1.379837540928065e-05, "loss": 41.4533, "step": 1321 },
+{ "epoch": 0.14928996922729454, "grad_norm": 2.0973446369171143, "learning_rate": 1.3697632967633344e-05, "loss": 40.9431, "step": 1322 },
+{ "epoch": 0.14940289658677056, "grad_norm": 1.6257987022399902, "learning_rate": 1.3597232590727638e-05, "loss": 41.2536, "step": 1323 },
+{ "epoch": 0.14951582394624657, "grad_norm": 1.681249976158142, "learning_rate": 1.3497174676506674e-05, "loss": 41.7958, "step": 1324 },
+{ "epoch": 0.1496287513057226, "grad_norm": 1.6969811916351318, "learning_rate": 1.339745962155613e-05, "loss": 41.5943, "step": 1325 },
+{ "epoch": 0.1497416786651986, "grad_norm": 1.9537434577941895, "learning_rate": 1.3298087821102789e-05, "loss": 41.8075, "step": 1326 },
+{ "epoch": 0.14985460602467462, "grad_norm": 1.8188754320144653, "learning_rate": 1.319905966901286e-05, "loss": 41.5007, "step": 1327 },
+{ "epoch": 0.14996753338415064, "grad_norm": 1.819069266319275, "learning_rate": 1.310037555779049e-05, "loss": 41.9137, "step": 1328 },
+{ "epoch": 0.15008046074362666, "grad_norm": 2.534611701965332, "learning_rate": 1.300203587857629e-05, "loss": 40.8807, "step": 1329 },
+{ "epoch": 0.15019338810310268, "grad_norm": 1.8022189140319824, "learning_rate": 1.2904041021145596e-05, "loss": 41.4565, "step": 1330 },
+{ "epoch": 0.1503063154625787, "grad_norm": 1.970423936843872, "learning_rate": 1.2806391373907089e-05, "loss": 41.7179, "step": 1331 },
+{ "epoch": 0.1504192428220547, "grad_norm": 1.832129716873169, "learning_rate": 1.2709087323901104e-05, "loss": 42.0995, "step": 1332 },
+{ "epoch": 0.15053217018153073, "grad_norm": 1.8537226915359497, "learning_rate": 1.2612129256798221e-05, "loss": 41.1949, "step": 1333 },
+{ "epoch": 0.15064509754100674, "grad_norm": 2.194432497024536, "learning_rate": 1.2515517556897772e-05, "loss": 41.9544, "step": 1334 },
+{ "epoch": 0.15075802490048276, "grad_norm": 2.172084331512451, "learning_rate": 1.241925260712612e-05, "loss": 41.4681, "step": 1335 },
+{ "epoch": 0.15087095225995878, "grad_norm": 2.101428270339966, "learning_rate": 1.2323334789035367e-05, "loss": 41.2451, "step": 1336 },
+{ "epoch": 0.1509838796194348, "grad_norm": 2.0989208221435547, "learning_rate": 1.2227764482801607e-05, "loss": 41.4428, "step": 1337 },
+{ "epoch": 0.1510968069789108, "grad_norm": 1.8533416986465454, "learning_rate": 1.2132542067223685e-05, "loss": 41.312, "step": 1338 },
+{ "epoch": 0.15120973433838683, "grad_norm": 1.8072376251220703, "learning_rate": 1.2037667919721506e-05, "loss": 41.5472, "step": 1339 },
+{ "epoch": 0.15132266169786285, "grad_norm": 2.1498804092407227, "learning_rate": 1.1943142416334596e-05, "loss": 41.6282, "step": 1340 },
+{ "epoch": 0.15143558905733887, "grad_norm": 1.925567865371704, "learning_rate": 1.1848965931720569e-05, "loss": 41.917, "step": 1341 },
+{ "epoch": 0.15154851641681488, "grad_norm": 2.3730688095092773, "learning_rate": 1.17551388391537e-05, "loss": 41.4126, "step": 1342 },
+{ "epoch": 0.1516614437762909, "grad_norm": 1.9631848335266113, "learning_rate": 1.1661661510523502e-05, "loss": 41.4355, "step": 1343 },
+{ "epoch": 0.15177437113576692, "grad_norm": 2.441002368927002, "learning_rate": 1.1568534316333101e-05, "loss": 41.6679, "step": 1344 },
+{ "epoch": 0.15188729849524293, "grad_norm": 1.8539143800735474, "learning_rate": 1.1475757625697858e-05, "loss": 41.5066, "step": 1345 },
+{ "epoch": 0.15200022585471895, "grad_norm": 1.8052140474319458, "learning_rate": 1.1383331806343878e-05, "loss": 41.397, "step": 1346 },
+{ "epoch": 0.15211315321419497, "grad_norm": 2.0468220710754395, "learning_rate": 1.1291257224606577e-05, "loss": 41.2182, "step": 1347 },
+{ "epoch": 0.15222608057367099, "grad_norm": 1.8978148698806763, "learning_rate": 1.1199534245429255e-05, "loss": 41.1488, "step": 1348 },
+{ "epoch": 0.152339007933147, "grad_norm": 1.5564873218536377, "learning_rate": 1.1108163232361602e-05, "loss": 41.7649, "step": 1349 },
+{ "epoch": 0.15245193529262302, "grad_norm": 1.891811490058899, "learning_rate": 1.1017144547558178e-05, "loss": 41.6163, "step": 1350 },
+{ "epoch": 0.15256486265209904, "grad_norm": 1.769851565361023, "learning_rate": 1.0926478551777197e-05, "loss": 41.5599, "step": 1351 },
+{ "epoch": 0.15267779001157505, "grad_norm": 1.988309383392334, "learning_rate": 1.0836165604378868e-05, "loss": 41.5409, "step": 1352 },
+{ "epoch": 0.15279071737105107, "grad_norm": 2.5843052864074707, "learning_rate": 1.074620606332416e-05, "loss": 41.8119, "step": 1353 },
+{ "epoch": 0.1529036447305271, "grad_norm": 1.761756181716919, "learning_rate": 1.0656600285173258e-05, "loss": 41.1715, "step": 1354 },
+{ "epoch": 0.1530165720900031, "grad_norm": 2.01920485496521, "learning_rate": 1.0567348625084127e-05, "loss": 40.9651, "step": 1355 },
+{ "epoch": 0.15312949944947912, "grad_norm": 1.7967004776000977, "learning_rate": 1.0478451436811232e-05, "loss": 41.6348, "step": 1356 },
+{ "epoch": 0.15324242680895514, "grad_norm": 1.9052681922912598, "learning_rate": 1.0389909072704041e-05, "loss": 41.7058, "step": 1357 },
+{ "epoch": 0.15335535416843116, "grad_norm": 1.700021505355835, "learning_rate": 1.0301721883705683e-05, "loss": 41.0034, "step": 1358 },
+{ "epoch": 0.15346828152790717, "grad_norm": 1.8079664707183838, "learning_rate": 1.0213890219351518e-05, "loss": 41.747, "step": 1359 },
+{ "epoch": 0.1535812088873832, "grad_norm": 1.777335286140442, "learning_rate": 1.0126414427767717e-05, "loss": 41.7372, "step": 1360 },
+{ "epoch": 0.1536941362468592, "grad_norm": 1.8270072937011719, "learning_rate": 1.0039294855669957e-05, "loss": 41.8482, "step": 1361 },
+{ "epoch": 0.15380706360633523, "grad_norm": 2.1357474327087402, "learning_rate": 9.952531848362057e-06, "loss": 41.7621, "step": 1362 },
+{ "epoch": 0.15391999096581124, "grad_norm": 2.1133153438568115, "learning_rate": 9.866125749734534e-06, "loss": 40.9865, "step": 1363 },
+{ "epoch": 0.15403291832528726, "grad_norm": 1.882265567779541, "learning_rate": 9.780076902263247e-06, "loss": 41.8581, "step": 1364 },
+{ "epoch": 0.15414584568476328, "grad_norm": 1.9558875560760498, "learning_rate": 9.694385647008108e-06, "loss": 41.558, "step": 1365 },
+{ "epoch": 0.1542587730442393, "grad_norm": 2.2181105613708496, "learning_rate": 9.609052323611666e-06, "loss": 41.0961, "step": 1366 },
+{ "epoch": 0.1543717004037153, "grad_norm": 2.1352272033691406, "learning_rate": 9.524077270297837e-06, "loss": 41.8123, "step": 1367 },
+{ "epoch": 0.15448462776319133, "grad_norm": 2.2755446434020996, "learning_rate": 9.439460823870471e-06, "loss": 41.2714, "step": 1368 },
+{ "epoch": 0.15459755512266735, "grad_norm": 2.072122812271118, "learning_rate": 9.355203319712025e-06, "loss": 41.0492, "step": 1369 },
+{ "epoch": 0.15471048248214336, "grad_norm": 1.640592098236084, "learning_rate": 9.271305091782312e-06, "loss": 41.4504, "step": 1370 },
+{ "epoch": 0.15482340984161938, "grad_norm": 1.7952368259429932,
|
| 9636 |
+
"learning_rate": 9.187766472617099e-06,
|
| 9637 |
+
"loss": 41.5292,
|
| 9638 |
+
"step": 1371
|
| 9639 |
+
},
|
| 9640 |
+
{
|
| 9641 |
+
"epoch": 0.1549363372010954,
|
| 9642 |
+
"grad_norm": 2.1799731254577637,
|
| 9643 |
+
"learning_rate": 9.104587793326901e-06,
|
| 9644 |
+
"loss": 41.4661,
|
| 9645 |
+
"step": 1372
|
| 9646 |
+
},
|
| 9647 |
+
{
|
| 9648 |
+
"epoch": 0.15504926456057141,
|
| 9649 |
+
"grad_norm": 1.7992476224899292,
|
| 9650 |
+
"learning_rate": 9.021769383595502e-06,
|
| 9651 |
+
"loss": 41.6134,
|
| 9652 |
+
"step": 1373
|
| 9653 |
+
},
|
| 9654 |
+
{
|
| 9655 |
+
"epoch": 0.15516219192004743,
|
| 9656 |
+
"grad_norm": 2.11899995803833,
|
| 9657 |
+
"learning_rate": 8.939311571678754e-06,
|
| 9658 |
+
"loss": 41.6517,
|
| 9659 |
+
"step": 1374
|
| 9660 |
+
},
|
| 9661 |
+
{
|
| 9662 |
+
"epoch": 0.15527511927952345,
|
| 9663 |
+
"grad_norm": 2.0447146892547607,
|
| 9664 |
+
"learning_rate": 8.85721468440327e-06,
|
| 9665 |
+
"loss": 41.2195,
|
| 9666 |
+
"step": 1375
|
| 9667 |
+
},
|
| 9668 |
+
{
|
| 9669 |
+
"epoch": 0.15538804663899947,
|
| 9670 |
+
"grad_norm": 2.048393487930298,
|
| 9671 |
+
"learning_rate": 8.775479047165102e-06,
|
| 9672 |
+
"loss": 41.9072,
|
| 9673 |
+
"step": 1376
|
| 9674 |
+
},
|
| 9675 |
+
{
|
| 9676 |
+
"epoch": 0.15550097399847548,
|
| 9677 |
+
"grad_norm": 1.9899383783340454,
|
| 9678 |
+
"learning_rate": 8.69410498392853e-06,
|
| 9679 |
+
"loss": 41.5468,
|
| 9680 |
+
"step": 1377
|
| 9681 |
+
},
|
| 9682 |
+
{
|
| 9683 |
+
"epoch": 0.1556139013579515,
|
| 9684 |
+
"grad_norm": 1.7755231857299805,
|
| 9685 |
+
"learning_rate": 8.613092817224611e-06,
|
| 9686 |
+
"loss": 41.2212,
|
| 9687 |
+
"step": 1378
|
| 9688 |
+
},
|
| 9689 |
+
{
|
| 9690 |
+
"epoch": 0.15572682871742752,
|
| 9691 |
+
"grad_norm": 1.9533270597457886,
|
| 9692 |
+
"learning_rate": 8.53244286815006e-06,
|
| 9693 |
+
"loss": 41.5454,
|
| 9694 |
+
"step": 1379
|
| 9695 |
+
},
|
| 9696 |
+
{
|
| 9697 |
+
"epoch": 0.15583975607690354,
|
| 9698 |
+
"grad_norm": 1.5811653137207031,
|
| 9699 |
+
"learning_rate": 8.452155456365918e-06,
|
| 9700 |
+
"loss": 40.8594,
|
| 9701 |
+
"step": 1380
|
| 9702 |
+
},
|
| 9703 |
+
{
|
| 9704 |
+
"epoch": 0.15595268343637955,
|
| 9705 |
+
"grad_norm": 2.1725199222564697,
|
| 9706 |
+
"learning_rate": 8.372230900096256e-06,
|
| 9707 |
+
"loss": 41.0134,
|
| 9708 |
+
"step": 1381
|
| 9709 |
+
},
|
| 9710 |
+
{
|
| 9711 |
+
"epoch": 0.15606561079585557,
|
| 9712 |
+
"grad_norm": 2.179192543029785,
|
| 9713 |
+
"learning_rate": 8.292669516127039e-06,
|
| 9714 |
+
"loss": 41.9176,
|
| 9715 |
+
"step": 1382
|
| 9716 |
+
},
|
| 9717 |
+
{
|
| 9718 |
+
"epoch": 0.1561785381553316,
|
| 9719 |
+
"grad_norm": 1.90076744556427,
|
| 9720 |
+
"learning_rate": 8.213471619804647e-06,
|
| 9721 |
+
"loss": 41.0984,
|
| 9722 |
+
"step": 1383
|
| 9723 |
+
},
|
| 9724 |
+
{
|
| 9725 |
+
"epoch": 0.1562914655148076,
|
| 9726 |
+
"grad_norm": 2.024564027786255,
|
| 9727 |
+
"learning_rate": 8.134637525034839e-06,
|
| 9728 |
+
"loss": 41.1977,
|
| 9729 |
+
"step": 1384
|
| 9730 |
+
},
|
| 9731 |
+
{
|
| 9732 |
+
"epoch": 0.15640439287428362,
|
| 9733 |
+
"grad_norm": 1.956028699874878,
|
| 9734 |
+
"learning_rate": 8.056167544281378e-06,
|
| 9735 |
+
"loss": 41.295,
|
| 9736 |
+
"step": 1385
|
| 9737 |
+
},
|
| 9738 |
+
{
|
| 9739 |
+
"epoch": 0.15651732023375964,
|
| 9740 |
+
"grad_norm": 2.1138486862182617,
|
| 9741 |
+
"learning_rate": 7.978061988564867e-06,
|
| 9742 |
+
"loss": 42.0955,
|
| 9743 |
+
"step": 1386
|
| 9744 |
+
},
|
| 9745 |
+
{
|
| 9746 |
+
"epoch": 0.15663024759323566,
|
| 9747 |
+
"grad_norm": 1.8865984678268433,
|
| 9748 |
+
"learning_rate": 7.900321167461523e-06,
|
| 9749 |
+
"loss": 41.1149,
|
| 9750 |
+
"step": 1387
|
| 9751 |
+
},
|
| 9752 |
+
{
|
| 9753 |
+
"epoch": 0.15674317495271167,
|
| 9754 |
+
"grad_norm": 1.7538444995880127,
|
| 9755 |
+
"learning_rate": 7.822945389101832e-06,
|
| 9756 |
+
"loss": 41.9549,
|
| 9757 |
+
"step": 1388
|
| 9758 |
+
},
|
| 9759 |
+
{
|
| 9760 |
+
"epoch": 0.1568561023121877,
|
| 9761 |
+
"grad_norm": 2.070150375366211,
|
| 9762 |
+
"learning_rate": 7.745934960169454e-06,
|
| 9763 |
+
"loss": 41.8016,
|
| 9764 |
+
"step": 1389
|
| 9765 |
+
},
|
| 9766 |
+
{
|
| 9767 |
+
"epoch": 0.1569690296716637,
|
| 9768 |
+
"grad_norm": 1.899046778678894,
|
| 9769 |
+
"learning_rate": 7.669290185899946e-06,
|
| 9770 |
+
"loss": 41.3563,
|
| 9771 |
+
"step": 1390
|
| 9772 |
+
},
|
| 9773 |
+
{
|
| 9774 |
+
"epoch": 0.15708195703113972,
|
| 9775 |
+
"grad_norm": 2.0018699169158936,
|
| 9776 |
+
"learning_rate": 7.59301137007965e-06,
|
| 9777 |
+
"loss": 41.299,
|
| 9778 |
+
"step": 1391
|
| 9779 |
+
},
|
| 9780 |
+
{
|
| 9781 |
+
"epoch": 0.15719488439061574,
|
| 9782 |
+
"grad_norm": 2.224987030029297,
|
| 9783 |
+
"learning_rate": 7.517098815044288e-06,
|
| 9784 |
+
"loss": 41.5934,
|
| 9785 |
+
"step": 1392
|
| 9786 |
+
},
|
| 9787 |
+
{
|
| 9788 |
+
"epoch": 0.15730781175009176,
|
| 9789 |
+
"grad_norm": 1.7218855619430542,
|
| 9790 |
+
"learning_rate": 7.441552821677966e-06,
|
| 9791 |
+
"loss": 41.4066,
|
| 9792 |
+
"step": 1393
|
| 9793 |
+
},
|
| 9794 |
+
{
|
| 9795 |
+
"epoch": 0.15742073910956778,
|
| 9796 |
+
"grad_norm": 1.9281781911849976,
|
| 9797 |
+
"learning_rate": 7.366373689411865e-06,
|
| 9798 |
+
"loss": 41.7988,
|
| 9799 |
+
"step": 1394
|
| 9800 |
+
},
|
| 9801 |
+
{
|
| 9802 |
+
"epoch": 0.1575336664690438,
|
| 9803 |
+
"grad_norm": 1.8952256441116333,
|
| 9804 |
+
"learning_rate": 7.291561716223094e-06,
|
| 9805 |
+
"loss": 41.6274,
|
| 9806 |
+
"step": 1395
|
| 9807 |
+
},
|
| 9808 |
+
{
|
| 9809 |
+
"epoch": 0.1576465938285198,
|
| 9810 |
+
"grad_norm": 1.6827173233032227,
|
| 9811 |
+
"learning_rate": 7.217117198633561e-06,
|
| 9812 |
+
"loss": 41.794,
|
| 9813 |
+
"step": 1396
|
| 9814 |
+
},
|
| 9815 |
+
{
|
| 9816 |
+
"epoch": 0.15775952118799583,
|
| 9817 |
+
"grad_norm": 2.3626434803009033,
|
| 9818 |
+
"learning_rate": 7.143040431708625e-06,
|
| 9819 |
+
"loss": 41.8248,
|
| 9820 |
+
"step": 1397
|
| 9821 |
+
},
|
| 9822 |
+
{
|
| 9823 |
+
"epoch": 0.15787244854747184,
|
| 9824 |
+
"grad_norm": 1.7941502332687378,
|
| 9825 |
+
"learning_rate": 7.069331709056159e-06,
|
| 9826 |
+
"loss": 41.255,
|
| 9827 |
+
"step": 1398
|
| 9828 |
+
},
|
| 9829 |
+
{
|
| 9830 |
+
"epoch": 0.15798537590694786,
|
| 9831 |
+
"grad_norm": 1.8452963829040527,
|
| 9832 |
+
"learning_rate": 6.995991322825191e-06,
|
| 9833 |
+
"loss": 41.3533,
|
| 9834 |
+
"step": 1399
|
| 9835 |
+
},
|
| 9836 |
+
{
|
| 9837 |
+
"epoch": 0.15809830326642388,
|
| 9838 |
+
"grad_norm": 2.0860519409179688,
|
| 9839 |
+
"learning_rate": 6.923019563704847e-06,
|
| 9840 |
+
"loss": 42.1139,
|
| 9841 |
+
"step": 1400
|
| 9842 |
+
},
|
| 9843 |
+
{
|
| 9844 |
+
"epoch": 0.1582112306258999,
|
| 9845 |
+
"grad_norm": 1.7720417976379395,
|
| 9846 |
+
"learning_rate": 6.8504167209232364e-06,
|
| 9847 |
+
"loss": 41.8679,
|
| 9848 |
+
"step": 1401
|
| 9849 |
+
},
|
| 9850 |
+
{
|
| 9851 |
+
"epoch": 0.1583241579853759,
|
| 9852 |
+
"grad_norm": 1.9282312393188477,
|
| 9853 |
+
"learning_rate": 6.778183082246148e-06,
|
| 9854 |
+
"loss": 41.0663,
|
| 9855 |
+
"step": 1402
|
| 9856 |
+
},
|
| 9857 |
+
{
|
| 9858 |
+
"epoch": 0.15843708534485193,
|
| 9859 |
+
"grad_norm": 1.9489457607269287,
|
| 9860 |
+
"learning_rate": 6.706318933976064e-06,
|
| 9861 |
+
"loss": 41.404,
|
| 9862 |
+
"step": 1403
|
| 9863 |
+
},
|
| 9864 |
+
{
|
| 9865 |
+
"epoch": 0.15855001270432795,
|
| 9866 |
+
"grad_norm": 1.6636449098587036,
|
| 9867 |
+
"learning_rate": 6.6348245609509475e-06,
|
| 9868 |
+
"loss": 41.228,
|
| 9869 |
+
"step": 1404
|
| 9870 |
+
},
|
| 9871 |
+
{
|
| 9872 |
+
"epoch": 0.15866294006380396,
|
| 9873 |
+
"grad_norm": 2.0419087409973145,
|
| 9874 |
+
"learning_rate": 6.563700246543159e-06,
|
| 9875 |
+
"loss": 41.4435,
|
| 9876 |
+
"step": 1405
|
| 9877 |
+
},
|
| 9878 |
+
{
|
| 9879 |
+
"epoch": 0.15877586742327998,
|
| 9880 |
+
"grad_norm": 1.7180472612380981,
|
| 9881 |
+
"learning_rate": 6.492946272658296e-06,
|
| 9882 |
+
"loss": 41.9002,
|
| 9883 |
+
"step": 1406
|
| 9884 |
+
},
|
| 9885 |
+
{
|
| 9886 |
+
"epoch": 0.158888794782756,
|
| 9887 |
+
"grad_norm": 2.0674068927764893,
|
| 9888 |
+
"learning_rate": 6.422562919734076e-06,
|
| 9889 |
+
"loss": 41.0515,
|
| 9890 |
+
"step": 1407
|
| 9891 |
+
},
|
| 9892 |
+
{
|
| 9893 |
+
"epoch": 0.15900172214223202,
|
| 9894 |
+
"grad_norm": 2.2271409034729004,
|
| 9895 |
+
"learning_rate": 6.3525504667392595e-06,
|
| 9896 |
+
"loss": 40.5825,
|
| 9897 |
+
"step": 1408
|
| 9898 |
+
},
|
| 9899 |
+
{
|
| 9900 |
+
"epoch": 0.15911464950170803,
|
| 9901 |
+
"grad_norm": 2.2412948608398438,
|
| 9902 |
+
"learning_rate": 6.282909191172504e-06,
|
| 9903 |
+
"loss": 41.7428,
|
| 9904 |
+
"step": 1409
|
| 9905 |
+
},
|
| 9906 |
+
{
|
| 9907 |
+
"epoch": 0.15922757686118405,
|
| 9908 |
+
"grad_norm": 1.8388715982437134,
|
| 9909 |
+
"learning_rate": 6.213639369061297e-06,
|
| 9910 |
+
"loss": 41.7851,
|
| 9911 |
+
"step": 1410
|
| 9912 |
+
},
|
| 9913 |
+
{
|
| 9914 |
+
"epoch": 0.15934050422066007,
|
| 9915 |
+
"grad_norm": 1.7997721433639526,
|
| 9916 |
+
"learning_rate": 6.14474127496083e-06,
|
| 9917 |
+
"loss": 41.645,
|
| 9918 |
+
"step": 1411
|
| 9919 |
+
},
|
| 9920 |
+
{
|
| 9921 |
+
"epoch": 0.15945343158013608,
|
| 9922 |
+
"grad_norm": 1.7507959604263306,
|
| 9923 |
+
"learning_rate": 6.076215181952949e-06,
|
| 9924 |
+
"loss": 41.7719,
|
| 9925 |
+
"step": 1412
|
| 9926 |
+
},
|
| 9927 |
+
{
|
| 9928 |
+
"epoch": 0.1595663589396121,
|
| 9929 |
+
"grad_norm": 1.7743481397628784,
|
| 9930 |
+
"learning_rate": 6.008061361645034e-06,
|
| 9931 |
+
"loss": 41.7183,
|
| 9932 |
+
"step": 1413
|
| 9933 |
+
},
|
| 9934 |
+
{
|
| 9935 |
+
"epoch": 0.15967928629908812,
|
| 9936 |
+
"grad_norm": 2.146124839782715,
|
| 9937 |
+
"learning_rate": 5.940280084168947e-06,
|
| 9938 |
+
"loss": 41.5831,
|
| 9939 |
+
"step": 1414
|
| 9940 |
+
},
|
| 9941 |
+
{
|
| 9942 |
+
"epoch": 0.15979221365856414,
|
| 9943 |
+
"grad_norm": 1.9941959381103516,
|
| 9944 |
+
"learning_rate": 5.872871618179953e-06,
|
| 9945 |
+
"loss": 41.142,
|
| 9946 |
+
"step": 1415
|
| 9947 |
+
},
|
| 9948 |
+
{
|
| 9949 |
+
"epoch": 0.15990514101804015,
|
| 9950 |
+
"grad_norm": 2.4689252376556396,
|
| 9951 |
+
"learning_rate": 5.805836230855655e-06,
|
| 9952 |
+
"loss": 41.0021,
|
| 9953 |
+
"step": 1416
|
| 9954 |
+
},
|
| 9955 |
+
{
|
| 9956 |
+
"epoch": 0.16001806837751617,
|
| 9957 |
+
"grad_norm": 2.225574016571045,
|
| 9958 |
+
"learning_rate": 5.739174187894925e-06,
|
| 9959 |
+
"loss": 40.9235,
|
| 9960 |
+
"step": 1417
|
| 9961 |
+
},
|
| 9962 |
+
{
|
| 9963 |
+
"epoch": 0.1601309957369922,
|
| 9964 |
+
"grad_norm": 1.8958646059036255,
|
| 9965 |
+
"learning_rate": 5.672885753516876e-06,
|
| 9966 |
+
"loss": 41.13,
|
| 9967 |
+
"step": 1418
|
| 9968 |
+
},
|
| 9969 |
+
{
|
| 9970 |
+
"epoch": 0.1602439230964682,
|
| 9971 |
+
"grad_norm": 1.6158885955810547,
|
| 9972 |
+
"learning_rate": 5.606971190459809e-06,
|
| 9973 |
+
"loss": 41.9244,
|
| 9974 |
+
"step": 1419
|
| 9975 |
+
},
|
| 9976 |
+
{
|
| 9977 |
+
"epoch": 0.16035685045594422,
|
| 9978 |
+
"grad_norm": 1.825427532196045,
|
| 9979 |
+
"learning_rate": 5.541430759980138e-06,
|
| 9980 |
+
"loss": 41.6571,
|
| 9981 |
+
"step": 1420
|
| 9982 |
+
},
|
| 9983 |
+
{
|
| 9984 |
+
"epoch": 0.16046977781542024,
|
| 9985 |
+
"grad_norm": 1.889722466468811,
|
| 9986 |
+
"learning_rate": 5.476264721851387e-06,
|
| 9987 |
+
"loss": 41.3371,
|
| 9988 |
+
"step": 1421
|
| 9989 |
+
},
|
| 9990 |
+
{
|
| 9991 |
+
"epoch": 0.16058270517489626,
|
| 9992 |
+
"grad_norm": 1.7982354164123535,
|
| 9993 |
+
"learning_rate": 5.411473334363171e-06,
|
| 9994 |
+
"loss": 41.0663,
|
| 9995 |
+
"step": 1422
|
| 9996 |
+
},
|
| 9997 |
+
{
|
| 9998 |
+
"epoch": 0.16069563253437227,
|
| 9999 |
+
"grad_norm": 1.6511905193328857,
|
| 10000 |
+
"learning_rate": 5.3470568543201314e-06,
|
| 10001 |
+
"loss": 42.2116,
|
| 10002 |
+
"step": 1423
|
| 10003 |
+
},
|
| 10004 |
+
{
|
| 10005 |
+
"epoch": 0.1608085598938483,
|
| 10006 |
+
"grad_norm": 1.9496500492095947,
|
| 10007 |
+
"learning_rate": 5.283015537040958e-06,
|
| 10008 |
+
"loss": 41.1136,
|
| 10009 |
+
"step": 1424
|
| 10010 |
+
},
|
| 10011 |
+
{
|
| 10012 |
+
"epoch": 0.1609214872533243,
|
| 10013 |
+
"grad_norm": 1.9211938381195068,
|
| 10014 |
+
"learning_rate": 5.2193496363573516e-06,
|
| 10015 |
+
"loss": 42.2849,
|
| 10016 |
+
"step": 1425
|
| 10017 |
+
},
|
| 10018 |
+
{
|
| 10019 |
+
"epoch": 0.16103441461280033,
|
| 10020 |
+
"grad_norm": 1.7339675426483154,
|
| 10021 |
+
"learning_rate": 5.1560594046130115e-06,
|
| 10022 |
+
"loss": 41.3585,
|
| 10023 |
+
"step": 1426
|
| 10024 |
+
},
|
| 10025 |
+
{
|
| 10026 |
+
"epoch": 0.16114734197227634,
|
| 10027 |
+
"grad_norm": 1.8082672357559204,
|
| 10028 |
+
"learning_rate": 5.093145092662676e-06,
|
| 10029 |
+
"loss": 41.3625,
|
| 10030 |
+
"step": 1427
|
| 10031 |
+
},
|
| 10032 |
+
{
|
| 10033 |
+
"epoch": 0.16126026933175236,
|
| 10034 |
+
"grad_norm": 1.9669008255004883,
|
| 10035 |
+
"learning_rate": 5.0306069498710974e-06,
|
| 10036 |
+
"loss": 41.6191,
|
| 10037 |
+
"step": 1428
|
| 10038 |
+
},
|
| 10039 |
+
{
|
| 10040 |
+
"epoch": 0.16137319669122838,
|
| 10041 |
+
"grad_norm": 2.0831713676452637,
|
| 10042 |
+
"learning_rate": 4.968445224112051e-06,
|
| 10043 |
+
"loss": 40.7883,
|
| 10044 |
+
"step": 1429
|
| 10045 |
+
},
|
| 10046 |
+
{
|
| 10047 |
+
"epoch": 0.1614861240507044,
|
| 10048 |
+
"grad_norm": 2.5951004028320312,
|
| 10049 |
+
"learning_rate": 4.9066601617673515e-06,
|
| 10050 |
+
"loss": 42.2429,
|
| 10051 |
+
"step": 1430
|
| 10052 |
+
},
|
| 10053 |
+
{
|
| 10054 |
+
"epoch": 0.1615990514101804,
|
| 10055 |
+
"grad_norm": 1.892146348953247,
|
| 10056 |
+
"learning_rate": 4.845252007725909e-06,
|
| 10057 |
+
"loss": 42.0542,
|
| 10058 |
+
"step": 1431
|
| 10059 |
+
},
|
| 10060 |
+
{
|
| 10061 |
+
"epoch": 0.16171197876965643,
|
| 10062 |
+
"grad_norm": 1.7356703281402588,
|
| 10063 |
+
"learning_rate": 4.784221005382705e-06,
|
| 10064 |
+
"loss": 41.5224,
|
| 10065 |
+
"step": 1432
|
| 10066 |
+
},
|
| 10067 |
+
{
|
| 10068 |
+
"epoch": 0.16182490612913245,
|
| 10069 |
+
"grad_norm": 1.9348182678222656,
|
| 10070 |
+
"learning_rate": 4.723567396637873e-06,
|
| 10071 |
+
"loss": 41.4315,
|
| 10072 |
+
"step": 1433
|
| 10073 |
+
},
|
| 10074 |
+
{
|
| 10075 |
+
"epoch": 0.16193783348860846,
|
| 10076 |
+
"grad_norm": 1.8017065525054932,
|
| 10077 |
+
"learning_rate": 4.663291421895743e-06,
|
| 10078 |
+
"loss": 41.125,
|
| 10079 |
+
"step": 1434
|
| 10080 |
+
},
|
| 10081 |
+
{
|
| 10082 |
+
"epoch": 0.16205076084808448,
|
| 10083 |
+
"grad_norm": 2.100236654281616,
|
| 10084 |
+
"learning_rate": 4.603393320063831e-06,
|
| 10085 |
+
"loss": 40.9065,
|
| 10086 |
+
"step": 1435
|
| 10087 |
+
},
|
| 10088 |
+
{
|
| 10089 |
+
"epoch": 0.1621636882075605,
|
| 10090 |
+
"grad_norm": 1.7695626020431519,
|
| 10091 |
+
"learning_rate": 4.543873328551951e-06,
|
| 10092 |
+
"loss": 41.7916,
|
| 10093 |
+
"step": 1436
|
| 10094 |
+
},
|
| 10095 |
+
{
|
| 10096 |
+
"epoch": 0.16227661556703651,
|
| 10097 |
+
"grad_norm": 1.8013639450073242,
|
| 10098 |
+
"learning_rate": 4.484731683271259e-06,
|
| 10099 |
+
"loss": 41.3565,
|
| 10100 |
+
"step": 1437
|
| 10101 |
+
},
|
| 10102 |
+
{
|
| 10103 |
+
"epoch": 0.16238954292651253,
|
| 10104 |
+
"grad_norm": 1.7616043090820312,
|
| 10105 |
+
"learning_rate": 4.425968618633292e-06,
|
| 10106 |
+
"loss": 41.7228,
|
| 10107 |
+
"step": 1438
|
| 10108 |
+
},
|
| 10109 |
+
{
|
| 10110 |
+
"epoch": 0.16250247028598855,
|
| 10111 |
+
"grad_norm": 2.29423451423645,
|
| 10112 |
+
"learning_rate": 4.367584367549082e-06,
|
| 10113 |
+
"loss": 41.421,
|
| 10114 |
+
"step": 1439
|
| 10115 |
+
},
|
| 10116 |
+
{
|
| 10117 |
+
"epoch": 0.16261539764546457,
|
| 10118 |
+
"grad_norm": 2.2017362117767334,
|
| 10119 |
+
"learning_rate": 4.309579161428201e-06,
|
| 10120 |
+
"loss": 40.3983,
|
| 10121 |
+
"step": 1440
|
| 10122 |
+
},
|
| 10123 |
+
{
|
| 10124 |
+
"epoch": 0.16272832500494058,
|
| 10125 |
+
"grad_norm": 2.069312810897827,
|
| 10126 |
+
"learning_rate": 4.251953230177852e-06,
|
| 10127 |
+
"loss": 41.8968,
|
| 10128 |
+
"step": 1441
|
| 10129 |
+
},
|
| 10130 |
+
{
|
| 10131 |
+
"epoch": 0.1628412523644166,
|
| 10132 |
+
"grad_norm": 2.1637330055236816,
|
| 10133 |
+
"learning_rate": 4.1947068022019646e-06,
|
| 10134 |
+
"loss": 41.6269,
|
| 10135 |
+
"step": 1442
|
| 10136 |
+
},
|
| 10137 |
+
{
|
| 10138 |
+
"epoch": 0.16295417972389262,
|
| 10139 |
+
"grad_norm": 2.2565691471099854,
|
| 10140 |
+
"learning_rate": 4.137840104400259e-06,
|
| 10141 |
+
"loss": 41.8184,
|
| 10142 |
+
"step": 1443
|
| 10143 |
+
},
|
| 10144 |
+
{
|
| 10145 |
+
"epoch": 0.16306710708336863,
|
| 10146 |
+
"grad_norm": 1.894077181816101,
|
| 10147 |
+
"learning_rate": 4.081353362167406e-06,
|
| 10148 |
+
"loss": 41.1551,
|
| 10149 |
+
"step": 1444
|
| 10150 |
+
},
|
| 10151 |
+
{
|
| 10152 |
+
"epoch": 0.16318003444284465,
|
| 10153 |
+
"grad_norm": 2.083996534347534,
|
| 10154 |
+
"learning_rate": 4.02524679939208e-06,
|
| 10155 |
+
"loss": 41.4961,
|
| 10156 |
+
"step": 1445
|
| 10157 |
+
},
|
| 10158 |
+
{
|
| 10159 |
+
"epoch": 0.16329296180232067,
|
| 10160 |
+
"grad_norm": 2.0690250396728516,
|
| 10161 |
+
"learning_rate": 3.969520638456103e-06,
|
| 10162 |
+
"loss": 41.3856,
|
| 10163 |
+
"step": 1446
|
| 10164 |
+
},
|
| 10165 |
+
{
|
| 10166 |
+
"epoch": 0.16340588916179669,
|
| 10167 |
+
"grad_norm": 2.0313730239868164,
|
| 10168 |
+
"learning_rate": 3.91417510023353e-06,
|
| 10169 |
+
"loss": 41.5016,
|
| 10170 |
+
"step": 1447
|
| 10171 |
+
},
|
| 10172 |
+
{
|
| 10173 |
+
"epoch": 0.1635188165212727,
|
| 10174 |
+
"grad_norm": 2.005993604660034,
|
| 10175 |
+
"learning_rate": 3.859210404089808e-06,
|
| 10176 |
+
"loss": 41.0587,
|
| 10177 |
+
"step": 1448
|
| 10178 |
+
},
|
| 10179 |
+
{
|
| 10180 |
+
"epoch": 0.16363174388074872,
|
| 10181 |
+
"grad_norm": 1.8808915615081787,
|
| 10182 |
+
"learning_rate": 3.8046267678809034e-06,
|
| 10183 |
+
"loss": 41.5436,
|
| 10184 |
+
"step": 1449
|
| 10185 |
+
},
|
| 10186 |
+
{
|
| 10187 |
+
"epoch": 0.16374467124022474,
|
| 10188 |
+
"grad_norm": 2.079089879989624,
|
| 10189 |
+
"learning_rate": 3.750424407952402e-06,
|
| 10190 |
+
"loss": 41.5487,
|
| 10191 |
+
"step": 1450
|
| 10192 |
+
},
|
| 10193 |
+
{
|
| 10194 |
+
"epoch": 0.16385759859970075,
|
| 10195 |
+
"grad_norm": 1.9183429479599,
|
| 10196 |
+
"learning_rate": 3.696603539138699e-06,
|
| 10197 |
+
"loss": 42.0131,
|
| 10198 |
+
"step": 1451
|
| 10199 |
+
},
|
| 10200 |
+
{
|
| 10201 |
+
"epoch": 0.16397052595917677,
|
| 10202 |
+
"grad_norm": 2.3428916931152344,
|
| 10203 |
+
"learning_rate": 3.6431643747621203e-06,
|
| 10204 |
+
"loss": 41.0731,
|
| 10205 |
+
"step": 1452
|
| 10206 |
+
},
|
| 10207 |
+
{
|
| 10208 |
+
"epoch": 0.1640834533186528,
|
| 10209 |
+
"grad_norm": 1.6380535364151,
|
| 10210 |
+
"learning_rate": 3.5901071266320805e-06,
|
| 10211 |
+
"loss": 41.8139,
|
| 10212 |
+
"step": 1453
|
| 10213 |
+
},
|
| 10214 |
+
{
|
| 10215 |
+
"epoch": 0.1641963806781288,
|
| 10216 |
+
"grad_norm": 2.2026431560516357,
|
| 10217 |
+
"learning_rate": 3.53743200504425e-06,
|
| 10218 |
+
"loss": 41.9196,
|
| 10219 |
+
"step": 1454
|
| 10220 |
+
},
|
| 10221 |
+
{
|
| 10222 |
+
"epoch": 0.16430930803760482,
|
| 10223 |
+
"grad_norm": 2.0222666263580322,
|
| 10224 |
+
"learning_rate": 3.4851392187797096e-06,
|
| 10225 |
+
"loss": 41.373,
|
| 10226 |
+
"step": 1455
|
| 10227 |
+
},
|
| 10228 |
+
{
|
| 10229 |
+
"epoch": 0.16442223539708084,
|
| 10230 |
+
"grad_norm": 1.94119131565094,
|
| 10231 |
+
"learning_rate": 3.4332289751041526e-06,
|
| 10232 |
+
"loss": 41.6852,
|
| 10233 |
+
"step": 1456
|
| 10234 |
+
},
|
| 10235 |
+
{
|
| 10236 |
+
"epoch": 0.16453516275655686,
|
| 10237 |
+
"grad_norm": 2.0075745582580566,
|
| 10238 |
+
"learning_rate": 3.3817014797669855e-06,
|
| 10239 |
+
"loss": 42.2261,
|
| 10240 |
+
"step": 1457
|
| 10241 |
+
},
|
| 10242 |
+
{
|
| 10243 |
+
"epoch": 0.16464809011603287,
|
| 10244 |
+
"grad_norm": 2.1959972381591797,
|
| 10245 |
+
"learning_rate": 3.3305569370006394e-06,
|
| 10246 |
+
"loss": 41.6531,
|
| 10247 |
+
"step": 1458
|
| 10248 |
+
},
|
| 10249 |
+
{
|
| 10250 |
+
"epoch": 0.1647610174755089,
|
| 10251 |
+
"grad_norm": 1.9465237855911255,
|
| 10252 |
+
"learning_rate": 3.2797955495196485e-06,
|
| 10253 |
+
"loss": 40.9465,
|
| 10254 |
+
"step": 1459
|
| 10255 |
+
},
|
| 10256 |
+
{
|
| 10257 |
+
"epoch": 0.1648739448349849,
|
| 10258 |
+
"grad_norm": 1.9407117366790771,
|
| 10259 |
+
"learning_rate": 3.229417518519884e-06,
|
| 10260 |
+
"loss": 41.6915,
|
| 10261 |
+
"step": 1460
|
| 10262 |
+
},
|
| 10263 |
+
{
|
| 10264 |
+
"epoch": 0.16498687219446093,
|
| 10265 |
+
"grad_norm": 1.9383476972579956,
|
| 10266 |
+
"learning_rate": 3.179423043677787e-06,
|
| 10267 |
+
"loss": 41.2688,
|
| 10268 |
+
"step": 1461
|
| 10269 |
+
},
|
| 10270 |
+
{
|
| 10271 |
+
"epoch": 0.16509979955393694,
|
| 10272 |
+
"grad_norm": 1.8906264305114746,
|
| 10273 |
+
"learning_rate": 3.129812323149528e-06,
|
| 10274 |
+
"loss": 41.6925,
|
| 10275 |
+
"step": 1462
|
| 10276 |
+
},
|
| 10277 |
+
{
|
| 10278 |
+
"epoch": 0.16521272691341296,
|
| 10279 |
+
"grad_norm": 2.128037929534912,
|
| 10280 |
+
"learning_rate": 3.0805855535702478e-06,
|
| 10281 |
+
"loss": 41.8391,
|
| 10282 |
+
"step": 1463
|
| 10283 |
+
},
|
| 10284 |
+
{
|
| 10285 |
+
"epoch": 0.16532565427288898,
|
| 10286 |
+
"grad_norm": 1.9633644819259644,
|
| 10287 |
+
"learning_rate": 3.031742930053283e-06,
|
| 10288 |
+
"loss": 41.4398,
|
| 10289 |
+
"step": 1464
|
| 10290 |
+
},
|
| 10291 |
+
{
|
| 10292 |
+
"epoch": 0.165438581632365,
|
| 10293 |
+
"grad_norm": 2.0199620723724365,
|
| 10294 |
+
"learning_rate": 2.983284646189377e-06,
|
| 10295 |
+
"loss": 41.181,
|
| 10296 |
+
"step": 1465
|
| 10297 |
+
},
|
| 10298 |
+
{
|
| 10299 |
+
"epoch": 0.165551508991841,
|
| 10300 |
+
"grad_norm": 1.9136090278625488,
|
| 10301 |
+
"learning_rate": 2.9352108940459035e-06,
|
| 10302 |
+
"loss": 41.3503,
|
| 10303 |
+
"step": 1466
|
| 10304 |
+
},
|
| 10305 |
+
{
|
| 10306 |
+
"epoch": 0.165664436351317,
|
| 10307 |
+
"grad_norm": 2.233808755874634,
|
| 10308 |
+
"learning_rate": 2.887521864166154e-06,
|
| 10309 |
+
"loss": 41.1728,
|
| 10310 |
+
"step": 1467
|
| 10311 |
+
},
|
| 10312 |
+
{
|
| 10313 |
+
"epoch": 0.16577736371079302,
|
| 10314 |
+
"grad_norm": 1.917020559310913,
|
| 10315 |
+
"learning_rate": 2.8402177455685296e-06,
|
| 10316 |
+
"loss": 40.9179,
|
| 10317 |
+
"step": 1468
|
| 10318 |
+
},
|
| 10319 |
+
{
|
| 10320 |
+
"epoch": 0.16589029107026904,
|
| 10321 |
+
"grad_norm": 1.6408287286758423,
|
| 10322 |
+
"learning_rate": 2.7932987257458078e-06,
|
| 10323 |
+
"loss": 41.5375,
|
| 10324 |
+
"step": 1469
|
| 10325 |
+
},
|
| 10326 |
+
{
|
| 10327 |
+
"epoch": 0.16600321842974505,
|
| 10328 |
+
"grad_norm": 1.960963487625122,
|
| 10329 |
+
"learning_rate": 2.74676499066443e-06,
|
| 10330 |
+
"loss": 41.2832,
|
| 10331 |
+
"step": 1470
|
| 10332 |
+
},
|
| 10333 |
+
{
|
| 10334 |
+
"epoch": 0.16611614578922107,
|
| 10335 |
+
"grad_norm": 1.983742356300354,
|
| 10336 |
+
"learning_rate": 2.7006167247636825e-06,
|
| 10337 |
+
"loss": 41.3164,
|
| 10338 |
+
"step": 1471
|
| 10339 |
+
},
|
| 10340 |
+
{
|
| 10341 |
+
"epoch": 0.1662290731486971,
|
| 10342 |
+
"grad_norm": 1.8501229286193848,
|
| 10343 |
+
"learning_rate": 2.654854110955085e-06,
|
| 10344 |
+
"loss": 41.4137,
|
| 10345 |
+
"step": 1472
|
| 10346 |
+
},
|
| 10347 |
+
{
|
| 10348 |
+
"epoch": 0.1663420005081731,
|
| 10349 |
+
"grad_norm": 2.023735284805298,
|
| 10350 |
+
"learning_rate": 2.609477330621557e-06,
|
| 10351 |
+
"loss": 41.7199,
|
| 10352 |
+
"step": 1473
|
| 10353 |
+
},
|
| 10354 |
+
{
|
| 10355 |
+
"epoch": 0.16645492786764912,
|
| 10356 |
+
"grad_norm": 1.827050805091858,
|
| 10357 |
+
"learning_rate": 2.564486563616786e-06,
|
| 10358 |
+
"loss": 41.7185,
|
| 10359 |
+
"step": 1474
|
| 10360 |
+
},
|
| 10361 |
+
{
|
| 10362 |
+
"epoch": 0.16656785522712514,
|
| 10363 |
+
"grad_norm": 1.8581353425979614,
|
| 10364 |
+
"learning_rate": 2.5198819882644163e-06,
|
| 10365 |
+
"loss": 41.3243,
|
| 10366 |
+
"step": 1475
|
| 10367 |
+
},
|
| 10368 |
+
{
|
| 10369 |
+
"epoch": 0.16668078258660116,
|
| 10370 |
+
"grad_norm": 1.620455026626587,
|
| 10371 |
+
"learning_rate": 2.475663781357429e-06,
|
| 10372 |
+
"loss": 41.5048,
|
| 10373 |
+
"step": 1476
|
| 10374 |
+
},
|
| 10375 |
+
{
|
| 10376 |
+
"epoch": 0.16679370994607717,
|
| 10377 |
+
"grad_norm": 2.4298722743988037,
|
| 10378 |
+
"learning_rate": 2.4318321181574287e-06,
|
| 10379 |
+
"loss": 41.2222,
|
| 10380 |
+
"step": 1477
|
| 10381 |
+
},
|
| 10382 |
+
{
|
| 10383 |
+
"epoch": 0.1669066373055532,
|
| 10384 |
+
"grad_norm": 2.0075695514678955,
|
| 10385 |
+
"learning_rate": 2.388387172393891e-06,
|
| 10386 |
+
"loss": 41.5096,
|
| 10387 |
+
"step": 1478
|
| 10388 |
+
},
|
| 10389 |
+
{
|
| 10390 |
+
"epoch": 0.1670195646650292,
|
| 10391 |
+
"grad_norm": 1.9963698387145996,
|
| 10392 |
+
"learning_rate": 2.3453291162635505e-06,
|
| 10393 |
+
"loss": 41.2284,
|
| 10394 |
+
"step": 1479
|
| 10395 |
+
},
|
| 10396 |
+
{
|
| 10397 |
+
"epoch": 0.16713249202450522,
|
| 10398 |
+
"grad_norm": 1.8903164863586426,
|
| 10399 |
+
"learning_rate": 2.3026581204296347e-06,
|
| 10400 |
+
"loss": 41.5647,
|
| 10401 |
+
"step": 1480
|
| 10402 |
+
},
|
| 10403 |
+
{
|
| 10404 |
+
"epoch": 0.16724541938398124,
|
| 10405 |
+
"grad_norm": 1.663292407989502,
|
| 10406 |
+
"learning_rate": 2.2603743540212664e-06,
|
| 10407 |
+
"loss": 41.6011,
|
| 10408 |
+
"step": 1481
|
| 10409 |
+
},
|
| 10410 |
+
{
|
| 10411 |
+
"epoch": 0.16735834674345726,
|
| 10412 |
+
"grad_norm": 1.8637601137161255,
|
| 10413 |
+
"learning_rate": 2.218477984632783e-06,
|
| 10414 |
+
"loss": 41.5713,
|
| 10415 |
+
"step": 1482
|
| 10416 |
+
},
|
| 10417 |
+
{
|
| 10418 |
+
"epoch": 0.16747127410293328,
|
| 10419 |
+
"grad_norm": 2.0682950019836426,
|
| 10420 |
+
"learning_rate": 2.176969178322985e-06,
|
| 10421 |
+
"loss": 41.1375,
|
| 10422 |
+
"step": 1483
|
| 10423 |
+
},
|
| 10424 |
+
{
|
| 10425 |
+
"epoch": 0.1675842014624093,
|
| 10426 |
+
"grad_norm": 1.9032942056655884,
|
| 10427 |
+
"learning_rate": 2.1358480996146237e-06,
|
| 10428 |
+
"loss": 41.5655,
|
| 10429 |
+
"step": 1484
|
| 10430 |
+
},
|
| 10431 |
+
{
|
| 10432 |
+
"epoch": 0.1676971288218853,
|
| 10433 |
+
"grad_norm": 2.000901699066162,
|
| 10434 |
+
"learning_rate": 2.0951149114935674e-06,
|
| 10435 |
+
"loss": 42.0378,
|
| 10436 |
+
"step": 1485
|
| 10437 |
+
},
|
| 10438 |
+
{
|
| 10439 |
+
"epoch": 0.16781005618136133,
|
| 10440 |
+
"grad_norm": 1.8774155378341675,
|
| 10441 |
+
"learning_rate": 2.0547697754083605e-06,
|
| 10442 |
+
"loss": 41.584,
|
| 10443 |
+
"step": 1486
|
| 10444 |
+
},
|
| 10445 |
+
{
|
| 10446 |
+
"epoch": 0.16792298354083735,
|
| 10447 |
+
"grad_norm": 1.858711838722229,
|
| 10448 |
+
"learning_rate": 2.0148128512694207e-06,
|
| 10449 |
+
"loss": 41.0188,
|
| 10450 |
+
"step": 1487
|
| 10451 |
+
},
|
| 10452 |
+
{
|
| 10453 |
+
"epoch": 0.16803591090031336,
|
| 10454 |
+
"grad_norm": 2.232557535171509,
|
| 10455 |
+
"learning_rate": 1.9752442974484644e-06,
|
| 10456 |
+
"loss": 41.3469,
|
| 10457 |
+
"step": 1488
|
| 10458 |
+
},
|
| 10459 |
+
{
|
| 10460 |
+
"epoch": 0.16814883825978938,
|
| 10461 |
+
"grad_norm": 1.6817073822021484,
|
| 10462 |
+
"learning_rate": 1.9360642707779152e-06,
|
| 10463 |
+
"loss": 41.6458,
|
| 10464 |
+
"step": 1489
|
| 10465 |
+
},
|
| 10466 |
+
{
|
| 10467 |
+
"epoch": 0.1682617656192654,
|
| 10468 |
+
"grad_norm": 2.1124868392944336,
|
| 10469 |
+
"learning_rate": 1.8972729265501755e-06,
|
| 10470 |
+
"loss": 41.6895,
|
| 10471 |
+
"step": 1490
|
| 10472 |
+
},
|
| 10473 |
+
{
|
| 10474 |
+
"epoch": 0.1683746929787414,
|
| 10475 |
+
"grad_norm": 1.925371766090393,
|
| 10476 |
+
"learning_rate": 1.8588704185171557e-06,
|
| 10477 |
+
"loss": 41.23,
|
| 10478 |
+
"step": 1491
|
| 10479 |
+
},
|
| 10480 |
+
{
|
| 10481 |
+
"epoch": 0.16848762033821743,
|
| 10482 |
+
"grad_norm": 2.396677255630493,
|
| 10483 |
+
"learning_rate": 1.8208568988895558e-06,
|
| 10484 |
+
"loss": 41.5456,
|
| 10485 |
+
"step": 1492
|
| 10486 |
+
},
|
| 10487 |
+
{
|
| 10488 |
+
"epoch": 0.16860054769769345,
|
| 10489 |
+
"grad_norm": 1.963968276977539,
|
| 10490 |
+
"learning_rate": 1.7832325183363087e-06,
|
| 10491 |
+
"loss": 41.474,
|
| 10492 |
+
"step": 1493
|
| 10493 |
+
},
|
| 10494 |
+
{
|
| 10495 |
+
"epoch": 0.16871347505716947,
|
| 10496 |
+
"grad_norm": 2.036190986633301,
|
| 10497 |
+
"learning_rate": 1.7459974259839363e-06,
|
| 10498 |
+
"loss": 41.5165,
|
| 10499 |
+
"step": 1494
|
| 10500 |
+
},
|
| 10501 |
+
{
|
| 10502 |
+
"epoch": 0.16882640241664548,
|
| 10503 |
+
"grad_norm": 1.7889033555984497,
|
| 10504 |
+
"learning_rate": 1.7091517694160286e-06,
|
| 10505 |
+
"loss": 41.872,
|
| 10506 |
+
"step": 1495
|
| 10507 |
+
},
|
| 10508 |
+
{
|
| 10509 |
+
"epoch": 0.1689393297761215,
|
| 10510 |
+
"grad_norm": 1.827087163925171,
|
| 10511 |
+
"learning_rate": 1.6726956946726214e-06,
|
| 10512 |
+
"loss": 42.1097,
|
| 10513 |
+
"step": 1496
|
| 10514 |
+
},
|
| 10515 |
+
{
|
| 10516 |
+
"epoch": 0.16905225713559752,
|
| 10517 |
+
"grad_norm": 2.139960527420044,
|
| 10518 |
+
"learning_rate": 1.6366293462495963e-06,
|
| 10519 |
+
"loss": 41.3848,
|
| 10520 |
+
"step": 1497
|
| 10521 |
+
},
|
| 10522 |
+
{
|
| 10523 |
+
"epoch": 0.16916518449507353,
|
| 10524 |
+
"grad_norm": 1.8208540678024292,
|
| 10525 |
+
"learning_rate": 1.6009528670981711e-06,
|
| 10526 |
+
"loss": 41.4788,
|
| 10527 |
+
"step": 1498
|
| 10528 |
+
},
|
| 10529 |
+
{
|
| 10530 |
+
"epoch": 0.16927811185454955,
|
| 10531 |
+
"grad_norm": 2.133612871170044,
|
| 10532 |
+
"learning_rate": 1.5656663986242326e-06,
|
| 10533 |
+
"loss": 41.491,
|
| 10534 |
+
"step": 1499
|
| 10535 |
+
},
|
| 10536 |
+
{
|
| 10537 |
+
"epoch": 0.16939103921402557,
|
| 10538 |
+
"grad_norm": 2.145075559616089,
|
| 10539 |
+
"learning_rate": 1.5307700806878821e-06,
|
| 10540 |
+
"loss": 41.2078,
|
| 10541 |
+
"step": 1500
|
| 10542 |
+
},
|
| 10543 |
+
{
|
| 10544 |
+
"epoch": 0.16950396657350159,
|
| 10545 |
+
"grad_norm": 1.5864264965057373,
|
| 10546 |
+
"learning_rate": 1.4962640516028248e-06,
|
| 10547 |
+
"loss": 41.8474,
|
| 10548 |
+
"step": 1501
|
| 10549 |
+
},
|
| 10550 |
+
{
|
| 10551 |
+
"epoch": 0.1696168939329776,
|
| 10552 |
+
"grad_norm": 1.9263226985931396,
|
| 10553 |
+
"learning_rate": 1.462148448135836e-06,
|
| 10554 |
+
"loss": 41.5714,
|
| 10555 |
+
"step": 1502
|
| 10556 |
+
},
|
| 10557 |
+
{
|
| 10558 |
+
"epoch": 0.16972982129245362,
|
| 10559 |
+
"grad_norm": 1.6857662200927734,
|
| 10560 |
+
"learning_rate": 1.4284234055062185e-06,
|
| 10561 |
+
"loss": 41.3994,
|
| 10562 |
+
"step": 1503
|
| 10563 |
+
},
|
| 10564 |
+
{
|
| 10565 |
+
"epoch": 0.16984274865192964,
|
| 10566 |
+
"grad_norm": 1.8221849203109741,
|
| 10567 |
+
"learning_rate": 1.3950890573852126e-06,
|
| 10568 |
+
"loss": 42.0943,
|
| 10569 |
+
"step": 1504
|
| 10570 |
+
},
|
| 10571 |
+
{
|
| 10572 |
+
"epoch": 0.16995567601140565,
|
| 10573 |
+
"grad_norm": 1.8558201789855957,
|
| 10574 |
+
"learning_rate": 1.362145535895587e-06,
|
| 10575 |
+
"loss": 41.6201,
|
| 10576 |
+
"step": 1505
|
| 10577 |
+
},
|
| 10578 |
+
{
|
| 10579 |
+
"epoch": 0.17006860337088167,
|
| 10580 |
+
"grad_norm": 1.7473299503326416,
|
| 10581 |
+
"learning_rate": 1.3295929716110267e-06,
|
| 10582 |
+
"loss": 41.6125,
|
| 10583 |
+
"step": 1506
|
| 10584 |
+
},
|
| 10585 |
+
{
|
| 10586 |
+
"epoch": 0.1701815307303577,
|
| 10587 |
+
"grad_norm": 2.059645652770996,
|
| 10588 |
+
"learning_rate": 1.297431493555612e-06,
|
| 10589 |
+
"loss": 41.709,
|
| 10590 |
+
"step": 1507
|
| 10591 |
+
},
|
| 10592 |
+
{
|
| 10593 |
+
"epoch": 0.1702944580898337,
|
| 10594 |
+
"grad_norm": 1.7145189046859741,
|
| 10595 |
+
"learning_rate": 1.2656612292033187e-06,
|
| 10596 |
+
"loss": 41.9994,
|
| 10597 |
+
"step": 1508
|
| 10598 |
+
},
|
| 10599 |
+
{
|
| 10600 |
+
"epoch": 0.17040738544930972,
|
| 10601 |
+
"grad_norm": 2.4104723930358887,
|
| 10602 |
+
"learning_rate": 1.2342823044775743e-06,
|
| 10603 |
+
"loss": 41.7577,
|
| 10604 |
+
"step": 1509
|
| 10605 |
+
},
|
| 10606 |
+
{
|
| 10607 |
+
"epoch": 0.17052031280878574,
|
| 10608 |
+
"grad_norm": 1.7731581926345825,
|
| 10609 |
+
"learning_rate": 1.2032948437506576e-06,
|
| 10610 |
+
"loss": 41.9092,
|
| 10611 |
+
"step": 1510
|
| 10612 |
+
},
|
| 10613 |
+
{
|
| 10614 |
+
"epoch": 0.17063324016826176,
|
| 10615 |
+
"grad_norm": 1.936035394668579,
|
| 10616 |
+
"learning_rate": 1.1726989698432888e-06,
|
| 10617 |
+
"loss": 40.9722,
|
| 10618 |
+
"step": 1511
|
| 10619 |
+
},
|
| 10620 |
+
{
|
| 10621 |
+
"epoch": 0.17074616752773777,
|
| 10622 |
+
"grad_norm": 1.7427316904067993,
|
| 10623 |
+
"learning_rate": 1.1424948040240969e-06,
|
| 10624 |
+
"loss": 41.2284,
|
| 10625 |
+
"step": 1512
|
| 10626 |
+
},
|
| 10627 |
+
{
|
| 10628 |
+
"epoch": 0.1708590948872138,
|
| 10629 |
+
"grad_norm": 2.10392427444458,
|
| 10630 |
+
"learning_rate": 1.1126824660091516e-06,
|
| 10631 |
+
"loss": 40.9845,
|
| 10632 |
+
"step": 1513
|
| 10633 |
+
},
|
| 10634 |
+
{
|
| 10635 |
+
"epoch": 0.1709720222466898,
|
| 10636 |
+
"grad_norm": 1.9525678157806396,
|
| 10637 |
+
"learning_rate": 1.0832620739614663e-06,
|
| 10638 |
+
"loss": 41.9995,
|
| 10639 |
+
"step": 1514
|
| 10640 |
+
},
|
| 10641 |
+
{
|
| 10642 |
+
"epoch": 0.17108494960616583,
|
| 10643 |
+
"grad_norm": 1.9302457571029663,
|
| 10644 |
+
"learning_rate": 1.054233744490607e-06,
|
| 10645 |
+
"loss": 41.1229,
|
| 10646 |
+
"step": 1515
|
| 10647 |
+
},
|
| 10648 |
+
{
|
| 10649 |
+
"epoch": 0.17119787696564184,
|
| 10650 |
+
"grad_norm": 1.781753420829773,
|
| 10651 |
+
"learning_rate": 1.0255975926521166e-06,
|
| 10652 |
+
"loss": 41.6302,
|
| 10653 |
+
"step": 1516
|
| 10654 |
+
},
|
| 10655 |
+
{
|
| 10656 |
+
"epoch": 0.17131080432511786,
|
| 10657 |
+
"grad_norm": 1.7032277584075928,
|
| 10658 |
+
"learning_rate": 9.973537319471704e-07,
|
| 10659 |
+
"loss": 41.7304,
|
| 10660 |
+
"step": 1517
|
| 10661 |
+
},
|
| 10662 |
+
{
|
| 10663 |
+
"epoch": 0.17142373168459388,
|
| 10664 |
+
"grad_norm": 1.9419806003570557,
|
| 10665 |
+
"learning_rate": 9.695022743220317e-07,
|
| 10666 |
+
"loss": 40.5154,
|
| 10667 |
+
"step": 1518
|
| 10668 |
+
},
|
| 10669 |
+
{
|
| 10670 |
+
"epoch": 0.1715366590440699,
|
| 10671 |
+
"grad_norm": 1.8021647930145264,
|
| 10672 |
+
"learning_rate": 9.420433301676634e-07,
|
| 10673 |
+
"loss": 41.597,
|
| 10674 |
+
"step": 1519
|
| 10675 |
+
},
|
| 10676 |
+
{
|
| 10677 |
+
"epoch": 0.1716495864035459,
|
| 10678 |
+
"grad_norm": 2.064415216445923,
|
| 10679 |
+
"learning_rate": 9.149770083192955e-07,
|
| 10680 |
+
"loss": 41.2246,
|
| 10681 |
+
"step": 1520
|
| 10682 |
+
},
|
| 10683 |
+
{
|
| 10684 |
+
"epoch": 0.17176251376302193,
|
| 10685 |
+
"grad_norm": 1.8920707702636719,
|
| 10686 |
+
"learning_rate": 8.883034160559467e-07,
|
| 10687 |
+
"loss": 41.7411,
|
| 10688 |
+
"step": 1521
|
| 10689 |
+
},
|
| 10690 |
+
{
|
| 10691 |
+
"epoch": 0.17187544112249795,
|
| 10692 |
+
"grad_norm": 2.061473846435547,
|
| 10693 |
+
"learning_rate": 8.620226591000479e-07,
|
| 10694 |
+
"loss": 41.5149,
|
| 10695 |
+
"step": 1522
|
| 10696 |
+
},
|
| 10697 |
+
{
|
| 10698 |
+
"epoch": 0.17198836848197396,
|
| 10699 |
+
"grad_norm": 1.7767322063446045,
|
| 10700 |
+
"learning_rate": 8.361348416169979e-07,
|
| 10701 |
+
"loss": 41.5821,
|
| 10702 |
+
"step": 1523
|
| 10703 |
+
},
|
| 10704 |
+
{
|
| 10705 |
+
"epoch": 0.17210129584144998,
|
| 10706 |
+
"grad_norm": 1.8125262260437012,
|
| 10707 |
+
"learning_rate": 8.106400662147295e-07,
|
| 10708 |
+
"loss": 41.5991,
|
| 10709 |
+
"step": 1524
|
| 10710 |
+
},
|
| 10711 |
+
{
|
| 10712 |
+
"epoch": 0.172214223200926,
|
| 10713 |
+
"grad_norm": 1.8039369583129883,
|
| 10714 |
+
"learning_rate": 7.855384339433891e-07,
|
| 10715 |
+
"loss": 42.3645,
|
| 10716 |
+
"step": 1525
|
| 10717 |
+
},
|
| 10718 |
+
{
|
| 10719 |
+
"epoch": 0.17232715056040201,
|
| 10720 |
+
"grad_norm": 2.088252544403076,
|
| 10721 |
+
"learning_rate": 7.608300442948358e-07,
|
| 10722 |
+
"loss": 42.025,
|
| 10723 |
+
"step": 1526
|
| 10724 |
+
},
|
| 10725 |
+
{
|
| 10726 |
+
"epoch": 0.17244007791987803,
|
| 10727 |
+
"grad_norm": 1.9819536209106445,
|
| 10728 |
+
"learning_rate": 7.365149952022643e-07,
|
| 10729 |
+
"loss": 41.4678,
|
| 10730 |
+
"step": 1527
|
| 10731 |
+
},
|
| 10732 |
+
{
|
| 10733 |
+
"epoch": 0.17255300527935405,
|
| 10734 |
+
"grad_norm": 2.0395498275756836,
|
| 10735 |
+
"learning_rate": 7.125933830398945e-07,
|
| 10736 |
+
"loss": 41.7027,
|
| 10737 |
+
"step": 1528
|
| 10738 |
+
},
|
| 10739 |
+
{
|
| 10740 |
+
"epoch": 0.17266593263883007,
|
| 10741 |
+
"grad_norm": 1.763100266456604,
|
| 10742 |
+
"learning_rate": 6.890653026224935e-07,
|
| 10743 |
+
"loss": 41.1751,
|
| 10744 |
+
"step": 1529
|
| 10745 |
+
},
|
| 10746 |
+
{
|
| 10747 |
+
"epoch": 0.17277885999830608,
|
| 10748 |
+
"grad_norm": 1.7143844366073608,
|
| 10749 |
+
"learning_rate": 6.659308472050651e-07,
|
| 10750 |
+
"loss": 41.4159,
|
| 10751 |
+
"step": 1530
|
| 10752 |
+
},
|
| 10753 |
+
{
|
| 10754 |
+
"epoch": 0.1728917873577821,
|
| 10755 |
+
"grad_norm": 2.05802321434021,
|
| 10756 |
+
"learning_rate": 6.431901084824499e-07,
|
| 10757 |
+
"loss": 41.7011,
|
| 10758 |
+
"step": 1531
|
| 10759 |
+
},
|
| 10760 |
+
{
|
| 10761 |
+
"epoch": 0.17300471471725812,
|
| 10762 |
+
"grad_norm": 1.817959189414978,
|
| 10763 |
+
"learning_rate": 6.208431765889477e-07,
|
| 10764 |
+
"loss": 41.8897,
|
| 10765 |
+
"step": 1532
|
| 10766 |
+
},
|
| 10767 |
+
{
|
| 10768 |
+
"epoch": 0.17311764207673414,
|
| 10769 |
+
"grad_norm": 1.8400397300720215,
|
| 10770 |
+
"learning_rate": 5.988901400980073e-07,
|
| 10771 |
+
"loss": 41.619,
|
| 10772 |
+
"step": 1533
|
| 10773 |
+
},
|
| 10774 |
+
{
|
| 10775 |
+
"epoch": 0.17323056943621015,
|
| 10776 |
+
"grad_norm": 1.824346899986267,
|
| 10777 |
+
"learning_rate": 5.773310860218373e-07,
|
| 10778 |
+
"loss": 41.5622,
|
| 10779 |
+
"step": 1534
|
| 10780 |
+
},
|
| 10781 |
+
{
|
| 10782 |
+
"epoch": 0.17334349679568617,
|
| 10783 |
+
"grad_norm": 1.7274119853973389,
|
| 10784 |
+
"learning_rate": 5.561660998110952e-07,
|
| 10785 |
+
"loss": 41.3973,
|
| 10786 |
+
"step": 1535
|
| 10787 |
+
},
|
| 10788 |
+
{
|
| 10789 |
+
"epoch": 0.1734564241551622,
|
| 10790 |
+
"grad_norm": 1.8566423654556274,
|
| 10791 |
+
"learning_rate": 5.353952653544769e-07,
|
| 10792 |
+
"loss": 41.6753,
|
| 10793 |
+
"step": 1536
|
| 10794 |
+
},
|
| 10795 |
+
{
|
| 10796 |
+
"epoch": 0.1735693515146382,
|
| 10797 |
+
"grad_norm": 1.8928756713867188,
|
| 10798 |
+
"learning_rate": 5.150186649784728e-07,
|
| 10799 |
+
"loss": 41.1982,
|
| 10800 |
+
"step": 1537
|
| 10801 |
+
},
|
| 10802 |
+
{
|
| 10803 |
+
"epoch": 0.17368227887411422,
|
| 10804 |
+
"grad_norm": 2.570246458053589,
|
| 10805 |
+
"learning_rate": 4.950363794470003e-07,
|
| 10806 |
+
"loss": 41.7803,
|
| 10807 |
+
"step": 1538
|
| 10808 |
+
},
|
| 10809 |
+
{
|
| 10810 |
+
"epoch": 0.17379520623359024,
|
| 10811 |
+
"grad_norm": 1.7915527820587158,
|
| 10812 |
+
"learning_rate": 4.75448487961061e-07,
|
| 10813 |
+
"loss": 41.4636,
|
| 10814 |
+
"step": 1539
|
| 10815 |
+
},
|
| 10816 |
+
{
|
| 10817 |
+
"epoch": 0.17390813359306626,
|
| 10818 |
+
"grad_norm": 2.3216946125030518,
|
| 10819 |
+
"learning_rate": 4.562550681584954e-07,
|
| 10820 |
+
"loss": 41.2707,
|
| 10821 |
+
"step": 1540
|
| 10822 |
+
},
|
| 10823 |
+
{
|
| 10824 |
+
"epoch": 0.17402106095254227,
|
| 10825 |
+
"grad_norm": 2.081275701522827,
|
| 10826 |
+
"learning_rate": 4.374561961135726e-07,
|
| 10827 |
+
"loss": 41.262,
|
| 10828 |
+
"step": 1541
|
| 10829 |
+
},
|
| 10830 |
+
{
|
| 10831 |
+
"epoch": 0.1741339883120183,
|
| 10832 |
+
"grad_norm": 2.1218016147613525,
|
| 10833 |
+
"learning_rate": 4.190519463368014e-07,
|
| 10834 |
+
"loss": 41.4673,
|
| 10835 |
+
"step": 1542
|
| 10836 |
+
},
|
| 10837 |
+
{
|
| 10838 |
+
"epoch": 0.1742469156714943,
|
| 10839 |
+
"grad_norm": 1.9064199924468994,
|
| 10840 |
+
"learning_rate": 4.0104239177454206e-07,
|
| 10841 |
+
"loss": 41.0471,
|
| 10842 |
+
"step": 1543
|
| 10843 |
+
},
|
| 10844 |
+
{
|
| 10845 |
+
"epoch": 0.17435984303097032,
|
| 10846 |
+
"grad_norm": 1.7893544435501099,
|
| 10847 |
+
"learning_rate": 3.834276038087836e-07,
|
| 10848 |
+
"loss": 41.4374,
|
| 10849 |
+
"step": 1544
|
| 10850 |
+
},
|
| 10851 |
+
{
|
| 10852 |
+
"epoch": 0.17447277039044634,
|
| 10853 |
+
"grad_norm": 1.757582187652588,
|
| 10854 |
+
"learning_rate": 3.662076522568225e-07,
|
| 10855 |
+
"loss": 41.2144,
|
| 10856 |
+
"step": 1545
|
| 10857 |
+
},
|
| 10858 |
+
{
|
| 10859 |
+
"epoch": 0.17458569774992236,
|
| 10860 |
+
"grad_norm": 1.7492799758911133,
|
| 10861 |
+
"learning_rate": 3.4938260537098476e-07,
|
| 10862 |
+
"loss": 41.6423,
|
| 10863 |
+
"step": 1546
|
| 10864 |
+
},
|
| 10865 |
+
{
|
| 10866 |
+
"epoch": 0.17469862510939838,
|
| 10867 |
+
"grad_norm": 1.9989771842956543,
|
| 10868 |
+
"learning_rate": 3.3295252983838177e-07,
|
| 10869 |
+
"loss": 41.3669,
|
| 10870 |
+
"step": 1547
|
| 10871 |
+
},
|
| 10872 |
+
{
|
| 10873 |
+
"epoch": 0.1748115524688744,
|
| 10874 |
+
"grad_norm": 1.8784407377243042,
|
| 10875 |
+
"learning_rate": 3.1691749078064384e-07,
|
| 10876 |
+
"loss": 41.5393,
|
| 10877 |
+
"step": 1548
|
| 10878 |
+
},
|
| 10879 |
+
{
|
| 10880 |
+
"epoch": 0.1749244798283504,
|
| 10881 |
+
"grad_norm": 1.9025720357894897,
|
| 10882 |
+
"learning_rate": 3.0127755175362037e-07,
|
| 10883 |
+
"loss": 41.3123,
|
| 10884 |
+
"step": 1549
|
| 10885 |
+
},
|
| 10886 |
+
{
|
| 10887 |
+
"epoch": 0.17503740718782643,
|
| 10888 |
+
"grad_norm": 2.25610089302063,
|
| 10889 |
+
"learning_rate": 2.8603277474716917e-07,
|
| 10890 |
+
"loss": 41.6841,
|
| 10891 |
+
"step": 1550
|
| 10892 |
+
},
|
| 10893 |
+
{
|
| 10894 |
+
"epoch": 0.17515033454730244,
|
| 10895 |
+
"grad_norm": 1.813429594039917,
|
| 10896 |
+
"learning_rate": 2.711832201849229e-07,
|
| 10897 |
+
"loss": 41.924,
|
| 10898 |
+
"step": 1551
|
| 10899 |
+
},
|
| 10900 |
+
{
|
| 10901 |
+
"epoch": 0.17526326190677846,
|
| 10902 |
+
"grad_norm": 1.986996054649353,
|
| 10903 |
+
"learning_rate": 2.567289469239786e-07,
|
| 10904 |
+
"loss": 41.5647,
|
| 10905 |
+
"step": 1552
|
| 10906 |
+
},
|
| 10907 |
+
{
|
| 10908 |
+
"epoch": 0.17537618926625448,
|
| 10909 |
+
"grad_norm": 1.9800907373428345,
|
| 10910 |
+
"learning_rate": 2.4267001225474207e-07,
|
| 10911 |
+
"loss": 41.2997,
|
| 10912 |
+
"step": 1553
|
| 10913 |
+
},
|
| 10914 |
+
{
|
| 10915 |
+
"epoch": 0.1754891166257305,
|
| 10916 |
+
"grad_norm": 1.9732836484909058,
|
| 10917 |
+
"learning_rate": 2.2900647190068348e-07,
|
| 10918 |
+
"loss": 41.2504,
|
| 10919 |
+
"step": 1554
|
| 10920 |
+
},
|
| 10921 |
+
{
|
| 10922 |
+
"epoch": 0.1756020439852065,
|
| 10923 |
+
"grad_norm": 1.8323760032653809,
|
| 10924 |
+
"learning_rate": 2.1573838001807123e-07,
|
| 10925 |
+
"loss": 41.4995,
|
| 10926 |
+
"step": 1555
|
| 10927 |
+
},
|
| 10928 |
+
{
|
| 10929 |
+
"epoch": 0.17571497134468253,
|
| 10930 |
+
"grad_norm": 1.735358715057373,
|
| 10931 |
+
"learning_rate": 2.0286578919581632e-07,
|
| 10932 |
+
"loss": 41.8666,
|
| 10933 |
+
"step": 1556
|
| 10934 |
+
},
|
| 10935 |
+
{
|
| 10936 |
+
"epoch": 0.17582789870415855,
|
| 10937 |
+
"grad_norm": 2.0114810466766357,
|
| 10938 |
+
"learning_rate": 1.9038875045520598e-07,
|
| 10939 |
+
"loss": 41.6565,
|
| 10940 |
+
"step": 1557
|
| 10941 |
+
},
|
| 10942 |
+
{
|
| 10943 |
+
"epoch": 0.17594082606363456,
|
| 10944 |
+
"grad_norm": 1.8374451398849487,
|
| 10945 |
+
"learning_rate": 1.7830731324977036e-07,
|
| 10946 |
+
"loss": 41.449,
|
| 10947 |
+
"step": 1558
|
| 10948 |
+
},
|
| 10949 |
+
{
|
| 10950 |
+
"epoch": 0.17605375342311058,
|
| 10951 |
+
"grad_norm": 2.03023362159729,
|
| 10952 |
+
"learning_rate": 1.6662152546500499e-07,
|
| 10953 |
+
"loss": 42.007,
|
| 10954 |
+
"step": 1559
|
| 10955 |
+
},
|
| 10956 |
+
{
|
| 10957 |
+
"epoch": 0.1761666807825866,
|
| 10958 |
+
"grad_norm": 2.483574867248535,
|
| 10959 |
+
"learning_rate": 1.5533143341827094e-07,
|
| 10960 |
+
"loss": 41.2404,
|
| 10961 |
+
"step": 1560
|
| 10962 |
+
},
|
| 10963 |
+
{
|
| 10964 |
+
"epoch": 0.17627960814206262,
|
| 10965 |
+
"grad_norm": 2.344477415084839,
|
| 10966 |
+
"learning_rate": 1.4443708185853943e-07,
|
| 10967 |
+
"loss": 41.5551,
|
| 10968 |
+
"step": 1561
|
| 10969 |
+
},
|
| 10970 |
+
{
|
| 10971 |
+
"epoch": 0.17639253550153863,
|
| 10972 |
+
"grad_norm": 1.9216927289962769,
|
| 10973 |
+
"learning_rate": 1.3393851396623634e-07,
|
| 10974 |
+
"loss": 41.4717,
|
| 10975 |
+
"step": 1562
|
| 10976 |
+
},
|
| 10977 |
+
{
|
| 10978 |
+
"epoch": 0.17650546286101465,
|
| 10979 |
+
"grad_norm": 1.7364076375961304,
|
| 10980 |
+
"learning_rate": 1.238357713530869e-07,
|
| 10981 |
+
"loss": 40.9804,
|
| 10982 |
+
"step": 1563
|
| 10983 |
+
},
|
| 10984 |
+
{
|
| 10985 |
+
"epoch": 0.17661839022049067,
|
| 10986 |
+
"grad_norm": 1.9465192556381226,
|
| 10987 |
+
"learning_rate": 1.1412889406192673e-07,
|
| 10988 |
+
"loss": 41.7254,
|
| 10989 |
+
"step": 1564
|
| 10990 |
+
},
|
| 10991 |
+
{
|
| 10992 |
+
"epoch": 0.17673131757996668,
|
| 10993 |
+
"grad_norm": 1.628072738647461,
|
| 10994 |
+
"learning_rate": 1.0481792056655782e-07,
|
| 10995 |
+
"loss": 41.7692,
|
| 10996 |
+
"step": 1565
|
| 10997 |
+
},
|
| 10998 |
+
{
|
| 10999 |
+
"epoch": 0.1768442449394427,
|
| 11000 |
+
"grad_norm": 2.578184127807617,
|
| 11001 |
+
"learning_rate": 9.590288777161505e-08,
|
| 11002 |
+
"loss": 41.4533,
|
| 11003 |
+
"step": 1566
|
| 11004 |
+
},
|
| 11005 |
+
{
|
| 11006 |
+
"epoch": 0.17695717229891872,
|
| 11007 |
+
"grad_norm": 2.051501989364624,
|
| 11008 |
+
"learning_rate": 8.738383101235537e-08,
|
| 11009 |
+
"loss": 41.3184,
|
| 11010 |
+
"step": 1567
|
| 11011 |
+
},
|
| 11012 |
+
{
|
| 11013 |
+
"epoch": 0.17707009965839474,
|
| 11014 |
+
"grad_norm": 1.8154493570327759,
|
| 11015 |
+
"learning_rate": 7.926078405460224e-08,
|
| 11016 |
+
"loss": 42.1375,
|
| 11017 |
+
"step": 1568
|
| 11018 |
+
},
|
| 11019 |
+
{
|
| 11020 |
+
"epoch": 0.17718302701787075,
|
| 11021 |
+
"grad_norm": 1.7539405822753906,
|
| 11022 |
+
"learning_rate": 7.153377909455694e-08,
|
| 11023 |
+
"loss": 41.5052,
|
| 11024 |
+
"step": 1569
|
| 11025 |
+
},
|
| 11026 |
+
{
|
| 11027 |
+
"epoch": 0.17729595437734677,
|
| 11028 |
+
"grad_norm": 1.9021331071853638,
|
| 11029 |
+
"learning_rate": 6.420284675865418e-08,
|
| 11030 |
+
"loss": 41.7205,
|
| 11031 |
+
"step": 1570
|
| 11032 |
+
},
|
| 11033 |
+
{
|
| 11034 |
+
"epoch": 0.1774088817368228,
|
| 11035 |
+
"grad_norm": 1.639117956161499,
|
| 11036 |
+
"learning_rate": 5.726801610351773e-08,
|
| 11037 |
+
"loss": 41.812,
|
| 11038 |
+
"step": 1571
|
| 11039 |
+
},
|
| 11040 |
+
{
|
| 11041 |
+
"epoch": 0.1775218090962988,
|
| 11042 |
+
"grad_norm": 1.9544332027435303,
|
| 11043 |
+
"learning_rate": 5.072931461576058e-08,
|
| 11044 |
+
"loss": 41.3335,
|
| 11045 |
+
"step": 1572
|
| 11046 |
+
},
|
| 11047 |
+
{
|
| 11048 |
+
"epoch": 0.17763473645577482,
|
| 11049 |
+
"grad_norm": 2.0728046894073486,
|
| 11050 |
+
"learning_rate": 4.458676821194052e-08,
|
| 11051 |
+
"loss": 41.0186,
|
| 11052 |
+
"step": 1573
|
| 11053 |
+
},
|
| 11054 |
+
{
|
| 11055 |
+
"epoch": 0.17774766381525084,
|
| 11056 |
+
"grad_norm": 1.6041828393936157,
|
| 11057 |
+
"learning_rate": 3.8840401238415814e-08,
|
| 11058 |
+
"loss": 41.3956,
|
| 11059 |
+
"step": 1574
|
| 11060 |
+
},
|
| 11061 |
+
{
|
| 11062 |
+
"epoch": 0.17786059117472686,
|
| 11063 |
+
"grad_norm": 2.0778987407684326,
|
| 11064 |
+
"learning_rate": 3.3490236471256375e-08,
|
| 11065 |
+
"loss": 41.6968,
|
| 11066 |
+
"step": 1575
|
| 11067 |
+
},
|
| 11068 |
+
{
|
| 11069 |
+
"epoch": 0.17797351853420287,
|
| 11070 |
+
"grad_norm": 2.1188883781433105,
|
| 11071 |
+
"learning_rate": 2.853629511617717e-08,
|
| 11072 |
+
"loss": 41.161,
|
| 11073 |
+
"step": 1576
|
| 11074 |
+
},
|
| 11075 |
+
{
|
| 11076 |
+
"epoch": 0.1780864458936789,
|
| 11077 |
+
"grad_norm": 1.7552748918533325,
|
| 11078 |
+
"learning_rate": 2.3978596808427177e-08,
|
| 11079 |
+
"loss": 42.1456,
|
| 11080 |
+
"step": 1577
|
| 11081 |
+
},
|
| 11082 |
+
{
|
| 11083 |
+
"epoch": 0.1781993732531549,
|
| 11084 |
+
"grad_norm": 1.8835361003875732,
|
| 11085 |
+
"learning_rate": 1.981715961272279e-08,
|
| 11086 |
+
"loss": 41.7951,
|
| 11087 |
+
"step": 1578
|
| 11088 |
+
},
|
| 11089 |
+
{
|
| 11090 |
+
"epoch": 0.17831230061263093,
|
| 11091 |
+
"grad_norm": 1.8786207437515259,
|
| 11092 |
+
"learning_rate": 1.6052000023192292e-08,
|
| 11093 |
+
"loss": 41.368,
|
| 11094 |
+
"step": 1579
|
| 11095 |
+
},
|
| 11096 |
+
{
|
| 11097 |
+
"epoch": 0.17842522797210694,
|
| 11098 |
+
"grad_norm": 1.9332475662231445,
|
| 11099 |
+
"learning_rate": 1.2683132963253742e-08,
|
| 11100 |
+
"loss": 41.2115,
|
| 11101 |
+
"step": 1580
|
| 11102 |
+
},
|
| 11103 |
+
{
|
| 11104 |
+
"epoch": 0.17853815533158296,
|
| 11105 |
+
"grad_norm": 2.0962584018707275,
|
| 11106 |
+
"learning_rate": 9.710571785626065e-09,
|
| 11107 |
+
"loss": 41.7296,
|
| 11108 |
+
"step": 1581
|
| 11109 |
+
},
|
| 11110 |
+
{
|
| 11111 |
+
"epoch": 0.17865108269105898,
|
| 11112 |
+
"grad_norm": 1.8046841621398926,
|
| 11113 |
+
"learning_rate": 7.134328272240254e-09,
|
| 11114 |
+
"loss": 41.4222,
|
| 11115 |
+
"step": 1582
|
| 11116 |
+
},
|
| 11117 |
+
{
|
| 11118 |
+
"epoch": 0.178764010050535,
|
| 11119 |
+
"grad_norm": 2.1386616230010986,
|
| 11120 |
+
"learning_rate": 4.95441263420604e-09,
|
| 11121 |
+
"loss": 41.5938,
|
| 11122 |
+
"step": 1583
|
| 11123 |
+
},
|
| 11124 |
+
{
|
| 11125 |
+
"epoch": 0.178876937410011,
|
| 11126 |
+
"grad_norm": 1.9254075288772583,
|
| 11127 |
+
"learning_rate": 3.170833511734195e-09,
|
| 11128 |
+
"loss": 41.5884,
|
| 11129 |
+
"step": 1584
|
| 11130 |
+
},
|
| 11131 |
+
{
|
| 11132 |
+
"epoch": 0.17898986476948703,
|
| 11133 |
+
"grad_norm": 1.779555082321167,
|
| 11134 |
+
"learning_rate": 1.7835979741698261e-09,
|
| 11135 |
+
"loss": 41.1965,
|
| 11136 |
+
"step": 1585
|
| 11137 |
+
},
|
| 11138 |
+
{
|
| 11139 |
+
"epoch": 0.17910279212896305,
|
| 11140 |
+
"grad_norm": 1.5988552570343018,
|
| 11141 |
+
"learning_rate": 7.927115198924639e-10,
|
| 11142 |
+
"loss": 41.2601,
|
| 11143 |
+
"step": 1586
|
| 11144 |
+
},
|
| 11145 |
+
{
|
| 11146 |
+
"epoch": 0.17921571948843906,
|
| 11147 |
+
"grad_norm": 1.7613376379013062,
|
| 11148 |
+
"learning_rate": 1.9817807634936457e-10,
|
| 11149 |
+
"loss": 40.8857,
|
| 11150 |
+
"step": 1587
|
| 11151 |
+
},
|
| 11152 |
+
{
|
| 11153 |
+
"epoch": 0.17932864684791508,
|
| 11154 |
+
"grad_norm": 2.1686909198760986,
|
| 11155 |
+
"learning_rate": 0.0,
|
| 11156 |
+
"loss": 41.6234,
|
| 11157 |
+
"step": 1588
|
| 11158 |
+
},
|
| 11159 |
+
{
|
| 11160 |
+
"epoch": 0.17932864684791508,
|
| 11161 |
+
"eval_loss": 10.36661148071289,
|
| 11162 |
+
"eval_runtime": 9.7069,
|
| 11163 |
+
"eval_samples_per_second": 384.159,
|
| 11164 |
+
"eval_steps_per_second": 192.131,
|
| 11165 |
+
"step": 1588
|
| 11166 |
}
|
| 11167 |
],
|
| 11168 |
"logging_steps": 1,
|
| 11177 |
"should_evaluate": false,
|
| 11178 |
"should_log": false,
|
| 11179 |
"should_save": true,
|
| 11180 |
+
"should_training_stop": true
|
| 11181 |
},
|
| 11182 |
"attributes": {}
|
| 11183 |
}
|
| 11184 |
},
|
| 11185 |
+
"total_flos": 15347291258880.0,
|
| 11186 |
"train_batch_size": 2,
|
| 11187 |
"trial_name": null,
|
| 11188 |
"trial_params": null
|
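The tail of this diff shows training reaching its end state: the learning rate decays all the way to 0.0 at step 1588, `should_training_stop` flips to true, and a final evaluation pass records an eval_loss of about 10.37. To inspect a checkpoint like this one, here is a minimal sketch, assuming the standard `trainer_state.json` layout written by the Hugging Face `Trainer` (the `last-checkpoint/` path mirrors the files changed in this commit; the `log_history` key and the per-entry fields `loss`, `learning_rate`, `eval_loss`, and `step` match the entries shown above):

```python
import json
from pathlib import Path

# Load the trainer state saved alongside this checkpoint.
# Adjust the path if the checkpoint lives elsewhere.
state = json.loads(Path("last-checkpoint/trainer_state.json").read_text())

# Training entries carry "loss"; evaluation entries carry "eval_loss".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"global_step: {state['global_step']}, epoch: {state['epoch']:.5f}")
print(f"final train loss: {train_logs[-1]['loss']}")
print(f"final learning rate: {train_logs[-1]['learning_rate']}")  # 0.0 once the schedule has fully decayed
if eval_logs:
    last = eval_logs[-1]
    print(f"last eval_loss: {last['eval_loss']} at step {last['step']}")
```

Run from the repository root after downloading the checkpoint; with the state shown in this commit it should report global_step 1588 and a final learning rate of 0.0.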