Training in progress, step 1585, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:33732568a5b65dbd195f9554b75713777af2e009441071f26379252827aee485
 size 125040
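The adapter file above is a small LoRA/PEFT artifact (125,040 bytes). As a minimal sketch, assuming the checkpoint directory has been downloaded locally, an adapter like this could be attached to its base model with peft; note that "base-model-id" is a placeholder, since the base model is not named anywhere in this commit.

from transformers import AutoModelForCausalLM
from peft import PeftModel

# "base-model-id" is hypothetical: the commit does not record the base model.
base = AutoModelForCausalLM.from_pretrained("base-model-id")
# Attach the LoRA adapter stored in the checkpoint directory.
model = PeftModel.from_pretrained(base, "last-checkpoint")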
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:824605715c3d593a878b268a59a3f94c8d4cdd3e7b2cc1eb258c929357eaf6b3
 size 162868
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a94a358fb86a47a5c1716fc5fe61e6478a59f00646784448aa9223b9aac559c0
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9d5478343db9e68a8a2f1d1ec4fa05a377807845c22911fe73e8d311474a44c1
 size 1064
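Each of the four files above is a Git LFS pointer: the repository itself stores only a version line, a sha256 oid, and the byte size, while the actual payload lives in LFS storage. A minimal Python sketch (assuming the real files have been downloaded) of checking a local file against its pointer:

import hashlib
import os

def matches_pointer(path, oid_hex, size):
    # A file matches its pointer when both the byte size and the sha256 digest agree.
    if os.path.getsize(path) != size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == oid_hex

# Example using the new scheduler.pt pointer from this commit:
print(matches_pointer(
    "last-checkpoint/scheduler.pt",
    "9d5478343db9e68a8a2f1d1ec4fa05a377807845c22911fe73e8d311474a44c1",
    1064,
))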
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.15721087085895655,
   "eval_steps": 397,
-  "global_step":
+  "global_step": 1585,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -8376,6 +8376,2764 @@
       "eval_samples_per_second": 379.265,
       "eval_steps_per_second": 189.677,
       "step": 1191
-    }
+    },
+    {
+      "epoch": 0.11823050981948026,
+      "grad_norm": 2.6118314266204834,
+      "learning_rate": 2.9183618119133062e-05,
+      "loss": 43.4291,
+      "step": 1192
+    },
+    {
+      "epoch": 0.11832969648879191,
+      "grad_norm": 2.060483455657959,
+      "learning_rate": 2.904292634634793e-05,
+      "loss": 42.8555,
+      "step": 1193
+    },
+    {
+      "epoch": 0.11842888315810356,
+      "grad_norm": 2.026916027069092,
+      "learning_rate": 2.890251688871086e-05,
+      "loss": 43.3655,
+      "step": 1194
+    },
+    {
+      "epoch": 0.11852806982741519,
+      "grad_norm": 1.8628168106079102,
+      "learning_rate": 2.876239030486554e-05,
+      "loss": 43.4268,
+      "step": 1195
+    },
+    {
+      "epoch": 0.11862725649672684,
+      "grad_norm": 1.8934178352355957,
+      "learning_rate": 2.862254715233006e-05,
+      "loss": 43.2936,
+      "step": 1196
+    },
+    {
+      "epoch": 0.11872644316603849,
+      "grad_norm": 1.8872848749160767,
+      "learning_rate": 2.8482987987494957e-05,
+      "loss": 43.5978,
+      "step": 1197
+    },
+    {
+      "epoch": 0.11882562983535012,
+      "grad_norm": 1.600115418434143,
+      "learning_rate": 2.8343713365620772e-05,
+      "loss": 43.3645,
+      "step": 1198
+    },
+    {
+      "epoch": 0.11892481650466177,
+      "grad_norm": 1.702733039855957,
+      "learning_rate": 2.8204723840836045e-05,
+      "loss": 43.3277,
+      "step": 1199
+    },
+    {
+      "epoch": 0.11902400317397342,
+      "grad_norm": 1.8640950918197632,
+      "learning_rate": 2.8066019966134904e-05,
+      "loss": 43.5072,
+      "step": 1200
+    },
+    {
+      "epoch": 0.11912318984328506,
+      "grad_norm": 1.9519011974334717,
+      "learning_rate": 2.7927602293375e-05,
+      "loss": 43.2203,
+      "step": 1201
+    },
+    {
+      "epoch": 0.1192223765125967,
+      "grad_norm": 1.8447949886322021,
+      "learning_rate": 2.7789471373275344e-05,
+      "loss": 43.4123,
+      "step": 1202
+    },
+    {
+      "epoch": 0.11932156318190835,
+      "grad_norm": 2.063032865524292,
+      "learning_rate": 2.7651627755413946e-05,
+      "loss": 43.2574,
+      "step": 1203
+    },
+    {
+      "epoch": 0.11942074985121999,
+      "grad_norm": 2.1520659923553467,
+      "learning_rate": 2.751407198822583e-05,
+      "loss": 43.4786,
+      "step": 1204
+    },
+    {
+      "epoch": 0.11951993652053164,
+      "grad_norm": 2.1703543663024902,
+      "learning_rate": 2.7376804619000707e-05,
+      "loss": 43.3209,
+      "step": 1205
+    },
+    {
+      "epoch": 0.11961912318984329,
+      "grad_norm": 2.28597092628479,
+      "learning_rate": 2.7239826193880814e-05,
+      "loss": 43.2747,
+      "step": 1206
+    },
+    {
+      "epoch": 0.11971830985915492,
+      "grad_norm": 1.7825167179107666,
+      "learning_rate": 2.7103137257858868e-05,
+      "loss": 43.1259,
+      "step": 1207
+    },
+    {
+      "epoch": 0.11981749652846657,
+      "grad_norm": 1.9069029092788696,
+      "learning_rate": 2.696673835477569e-05,
+      "loss": 43.4112,
+      "step": 1208
+    },
+    {
+      "epoch": 0.11991668319777822,
+      "grad_norm": 1.938519835472107,
+      "learning_rate": 2.6830630027318238e-05,
+      "loss": 42.9466,
+      "step": 1209
+    },
+    {
+      "epoch": 0.12001586986708987,
+      "grad_norm": 1.928252100944519,
+      "learning_rate": 2.669481281701739e-05,
+      "loss": 43.5498,
+      "step": 1210
+    },
+    {
+      "epoch": 0.1201150565364015,
+      "grad_norm": 2.1338207721710205,
+      "learning_rate": 2.6559287264245612e-05,
+      "loss": 43.3255,
+      "step": 1211
+    },
+    {
+      "epoch": 0.12021424320571315,
+      "grad_norm": 1.8449137210845947,
+      "learning_rate": 2.6424053908215128e-05,
+      "loss": 42.8385,
+      "step": 1212
+    },
+    {
+      "epoch": 0.1203134298750248,
+      "grad_norm": 2.0243680477142334,
+      "learning_rate": 2.6289113286975485e-05,
+      "loss": 43.5227,
+      "step": 1213
+    },
+    {
+      "epoch": 0.12041261654433644,
+      "grad_norm": 2.2616496086120605,
+      "learning_rate": 2.615446593741161e-05,
+      "loss": 43.2939,
+      "step": 1214
+    },
+    {
+      "epoch": 0.12051180321364809,
+      "grad_norm": 2.0703885555267334,
+      "learning_rate": 2.6020112395241624e-05,
+      "loss": 43.4069,
+      "step": 1215
+    },
+    {
+      "epoch": 0.12061098988295973,
+      "grad_norm": 1.7418712377548218,
+      "learning_rate": 2.5886053195014538e-05,
+      "loss": 43.3705,
+      "step": 1216
+    },
+    {
+      "epoch": 0.12071017655227137,
+      "grad_norm": 1.9164007902145386,
+      "learning_rate": 2.5752288870108386e-05,
+      "loss": 43.2423,
+      "step": 1217
+    },
+    {
+      "epoch": 0.12080936322158302,
+      "grad_norm": 2.1956541538238525,
+      "learning_rate": 2.5618819952728034e-05,
+      "loss": 43.1793,
+      "step": 1218
+    },
+    {
+      "epoch": 0.12090854989089467,
+      "grad_norm": 2.0033652782440186,
+      "learning_rate": 2.5485646973902865e-05,
+      "loss": 43.0771,
+      "step": 1219
+    },
+    {
+      "epoch": 0.1210077365602063,
+      "grad_norm": 1.7946172952651978,
+      "learning_rate": 2.5352770463484987e-05,
+      "loss": 43.3914,
+      "step": 1220
+    },
+    {
+      "epoch": 0.12110692322951795,
+      "grad_norm": 1.674874186515808,
+      "learning_rate": 2.5220190950146827e-05,
+      "loss": 43.0962,
+      "step": 1221
+    },
+    {
+      "epoch": 0.1212061098988296,
+      "grad_norm": 1.9609571695327759,
+      "learning_rate": 2.508790896137918e-05,
+      "loss": 43.3012,
+      "step": 1222
+    },
+    {
+      "epoch": 0.12130529656814124,
+      "grad_norm": 1.8102521896362305,
+      "learning_rate": 2.495592502348918e-05,
+      "loss": 43.2702,
+      "step": 1223
+    },
+    {
+      "epoch": 0.12140448323745288,
+      "grad_norm": 1.860087275505066,
+      "learning_rate": 2.4824239661597982e-05,
+      "loss": 43.2354,
+      "step": 1224
+    },
+    {
+      "epoch": 0.12150366990676453,
+      "grad_norm": 2.1762025356292725,
+      "learning_rate": 2.4692853399638917e-05,
+      "loss": 43.3649,
+      "step": 1225
+    },
+    {
+      "epoch": 0.12160285657607618,
+      "grad_norm": 1.994041919708252,
+      "learning_rate": 2.4561766760355188e-05,
+      "loss": 43.5508,
+      "step": 1226
+    },
+    {
+      "epoch": 0.12170204324538782,
+      "grad_norm": 1.851908564567566,
+      "learning_rate": 2.443098026529803e-05,
+      "loss": 43.5718,
+      "step": 1227
+    },
+    {
+      "epoch": 0.12180122991469947,
+      "grad_norm": 1.9338980913162231,
+      "learning_rate": 2.4300494434824373e-05,
+      "loss": 42.9808,
+      "step": 1228
+    },
+    {
+      "epoch": 0.12190041658401111,
+      "grad_norm": 1.7958317995071411,
+      "learning_rate": 2.4170309788094937e-05,
+      "loss": 43.3309,
+      "step": 1229
+    },
+    {
+      "epoch": 0.12199960325332275,
+      "grad_norm": 1.8505011796951294,
+      "learning_rate": 2.4040426843072206e-05,
+      "loss": 43.1793,
+      "step": 1230
+    },
+    {
+      "epoch": 0.1220987899226344,
+      "grad_norm": 2.0302295684814453,
+      "learning_rate": 2.391084611651816e-05,
+      "loss": 43.0106,
+      "step": 1231
+    },
+    {
+      "epoch": 0.12219797659194605,
+      "grad_norm": 1.894091248512268,
+      "learning_rate": 2.3781568123992483e-05,
+      "loss": 43.1211,
+      "step": 1232
+    },
+    {
+      "epoch": 0.12229716326125768,
+      "grad_norm": 2.3484275341033936,
+      "learning_rate": 2.3652593379850284e-05,
+      "loss": 43.3877,
+      "step": 1233
+    },
+    {
+      "epoch": 0.12239634993056933,
+      "grad_norm": 1.8333877325057983,
+      "learning_rate": 2.352392239724016e-05,
+      "loss": 43.0703,
+      "step": 1234
+    },
+    {
+      "epoch": 0.12249553659988098,
+      "grad_norm": 2.122896194458008,
+      "learning_rate": 2.339555568810221e-05,
+      "loss": 43.0659,
+      "step": 1235
+    },
+    {
+      "epoch": 0.12259472326919262,
+      "grad_norm": 2.1207144260406494,
+      "learning_rate": 2.3267493763165804e-05,
+      "loss": 43.4401,
+      "step": 1236
+    },
+    {
+      "epoch": 0.12269390993850426,
+      "grad_norm": 2.0203847885131836,
+      "learning_rate": 2.3139737131947824e-05,
+      "loss": 43.4525,
+      "step": 1237
+    },
+    {
+      "epoch": 0.12279309660781591,
+      "grad_norm": 2.2465195655822754,
+      "learning_rate": 2.3012286302750353e-05,
+      "loss": 43.1463,
+      "step": 1238
+    },
+    {
+      "epoch": 0.12289228327712755,
+      "grad_norm": 1.7171586751937866,
+      "learning_rate": 2.2885141782658837e-05,
+      "loss": 43.5365,
+      "step": 1239
+    },
+    {
+      "epoch": 0.1229914699464392,
+      "grad_norm": 1.8718574047088623,
+      "learning_rate": 2.275830407754006e-05,
+      "loss": 43.411,
+      "step": 1240
+    },
+    {
+      "epoch": 0.12309065661575085,
+      "grad_norm": 2.1240851879119873,
+      "learning_rate": 2.2631773692040014e-05,
+      "loss": 42.9525,
+      "step": 1241
+    },
+    {
+      "epoch": 0.12318984328506248,
+      "grad_norm": 1.9221850633621216,
+      "learning_rate": 2.2505551129582047e-05,
+      "loss": 43.2732,
+      "step": 1242
+    },
+    {
+      "epoch": 0.12328902995437413,
+      "grad_norm": 1.9002022743225098,
+      "learning_rate": 2.237963689236472e-05,
+      "loss": 43.4209,
+      "step": 1243
+    },
+    {
+      "epoch": 0.12338821662368578,
+      "grad_norm": 2.0254499912261963,
+      "learning_rate": 2.225403148135985e-05,
+      "loss": 43.0571,
+      "step": 1244
+    },
+    {
+      "epoch": 0.12348740329299743,
+      "grad_norm": 1.7537251710891724,
+      "learning_rate": 2.212873539631061e-05,
+      "loss": 43.5282,
+      "step": 1245
+    },
+    {
+      "epoch": 0.12358658996230906,
+      "grad_norm": 2.5080766677856445,
+      "learning_rate": 2.200374913572939e-05,
+      "loss": 43.4238,
+      "step": 1246
+    },
+    {
+      "epoch": 0.12368577663162071,
+      "grad_norm": 2.3662972450256348,
+      "learning_rate": 2.187907319689597e-05,
+      "loss": 43.3069,
+      "step": 1247
+    },
+    {
+      "epoch": 0.12378496330093236,
+      "grad_norm": 1.9061388969421387,
+      "learning_rate": 2.1754708075855357e-05,
+      "loss": 43.2146,
+      "step": 1248
+    },
+    {
+      "epoch": 0.123884149970244,
+      "grad_norm": 2.3024849891662598,
+      "learning_rate": 2.163065426741603e-05,
+      "loss": 43.1173,
+      "step": 1249
+    },
+    {
+      "epoch": 0.12398333663955564,
+      "grad_norm": 2.273214101791382,
+      "learning_rate": 2.1506912265147772e-05,
+      "loss": 43.5454,
+      "step": 1250
+    },
+    {
+      "epoch": 0.12408252330886729,
+      "grad_norm": 1.804397702217102,
+      "learning_rate": 2.1383482561379787e-05,
+      "loss": 43.2978,
+      "step": 1251
+    },
+    {
+      "epoch": 0.12418170997817893,
+      "grad_norm": 2.0396947860717773,
+      "learning_rate": 2.1260365647198798e-05,
+      "loss": 43.3073,
+      "step": 1252
+    },
+    {
+      "epoch": 0.12428089664749058,
+      "grad_norm": 1.8155577182769775,
+      "learning_rate": 2.1137562012447053e-05,
+      "loss": 43.4165,
+      "step": 1253
+    },
+    {
+      "epoch": 0.12438008331680223,
+      "grad_norm": 1.8862123489379883,
+      "learning_rate": 2.1015072145720284e-05,
+      "loss": 43.2342,
+      "step": 1254
+    },
+    {
+      "epoch": 0.12447926998611386,
+      "grad_norm": 2.216561794281006,
+      "learning_rate": 2.0892896534365904e-05,
+      "loss": 43.2073,
+      "step": 1255
+    },
+    {
+      "epoch": 0.12457845665542551,
+      "grad_norm": 2.1426873207092285,
+      "learning_rate": 2.0771035664480942e-05,
+      "loss": 43.4463,
+      "step": 1256
+    },
+    {
+      "epoch": 0.12467764332473716,
+      "grad_norm": 2.119413375854492,
+      "learning_rate": 2.0649490020910244e-05,
+      "loss": 43.1654,
+      "step": 1257
+    },
+    {
+      "epoch": 0.1247768299940488,
+      "grad_norm": 2.023270606994629,
+      "learning_rate": 2.0528260087244487e-05,
+      "loss": 43.2981,
+      "step": 1258
+    },
+    {
+      "epoch": 0.12487601666336044,
+      "grad_norm": 1.965747356414795,
+      "learning_rate": 2.0407346345818134e-05,
+      "loss": 43.0578,
+      "step": 1259
+    },
+    {
+      "epoch": 0.12497520333267209,
+      "grad_norm": 2.065432071685791,
+      "learning_rate": 2.0286749277707782e-05,
+      "loss": 43.1051,
+      "step": 1260
+    },
+    {
+      "epoch": 0.12507439000198373,
+      "grad_norm": 2.0612261295318604,
+      "learning_rate": 2.016646936272987e-05,
+      "loss": 43.2649,
+      "step": 1261
+    },
+    {
+      "epoch": 0.1251735766712954,
+      "grad_norm": 1.6817632913589478,
+      "learning_rate": 2.004650707943916e-05,
+      "loss": 43.4544,
+      "step": 1262
+    },
+    {
+      "epoch": 0.12527276334060702,
+      "grad_norm": 1.6139496564865112,
+      "learning_rate": 1.9926862905126665e-05,
+      "loss": 43.1683,
+      "step": 1263
+    },
+    {
+      "epoch": 0.12537195000991866,
+      "grad_norm": 1.9174809455871582,
+      "learning_rate": 1.9807537315817604e-05,
+      "loss": 43.3405,
+      "step": 1264
+    },
+    {
+      "epoch": 0.12547113667923032,
+      "grad_norm": 2.201483964920044,
+      "learning_rate": 1.9688530786269855e-05,
+      "loss": 43.4065,
+      "step": 1265
+    },
+    {
+      "epoch": 0.12557032334854196,
+      "grad_norm": 2.204475164413452,
+      "learning_rate": 1.9569843789971598e-05,
+      "loss": 43.5532,
+      "step": 1266
+    },
+    {
+      "epoch": 0.1256695100178536,
+      "grad_norm": 2.340625047683716,
+      "learning_rate": 1.9451476799139935e-05,
+      "loss": 43.0923,
+      "step": 1267
+    },
+    {
+      "epoch": 0.12576869668716525,
+      "grad_norm": 2.176215410232544,
+      "learning_rate": 1.933343028471867e-05,
+      "loss": 43.1969,
+      "step": 1268
+    },
+    {
+      "epoch": 0.1258678833564769,
+      "grad_norm": 1.8071084022521973,
+      "learning_rate": 1.9215704716376493e-05,
+      "loss": 43.5128,
+      "step": 1269
+    },
+    {
+      "epoch": 0.12596707002578852,
+      "grad_norm": 2.074890613555908,
+      "learning_rate": 1.9098300562505266e-05,
+      "loss": 43.2647,
+      "step": 1270
+    },
+    {
+      "epoch": 0.1260662566951002,
+      "grad_norm": 2.271109104156494,
+      "learning_rate": 1.898121829021795e-05,
+      "loss": 43.1606,
+      "step": 1271
+    },
+    {
+      "epoch": 0.12616544336441182,
+      "grad_norm": 2.0816376209259033,
+      "learning_rate": 1.886445836534684e-05,
+      "loss": 43.1312,
+      "step": 1272
+    },
+    {
+      "epoch": 0.12626463003372346,
+      "grad_norm": 2.059058904647827,
+      "learning_rate": 1.8748021252441817e-05,
+      "loss": 43.2318,
+      "step": 1273
+    },
+    {
+      "epoch": 0.12636381670303512,
+      "grad_norm": 1.7361204624176025,
+      "learning_rate": 1.863190741476828e-05,
+      "loss": 42.9296,
+      "step": 1274
+    },
+    {
+      "epoch": 0.12646300337234676,
+      "grad_norm": 2.1940884590148926,
+      "learning_rate": 1.8516117314305524e-05,
+      "loss": 43.0325,
+      "step": 1275
+    },
+    {
+      "epoch": 0.1265621900416584,
+      "grad_norm": 1.8828614950180054,
+      "learning_rate": 1.8400651411744685e-05,
+      "loss": 43.1455,
+      "step": 1276
+    },
+    {
+      "epoch": 0.12666137671097005,
+      "grad_norm": 2.0172884464263916,
+      "learning_rate": 1.8285510166487152e-05,
+      "loss": 43.5407,
+      "step": 1277
+    },
+    {
+      "epoch": 0.1267605633802817,
+      "grad_norm": 2.0699732303619385,
+      "learning_rate": 1.8170694036642512e-05,
+      "loss": 43.0967,
+      "step": 1278
+    },
+    {
+      "epoch": 0.12685975004959332,
+      "grad_norm": 2.134793758392334,
+      "learning_rate": 1.805620347902681e-05,
+      "loss": 43.2935,
+      "step": 1279
+    },
+    {
+      "epoch": 0.12695893671890499,
+      "grad_norm": 1.983253002166748,
+      "learning_rate": 1.7942038949160854e-05,
+      "loss": 43.2866,
+      "step": 1280
+    },
+    {
+      "epoch": 0.12705812338821662,
+      "grad_norm": 2.1768510341644287,
+      "learning_rate": 1.782820090126818e-05,
+      "loss": 43.3122,
+      "step": 1281
+    },
+    {
+      "epoch": 0.12715731005752826,
+      "grad_norm": 2.103102445602417,
+      "learning_rate": 1.771468978827343e-05,
+      "loss": 43.2054,
+      "step": 1282
+    },
+    {
+      "epoch": 0.12725649672683992,
+      "grad_norm": 1.84243905544281,
+      "learning_rate": 1.7601506061800456e-05,
+      "loss": 43.5376,
+      "step": 1283
+    },
+    {
+      "epoch": 0.12735568339615155,
+      "grad_norm": 1.672719120979309,
+      "learning_rate": 1.7488650172170496e-05,
+      "loss": 43.2451,
+      "step": 1284
+    },
+    {
+      "epoch": 0.1274548700654632,
+      "grad_norm": 2.0615015029907227,
+      "learning_rate": 1.7376122568400532e-05,
+      "loss": 43.4047,
+      "step": 1285
+    },
+    {
+      "epoch": 0.12755405673477485,
+      "grad_norm": 1.944498062133789,
+      "learning_rate": 1.7263923698201302e-05,
+      "loss": 42.7352,
+      "step": 1286
+    },
+    {
+      "epoch": 0.12765324340408649,
+      "grad_norm": 1.8695775270462036,
+      "learning_rate": 1.715205400797568e-05,
+      "loss": 43.2086,
+      "step": 1287
+    },
+    {
+      "epoch": 0.12775243007339815,
+      "grad_norm": 1.7782195806503296,
+      "learning_rate": 1.7040513942816906e-05,
+      "loss": 43.4218,
+      "step": 1288
+    },
+    {
+      "epoch": 0.12785161674270978,
+      "grad_norm": 2.1169474124908447,
+      "learning_rate": 1.6929303946506537e-05,
+      "loss": 43.2795,
+      "step": 1289
+    },
+    {
+      "epoch": 0.12795080341202142,
+      "grad_norm": 1.8781485557556152,
+      "learning_rate": 1.681842446151313e-05,
+      "loss": 43.1115,
+      "step": 1290
+    },
+    {
+      "epoch": 0.12804999008133308,
+      "grad_norm": 1.9441792964935303,
+      "learning_rate": 1.6707875928990058e-05,
+      "loss": 43.3854,
+      "step": 1291
+    },
+    {
+      "epoch": 0.12814917675064472,
+      "grad_norm": 1.9231170415878296,
+      "learning_rate": 1.6597658788774062e-05,
+      "loss": 43.5739,
+      "step": 1292
+    },
+    {
+      "epoch": 0.12824836341995635,
+      "grad_norm": 2.0247931480407715,
+      "learning_rate": 1.6487773479383407e-05,
+      "loss": 42.9718,
+      "step": 1293
+    },
+    {
+      "epoch": 0.12834755008926801,
+      "grad_norm": 1.8527014255523682,
+      "learning_rate": 1.6378220438015933e-05,
+      "loss": 43.2071,
+      "step": 1294
+    },
+    {
+      "epoch": 0.12844673675857965,
+      "grad_norm": 1.656203031539917,
+      "learning_rate": 1.6269000100547683e-05,
+      "loss": 43.3436,
+      "step": 1295
+    },
+    {
+      "epoch": 0.12854592342789128,
+      "grad_norm": 1.7851769924163818,
+      "learning_rate": 1.6160112901530855e-05,
+      "loss": 43.2198,
+      "step": 1296
+    },
+    {
+      "epoch": 0.12864511009720295,
+      "grad_norm": 2.076759099960327,
+      "learning_rate": 1.6051559274192275e-05,
+      "loss": 43.082,
+      "step": 1297
+    },
+    {
+      "epoch": 0.12874429676651458,
+      "grad_norm": 1.891150951385498,
+      "learning_rate": 1.5943339650431576e-05,
+      "loss": 43.5083,
+      "step": 1298
+    },
+    {
+      "epoch": 0.12884348343582622,
+      "grad_norm": 1.8923282623291016,
+      "learning_rate": 1.5835454460819478e-05,
+      "loss": 43.7355,
+      "step": 1299
+    },
+    {
+      "epoch": 0.12894267010513788,
+      "grad_norm": 2.4642977714538574,
+      "learning_rate": 1.5727904134596083e-05,
+      "loss": 43.21,
+      "step": 1300
+    },
+    {
+      "epoch": 0.12904185677444951,
+      "grad_norm": 2.1630120277404785,
+      "learning_rate": 1.562068909966925e-05,
+      "loss": 43.4516,
+      "step": 1301
+    },
+    {
+      "epoch": 0.12914104344376115,
+      "grad_norm": 1.808556318283081,
+      "learning_rate": 1.5513809782612732e-05,
+      "loss": 43.1653,
+      "step": 1302
+    },
+    {
+      "epoch": 0.1292402301130728,
+      "grad_norm": 2.1409966945648193,
+      "learning_rate": 1.540726660866466e-05,
+      "loss": 43.0905,
+      "step": 1303
+    },
+    {
+      "epoch": 0.12933941678238445,
+      "grad_norm": 2.0134332180023193,
+      "learning_rate": 1.5301060001725696e-05,
+      "loss": 43.4074,
+      "step": 1304
+    },
+    {
+      "epoch": 0.12943860345169608,
+      "grad_norm": 1.9736688137054443,
+      "learning_rate": 1.5195190384357404e-05,
+      "loss": 43.4735,
+      "step": 1305
+    },
+    {
+      "epoch": 0.12953779012100775,
+      "grad_norm": 2.093540906906128,
+      "learning_rate": 1.5089658177780653e-05,
+      "loss": 42.8961,
+      "step": 1306
+    },
+    {
+      "epoch": 0.12963697679031938,
+      "grad_norm": 1.6421780586242676,
+      "learning_rate": 1.4984463801873771e-05,
+      "loss": 43.3513,
+      "step": 1307
+    },
+    {
+      "epoch": 0.12973616345963102,
+      "grad_norm": 1.8945568799972534,
+      "learning_rate": 1.4879607675171048e-05,
+      "loss": 43.3146,
+      "step": 1308
+    },
+    {
+      "epoch": 0.12983535012894268,
+      "grad_norm": 1.6984034776687622,
+      "learning_rate": 1.477509021486091e-05,
+      "loss": 43.471,
+      "step": 1309
+    },
+    {
+      "epoch": 0.1299345367982543,
+      "grad_norm": 1.8182692527770996,
+      "learning_rate": 1.467091183678444e-05,
+      "loss": 43.119,
+      "step": 1310
+    },
+    {
+      "epoch": 0.13003372346756595,
+      "grad_norm": 2.1077167987823486,
+      "learning_rate": 1.4567072955433525e-05,
+      "loss": 43.3984,
+      "step": 1311
+    },
+    {
+      "epoch": 0.1301329101368776,
+      "grad_norm": 1.8765023946762085,
+      "learning_rate": 1.4463573983949341e-05,
+      "loss": 43.4953,
+      "step": 1312
+    },
+    {
+      "epoch": 0.13023209680618925,
+      "grad_norm": 1.636028528213501,
+      "learning_rate": 1.4360415334120703e-05,
+      "loss": 43.3802,
+      "step": 1313
+    },
+    {
+      "epoch": 0.13033128347550088,
+      "grad_norm": 2.2578110694885254,
+      "learning_rate": 1.425759741638234e-05,
+      "loss": 43.1102,
+      "step": 1314
+    },
+    {
+      "epoch": 0.13043047014481254,
+      "grad_norm": 2.3040664196014404,
+      "learning_rate": 1.415512063981339e-05,
+      "loss": 43.0854,
+      "step": 1315
+    },
+    {
+      "epoch": 0.13052965681412418,
+      "grad_norm": 1.9937888383865356,
+      "learning_rate": 1.4052985412135644e-05,
+      "loss": 43.4732,
+      "step": 1316
+    },
+    {
+      "epoch": 0.1306288434834358,
+      "grad_norm": 2.158534526824951,
+      "learning_rate": 1.3951192139711967e-05,
+      "loss": 43.3408,
+      "step": 1317
+    },
+    {
+      "epoch": 0.13072803015274748,
+      "grad_norm": 1.9337798357009888,
+      "learning_rate": 1.3849741227544777e-05,
+      "loss": 43.342,
+      "step": 1318
+    },
+    {
+      "epoch": 0.1308272168220591,
+      "grad_norm": 2.0596728324890137,
+      "learning_rate": 1.3748633079274253e-05,
+      "loss": 43.4278,
+      "step": 1319
+    },
+    {
+      "epoch": 0.13092640349137075,
+      "grad_norm": 2.038022756576538,
+      "learning_rate": 1.364786809717692e-05,
+      "loss": 43.4148,
+      "step": 1320
+    },
+    {
+      "epoch": 0.1310255901606824,
+      "grad_norm": 1.8839263916015625,
+      "learning_rate": 1.3547446682163889e-05,
+      "loss": 43.4531,
+      "step": 1321
+    },
+    {
+      "epoch": 0.13112477682999404,
+      "grad_norm": 1.9606465101242065,
+      "learning_rate": 1.3447369233779328e-05,
+      "loss": 43.5344,
+      "step": 1322
+    },
+    {
+      "epoch": 0.1312239634993057,
+      "grad_norm": 1.9413743019104004,
+      "learning_rate": 1.334763615019895e-05,
+      "loss": 43.4744,
+      "step": 1323
+    },
+    {
+      "epoch": 0.13132315016861734,
+      "grad_norm": 1.854245662689209,
+      "learning_rate": 1.3248247828228245e-05,
+      "loss": 43.3531,
+      "step": 1324
+    },
+    {
+      "epoch": 0.13142233683792898,
+      "grad_norm": 2.3074612617492676,
+      "learning_rate": 1.3149204663301118e-05,
+      "loss": 42.744,
+      "step": 1325
+    },
+    {
+      "epoch": 0.13152152350724064,
+      "grad_norm": 2.1084277629852295,
+      "learning_rate": 1.30505070494781e-05,
+      "loss": 43.2841,
+      "step": 1326
+    },
+    {
+      "epoch": 0.13162071017655227,
+      "grad_norm": 1.9140610694885254,
+      "learning_rate": 1.2952155379444975e-05,
+      "loss": 43.1767,
+      "step": 1327
+    },
+    {
+      "epoch": 0.1317198968458639,
+      "grad_norm": 2.0206356048583984,
+      "learning_rate": 1.2854150044511093e-05,
+      "loss": 43.2794,
+      "step": 1328
+    },
+    {
+      "epoch": 0.13181908351517557,
+      "grad_norm": 1.9836020469665527,
+      "learning_rate": 1.2756491434607798e-05,
+      "loss": 43.3647,
+      "step": 1329
+    },
+    {
+      "epoch": 0.1319182701844872,
+      "grad_norm": 1.9017232656478882,
+      "learning_rate": 1.2659179938287035e-05,
+      "loss": 43.3222,
+      "step": 1330
+    },
+    {
+      "epoch": 0.13201745685379884,
+      "grad_norm": 1.9977402687072754,
+      "learning_rate": 1.2562215942719602e-05,
+      "loss": 43.2745,
+      "step": 1331
+    },
+    {
+      "epoch": 0.1321166435231105,
+      "grad_norm": 1.8931244611740112,
+      "learning_rate": 1.2465599833693774e-05,
+      "loss": 43.3366,
+      "step": 1332
+    },
+    {
+      "epoch": 0.13221583019242214,
+      "grad_norm": 2.0546786785125732,
+      "learning_rate": 1.2369331995613665e-05,
+      "loss": 43.6082,
+      "step": 1333
+    },
+    {
+      "epoch": 0.13231501686173378,
+      "grad_norm": 2.052030086517334,
+      "learning_rate": 1.2273412811497675e-05,
+      "loss": 43.4364,
+      "step": 1334
+    },
+    {
+      "epoch": 0.13241420353104544,
+      "grad_norm": 1.8475464582443237,
+      "learning_rate": 1.2177842662977135e-05,
+      "loss": 43.3296,
+      "step": 1335
+    },
+    {
+      "epoch": 0.13251339020035707,
+      "grad_norm": 1.7756606340408325,
+      "learning_rate": 1.2082621930294635e-05,
+      "loss": 43.4899,
+      "step": 1336
+    },
+    {
+      "epoch": 0.1326125768696687,
+      "grad_norm": 1.804787516593933,
+      "learning_rate": 1.1987750992302505e-05,
+      "loss": 43.2916,
+      "step": 1337
+    },
+    {
+      "epoch": 0.13271176353898037,
+      "grad_norm": 1.8083245754241943,
+      "learning_rate": 1.1893230226461438e-05,
+      "loss": 43.1757,
+      "step": 1338
+    },
+    {
+      "epoch": 0.132810950208292,
+      "grad_norm": 2.0019054412841797,
+      "learning_rate": 1.1799060008838791e-05,
+      "loss": 43.1772,
+      "step": 1339
+    },
+    {
+      "epoch": 0.13291013687760364,
+      "grad_norm": 2.2276241779327393,
+      "learning_rate": 1.1705240714107302e-05,
+      "loss": 43.401,
+      "step": 1340
+    },
+    {
+      "epoch": 0.1330093235469153,
+      "grad_norm": 1.9074021577835083,
+      "learning_rate": 1.1611772715543501e-05,
+      "loss": 43.6924,
+      "step": 1341
+    },
+    {
+      "epoch": 0.13310851021622694,
+      "grad_norm": 2.118516206741333,
+      "learning_rate": 1.151865638502615e-05,
+      "loss": 42.9888,
+      "step": 1342
+    },
+    {
+      "epoch": 0.13320769688553857,
+      "grad_norm": 2.1859447956085205,
+      "learning_rate": 1.142589209303494e-05,
+      "loss": 43.3181,
+      "step": 1343
+    },
+    {
+      "epoch": 0.13330688355485024,
+      "grad_norm": 2.099339008331299,
+      "learning_rate": 1.1333480208648783e-05,
+      "loss": 43.4385,
+      "step": 1344
+    },
+    {
+      "epoch": 0.13340607022416187,
+      "grad_norm": 1.902701497077942,
+      "learning_rate": 1.124142109954459e-05,
+      "loss": 43.2821,
+      "step": 1345
+    },
+    {
+      "epoch": 0.1335052568934735,
+      "grad_norm": 1.9454385042190552,
+      "learning_rate": 1.1149715131995675e-05,
+      "loss": 43.3447,
+      "step": 1346
+    },
+    {
+      "epoch": 0.13360444356278517,
+      "grad_norm": 1.7937370538711548,
+      "learning_rate": 1.1058362670870249e-05,
+      "loss": 43.4159,
+      "step": 1347
+    },
+    {
+      "epoch": 0.1337036302320968,
+      "grad_norm": 1.8799430131912231,
+      "learning_rate": 1.0967364079630115e-05,
+      "loss": 43.2879,
+      "step": 1348
+    },
+    {
+      "epoch": 0.13380281690140844,
+      "grad_norm": 2.046013116836548,
+      "learning_rate": 1.087671972032911e-05,
+      "loss": 43.5442,
+      "step": 1349
+    },
+    {
+      "epoch": 0.1339020035707201,
+      "grad_norm": 2.1626346111297607,
+      "learning_rate": 1.0786429953611666e-05,
+      "loss": 43.2945,
+      "step": 1350
+    },
+    {
+      "epoch": 0.13400119024003174,
+      "grad_norm": 2.2135393619537354,
+      "learning_rate": 1.069649513871147e-05,
+      "loss": 42.9408,
+      "step": 1351
+    },
+    {
+      "epoch": 0.13410037690934337,
+      "grad_norm": 1.9012311697006226,
+      "learning_rate": 1.060691563344991e-05,
+      "loss": 43.1664,
+      "step": 1352
+    },
+    {
+      "epoch": 0.13419956357865503,
+      "grad_norm": 1.7858079671859741,
+      "learning_rate": 1.0517691794234774e-05,
+      "loss": 43.3394,
+      "step": 1353
+    },
+    {
+      "epoch": 0.13429875024796667,
+      "grad_norm": 2.167832374572754,
+      "learning_rate": 1.042882397605871e-05,
+      "loss": 43.4867,
+      "step": 1354
+    },
+    {
+      "epoch": 0.1343979369172783,
+      "grad_norm": 1.9485419988632202,
+      "learning_rate": 1.034031253249792e-05,
+      "loss": 42.9033,
+      "step": 1355
+    },
+    {
+      "epoch": 0.13449712358658997,
+      "grad_norm": 2.4672844409942627,
+      "learning_rate": 1.0252157815710684e-05,
+      "loss": 43.4268,
+      "step": 1356
+    },
+    {
+      "epoch": 0.1345963102559016,
+      "grad_norm": 1.6653025150299072,
+      "learning_rate": 1.0164360176435961e-05,
+      "loss": 43.1918,
+      "step": 1357
+    },
+    {
+      "epoch": 0.13469549692521327,
+      "grad_norm": 1.8325529098510742,
+      "learning_rate": 1.007691996399207e-05,
+      "loss": 43.4449,
+      "step": 1358
+    },
+    {
+      "epoch": 0.1347946835945249,
+      "grad_norm": 1.8741205930709839,
+      "learning_rate": 9.989837526275192e-06,
+      "loss": 43.4513,
+      "step": 1359
+    },
+    {
+      "epoch": 0.13489387026383654,
+      "grad_norm": 2.0469043254852295,
+      "learning_rate": 9.903113209758096e-06,
+      "loss": 43.2695,
+      "step": 1360
+    },
+    {
+      "epoch": 0.1349930569331482,
+      "grad_norm": 2.0771496295928955,
+      "learning_rate": 9.816747359488632e-06,
+      "loss": 43.3757,
+      "step": 1361
+    },
+    {
+      "epoch": 0.13509224360245983,
+      "grad_norm": 1.9059168100357056,
+      "learning_rate": 9.730740319088471e-06,
+      "loss": 43.4962,
+      "step": 1362
+    },
+    {
+      "epoch": 0.13519143027177147,
+      "grad_norm": 1.8240395784378052,
+      "learning_rate": 9.6450924307517e-06,
+      "loss": 43.341,
+      "step": 1363
+    },
+    {
+      "epoch": 0.13529061694108313,
+      "grad_norm": 1.579367995262146,
+      "learning_rate": 9.559804035243435e-06,
+      "loss": 43.3169,
+      "step": 1364
+    },
+    {
+      "epoch": 0.13538980361039477,
+      "grad_norm": 1.9014238119125366,
+      "learning_rate": 9.474875471898526e-06,
+      "loss": 43.4465,
+      "step": 1365
+    },
+    {
+      "epoch": 0.1354889902797064,
+      "grad_norm": 1.7053872346878052,
+      "learning_rate": 9.39030707862013e-06,
+      "loss": 43.3769,
+      "step": 1366
+    },
+    {
+      "epoch": 0.13558817694901806,
+      "grad_norm": 1.6152608394622803,
+      "learning_rate": 9.306099191878381e-06,
+      "loss": 43.1572,
+      "step": 1367
+    },
+    {
+      "epoch": 0.1356873636183297,
+      "grad_norm": 1.9447392225265503,
+      "learning_rate": 9.222252146709142e-06,
+      "loss": 43.4481,
+      "step": 1368
+    },
+    {
+      "epoch": 0.13578655028764133,
+      "grad_norm": 2.076678991317749,
+      "learning_rate": 9.138766276712552e-06,
+      "loss": 43.2435,
+      "step": 1369
+    },
+    {
+      "epoch": 0.135885736956953,
+      "grad_norm": 2.119514226913452,
+      "learning_rate": 9.055641914051782e-06,
+      "loss": 43.4363,
+      "step": 1370
+    },
+    {
+      "epoch": 0.13598492362626463,
+      "grad_norm": 2.188779592514038,
+      "learning_rate": 8.972879389451717e-06,
+      "loss": 43.3174,
+      "step": 1371
   ],
   "logging_steps": 1,
@@ -8390,12 +11148,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
+
{
|
| 9641 |
+
"epoch": 0.13608411029557627,
|
| 9642 |
+
"grad_norm": 1.7174829244613647,
|
| 9643 |
+
"learning_rate": 8.890479032197464e-06,
|
| 9644 |
+
"loss": 43.3041,
|
| 9645 |
+
"step": 1372
|
| 9646 |
+
},
|
| 9647 |
+
{
|
| 9648 |
+
"epoch": 0.13618329696488793,
|
| 9649 |
+
"grad_norm": 1.9246824979782104,
|
| 9650 |
+
"learning_rate": 8.808441170133341e-06,
|
| 9651 |
+
"loss": 43.34,
|
| 9652 |
+
"step": 1373
|
| 9653 |
+
},
|
| 9654 |
+
{
|
| 9655 |
+
"epoch": 0.13628248363419956,
|
| 9656 |
+
"grad_norm": 2.1963016986846924,
|
| 9657 |
+
"learning_rate": 8.726766129661335e-06,
|
| 9658 |
+
"loss": 43.0848,
|
| 9659 |
+
"step": 1374
|
| 9660 |
+
},
|
| 9661 |
+
{
|
| 9662 |
+
"epoch": 0.1363816703035112,
|
| 9663 |
+
"grad_norm": 1.769993782043457,
|
| 9664 |
+
"learning_rate": 8.645454235739903e-06,
|
| 9665 |
+
"loss": 43.3259,
|
| 9666 |
+
"step": 1375
|
| 9667 |
+
},
|
| 9668 |
+
{
|
| 9669 |
+
"epoch": 0.13648085697282286,
|
| 9670 |
+
"grad_norm": 2.338942766189575,
|
| 9671 |
+
"learning_rate": 8.564505811882684e-06,
|
| 9672 |
+
"loss": 43.1052,
|
| 9673 |
+
"step": 1376
|
| 9674 |
+
},
|
| 9675 |
+
{
|
| 9676 |
+
"epoch": 0.1365800436421345,
|
| 9677 |
+
"grad_norm": 1.9182593822479248,
|
| 9678 |
+
"learning_rate": 8.483921180157151e-06,
|
| 9679 |
+
"loss": 43.1666,
|
| 9680 |
+
"step": 1377
|
| 9681 |
+
},
|
| 9682 |
+
{
|
| 9683 |
+
"epoch": 0.13667923031144613,
|
| 9684 |
+
"grad_norm": 2.3619401454925537,
|
| 9685 |
+
"learning_rate": 8.403700661183355e-06,
|
| 9686 |
+
"loss": 43.318,
|
| 9687 |
+
"step": 1378
|
| 9688 |
+
},
|
| 9689 |
+
{
|
| 9690 |
+
"epoch": 0.1367784169807578,
|
| 9691 |
+
"grad_norm": 1.9328022003173828,
|
| 9692 |
+
"learning_rate": 8.3238445741327e-06,
|
| 9693 |
+
"loss": 43.3504,
|
| 9694 |
+
"step": 1379
|
| 9695 |
+
},
|
| 9696 |
+
{
|
| 9697 |
+
"epoch": 0.13687760365006943,
|
| 9698 |
+
"grad_norm": 1.9397410154342651,
|
| 9699 |
+
"learning_rate": 8.24435323672661e-06,
|
| 9700 |
+
"loss": 43.4618,
|
| 9701 |
+
"step": 1380
|
| 9702 |
+
},
|
| 9703 |
+
{
|
| 9704 |
+
"epoch": 0.13697679031938106,
|
| 9705 |
+
"grad_norm": 1.9992848634719849,
|
| 9706 |
+
"learning_rate": 8.165226965235328e-06,
|
| 9707 |
+
"loss": 43.3246,
|
| 9708 |
+
"step": 1381
|
| 9709 |
+
},
|
| 9710 |
+
{
|
| 9711 |
+
"epoch": 0.13707597698869273,
|
| 9712 |
+
"grad_norm": 2.130439519882202,
|
| 9713 |
+
"learning_rate": 8.086466074476563e-06,
|
| 9714 |
+
"loss": 42.8643,
|
| 9715 |
+
"step": 1382
|
| 9716 |
+
},
|
| 9717 |
+
{
|
| 9718 |
+
"epoch": 0.13717516365800436,
|
| 9719 |
+
"grad_norm": 2.266770124435425,
|
| 9720 |
+
"learning_rate": 8.008070877814323e-06,
|
| 9721 |
+
"loss": 43.0878,
|
| 9722 |
+
"step": 1383
|
| 9723 |
+
},
|
| 9724 |
+
{
|
| 9725 |
+
"epoch": 0.137274350327316,
|
| 9726 |
+
"grad_norm": 1.857071042060852,
|
| 9727 |
+
"learning_rate": 7.930041687157607e-06,
|
| 9728 |
+
"loss": 43.1259,
|
| 9729 |
+
"step": 1384
|
| 9730 |
+
},
|
| 9731 |
+
{
|
| 9732 |
+
"epoch": 0.13737353699662766,
|
| 9733 |
+
"grad_norm": 1.9367189407348633,
|
| 9734 |
+
"learning_rate": 7.852378812959227e-06,
|
| 9735 |
+
"loss": 42.8461,
|
| 9736 |
+
"step": 1385
|
| 9737 |
+
},
|
| 9738 |
+
{
|
| 9739 |
+
"epoch": 0.1374727236659393,
|
| 9740 |
+
"grad_norm": 2.1069321632385254,
|
| 9741 |
+
"learning_rate": 7.775082564214576e-06,
|
| 9742 |
+
"loss": 43.413,
|
| 9743 |
+
"step": 1386
|
| 9744 |
+
},
|
| 9745 |
+
{
|
| 9746 |
+
"epoch": 0.13757191033525093,
|
| 9747 |
+
"grad_norm": 1.8119364976882935,
|
| 9748 |
+
"learning_rate": 7.698153248460271e-06,
|
| 9749 |
+
"loss": 43.4117,
|
| 9750 |
+
"step": 1387
|
| 9751 |
+
},
|
| 9752 |
+
{
|
| 9753 |
+
"epoch": 0.1376710970045626,
|
| 9754 |
+
"grad_norm": 1.9280904531478882,
|
| 9755 |
+
"learning_rate": 7.621591171773102e-06,
|
| 9756 |
+
"loss": 43.449,
|
| 9757 |
+
"step": 1388
|
| 9758 |
+
},
|
| 9759 |
+
{
|
| 9760 |
+
"epoch": 0.13777028367387423,
|
| 9761 |
+
"grad_norm": 1.9866265058517456,
|
| 9762 |
+
"learning_rate": 7.545396638768698e-06,
|
| 9763 |
+
"loss": 43.3194,
|
| 9764 |
+
"step": 1389
|
| 9765 |
+
},
|
| 9766 |
+
{
|
| 9767 |
+
"epoch": 0.13786947034318586,
|
| 9768 |
+
"grad_norm": 1.9811984300613403,
|
| 9769 |
+
"learning_rate": 7.46956995260033e-06,
|
| 9770 |
+
"loss": 43.0178,
|
| 9771 |
+
"step": 1390
|
| 9772 |
+
},
|
| 9773 |
+
{
|
| 9774 |
+
"epoch": 0.13796865701249753,
|
| 9775 |
+
"grad_norm": 1.9850718975067139,
|
| 9776 |
+
"learning_rate": 7.394111414957783e-06,
|
| 9777 |
+
"loss": 43.2482,
|
| 9778 |
+
"step": 1391
|
| 9779 |
+
},
|
| 9780 |
+
{
|
| 9781 |
+
"epoch": 0.13806784368180916,
|
| 9782 |
+
"grad_norm": 1.7533329725265503,
|
| 9783 |
+
"learning_rate": 7.319021326066055e-06,
|
| 9784 |
+
"loss": 42.986,
|
| 9785 |
+
"step": 1392
|
| 9786 |
+
},
|
| 9787 |
+
{
|
| 9788 |
+
"epoch": 0.13816703035112082,
|
| 9789 |
+
"grad_norm": 2.037351131439209,
|
| 9790 |
+
"learning_rate": 7.244299984684233e-06,
|
| 9791 |
+
"loss": 43.138,
|
| 9792 |
+
"step": 1393
|
| 9793 |
+
},
|
| 9794 |
+
{
|
| 9795 |
+
"epoch": 0.13826621702043246,
|
| 9796 |
+
"grad_norm": 2.17842173576355,
|
| 9797 |
+
"learning_rate": 7.16994768810425e-06,
|
| 9798 |
+
"loss": 43.1264,
|
| 9799 |
+
"step": 1394
|
| 9800 |
+
},
|
| 9801 |
+
{
|
| 9802 |
+
"epoch": 0.1383654036897441,
|
| 9803 |
+
"grad_norm": 1.6671637296676636,
|
| 9804 |
+
"learning_rate": 7.09596473214974e-06,
|
| 9805 |
+
"loss": 43.2072,
|
| 9806 |
+
"step": 1395
|
| 9807 |
+
},
|
| 9808 |
+
{
|
| 9809 |
+
"epoch": 0.13846459035905576,
|
| 9810 |
+
"grad_norm": 2.137096643447876,
|
| 9811 |
+
"learning_rate": 7.022351411174866e-06,
|
| 9812 |
+
"loss": 43.5997,
|
| 9813 |
+
"step": 1396
|
| 9814 |
+
},
|
| 9815 |
+
{
|
| 9816 |
+
"epoch": 0.1385637770283674,
|
| 9817 |
+
"grad_norm": 1.9496256113052368,
|
| 9818 |
+
"learning_rate": 6.949108018063111e-06,
|
| 9819 |
+
"loss": 43.1338,
|
| 9820 |
+
"step": 1397
|
| 9821 |
+
},
|
| 9822 |
+
{
|
| 9823 |
+
"epoch": 0.13866296369767903,
|
| 9824 |
+
"grad_norm": 1.8803553581237793,
|
| 9825 |
+
"learning_rate": 6.87623484422616e-06,
|
| 9826 |
+
"loss": 43.2347,
|
| 9827 |
+
"step": 1398
|
| 9828 |
+
},
|
| 9829 |
+
{
|
| 9830 |
+
"epoch": 0.1387621503669907,
|
| 9831 |
+
"grad_norm": 1.9867782592773438,
|
| 9832 |
+
"learning_rate": 6.803732179602684e-06,
|
| 9833 |
+
"loss": 43.1956,
|
| 9834 |
+
"step": 1399
|
| 9835 |
+
},
|
| 9836 |
+
{
|
| 9837 |
+
"epoch": 0.13886133703630232,
|
| 9838 |
+
"grad_norm": 1.8301564455032349,
|
| 9839 |
+
"learning_rate": 6.731600312657238e-06,
|
| 9840 |
+
"loss": 43.3644,
|
| 9841 |
+
"step": 1400
|
| 9842 |
+
},
|
| 9843 |
+
{
|
| 9844 |
+
"epoch": 0.13896052370561396,
|
| 9845 |
+
"grad_norm": 1.8990176916122437,
|
| 9846 |
+
"learning_rate": 6.6598395303791084e-06,
|
| 9847 |
+
"loss": 43.2029,
|
| 9848 |
+
"step": 1401
|
| 9849 |
+
},
|
| 9850 |
+
{
|
| 9851 |
+
"epoch": 0.13905971037492562,
|
| 9852 |
+
"grad_norm": 1.9184850454330444,
|
| 9853 |
+
"learning_rate": 6.5884501182811084e-06,
|
| 9854 |
+
"loss": 43.3385,
|
| 9855 |
+
"step": 1402
|
| 9856 |
+
},
|
| 9857 |
+
{
|
| 9858 |
+
"epoch": 0.13915889704423726,
|
| 9859 |
+
"grad_norm": 2.2970166206359863,
|
| 9860 |
+
"learning_rate": 6.517432360398556e-06,
|
| 9861 |
+
"loss": 43.1922,
|
| 9862 |
+
"step": 1403
|
| 9863 |
+
},
|
| 9864 |
+
{
|
| 9865 |
+
"epoch": 0.1392580837135489,
|
| 9866 |
+
"grad_norm": 1.886245608329773,
|
| 9867 |
+
"learning_rate": 6.4467865392880185e-06,
|
| 9868 |
+
"loss": 43.4044,
|
| 9869 |
+
"step": 1404
|
| 9870 |
+
},
|
| 9871 |
+
{
|
| 9872 |
+
"epoch": 0.13935727038286055,
|
| 9873 |
+
"grad_norm": 2.2310242652893066,
|
| 9874 |
+
"learning_rate": 6.37651293602628e-06,
|
| 9875 |
+
"loss": 43.4535,
|
| 9876 |
+
"step": 1405
|
| 9877 |
+
},
|
| 9878 |
+
{
|
| 9879 |
+
"epoch": 0.1394564570521722,
|
| 9880 |
+
"grad_norm": 2.276836395263672,
|
| 9881 |
+
"learning_rate": 6.306611830209186e-06,
|
| 9882 |
+
"loss": 43.0317,
|
| 9883 |
+
"step": 1406
|
| 9884 |
+
},
|
| 9885 |
+
{
|
| 9886 |
+
"epoch": 0.13955564372148382,
|
| 9887 |
+
"grad_norm": 1.8266184329986572,
|
| 9888 |
+
"learning_rate": 6.237083499950514e-06,
|
| 9889 |
+
"loss": 43.3212,
|
| 9890 |
+
"step": 1407
|
| 9891 |
+
},
|
| 9892 |
+
{
|
| 9893 |
+
"epoch": 0.1396548303907955,
|
| 9894 |
+
"grad_norm": 1.864218831062317,
|
| 9895 |
+
"learning_rate": 6.167928221880926e-06,
|
| 9896 |
+
"loss": 43.244,
|
| 9897 |
+
"step": 1408
|
| 9898 |
+
},
|
| 9899 |
+
{
|
| 9900 |
+
"epoch": 0.13975401706010712,
|
| 9901 |
+
"grad_norm": 1.874491572380066,
|
| 9902 |
+
"learning_rate": 6.099146271146783e-06,
|
| 9903 |
+
"loss": 43.3536,
|
| 9904 |
+
"step": 1409
|
| 9905 |
+
},
|
| 9906 |
+
{
|
| 9907 |
+
"epoch": 0.13985320372941876,
|
| 9908 |
+
"grad_norm": 2.211622714996338,
|
| 9909 |
+
"learning_rate": 6.030737921409169e-06,
|
| 9910 |
+
"loss": 43.2284,
|
| 9911 |
+
"step": 1410
|
| 9912 |
+
},
|
| 9913 |
+
{
|
| 9914 |
+
"epoch": 0.13995239039873042,
|
| 9915 |
+
"grad_norm": 2.1728804111480713,
|
| 9916 |
+
"learning_rate": 5.9627034448426545e-06,
|
| 9917 |
+
"loss": 43.3853,
|
| 9918 |
+
"step": 1411
|
| 9919 |
+
},
|
| 9920 |
+
{
|
| 9921 |
+
"epoch": 0.14005157706804205,
|
| 9922 |
+
"grad_norm": 2.009639024734497,
|
| 9923 |
+
"learning_rate": 5.895043112134324e-06,
|
| 9924 |
+
"loss": 43.141,
|
| 9925 |
+
"step": 1412
|
| 9926 |
+
},
|
| 9927 |
+
{
|
| 9928 |
+
"epoch": 0.1401507637373537,
|
| 9929 |
+
"grad_norm": 1.8612360954284668,
|
| 9930 |
+
"learning_rate": 5.827757192482686e-06,
|
| 9931 |
+
"loss": 43.4008,
|
| 9932 |
+
"step": 1413
|
| 9933 |
+
},
|
| 9934 |
+
{
|
| 9935 |
+
"epoch": 0.14024995040666535,
|
| 9936 |
+
"grad_norm": 1.8096208572387695,
|
| 9937 |
+
"learning_rate": 5.760845953596527e-06,
|
| 9938 |
+
"loss": 43.1317,
|
| 9939 |
+
"step": 1414
|
| 9940 |
+
},
|
| 9941 |
+
{
|
| 9942 |
+
"epoch": 0.140349137075977,
|
| 9943 |
+
"grad_norm": 1.7508471012115479,
|
| 9944 |
+
"learning_rate": 5.694309661693942e-06,
|
| 9945 |
+
"loss": 43.4499,
|
| 9946 |
+
"step": 1415
|
| 9947 |
+
},
|
| 9948 |
+
{
|
| 9949 |
+
"epoch": 0.14044832374528862,
|
| 9950 |
+
"grad_norm": 2.046123743057251,
|
| 9951 |
+
"learning_rate": 5.62814858150128e-06,
|
| 9952 |
+
"loss": 43.1648,
|
| 9953 |
+
"step": 1416
|
| 9954 |
+
},
|
| 9955 |
+
{
|
| 9956 |
+
"epoch": 0.14054751041460029,
|
| 9957 |
+
"grad_norm": 1.9580470323562622,
|
| 9958 |
+
"learning_rate": 5.562362976251901e-06,
|
| 9959 |
+
"loss": 43.4789,
|
| 9960 |
+
"step": 1417
|
| 9961 |
+
},
|
| 9962 |
+
{
|
| 9963 |
+
"epoch": 0.14064669708391192,
|
| 9964 |
+
"grad_norm": 1.9050185680389404,
|
| 9965 |
+
"learning_rate": 5.496953107685421e-06,
|
| 9966 |
+
"loss": 43.4049,
|
| 9967 |
+
"step": 1418
|
| 9968 |
+
},
|
| 9969 |
+
{
|
| 9970 |
+
"epoch": 0.14074588375322356,
|
| 9971 |
+
"grad_norm": 1.990479826927185,
|
| 9972 |
+
"learning_rate": 5.431919236046424e-06,
|
| 9973 |
+
"loss": 43.6193,
|
| 9974 |
+
"step": 1419
|
| 9975 |
+
},
|
| 9976 |
+
{
|
| 9977 |
+
"epoch": 0.14084507042253522,
|
| 9978 |
+
"grad_norm": 2.1615607738494873,
|
| 9979 |
+
"learning_rate": 5.367261620083575e-06,
|
| 9980 |
+
"loss": 43.3743,
|
| 9981 |
+
"step": 1420
|
| 9982 |
+
},
|
| 9983 |
+
{
|
| 9984 |
+
"epoch": 0.14094425709184685,
|
| 9985 |
+
"grad_norm": 1.8369649648666382,
|
| 9986 |
+
"learning_rate": 5.3029805170485615e-06,
|
| 9987 |
+
"loss": 43.1792,
|
| 9988 |
+
"step": 1421
|
| 9989 |
+
},
|
| 9990 |
+
{
|
| 9991 |
+
"epoch": 0.1410434437611585,
|
| 9992 |
+
"grad_norm": 1.950953483581543,
|
| 9993 |
+
"learning_rate": 5.239076182694946e-06,
|
| 9994 |
+
"loss": 43.1029,
|
| 9995 |
+
"step": 1422
|
| 9996 |
+
},
|
| 9997 |
+
{
|
| 9998 |
+
"epoch": 0.14114263043047015,
|
| 9999 |
+
"grad_norm": 1.8087800741195679,
|
| 10000 |
+
"learning_rate": 5.175548871277358e-06,
|
| 10001 |
+
"loss": 43.2936,
|
| 10002 |
+
"step": 1423
|
| 10003 |
+
},
|
| 10004 |
+
{
|
| 10005 |
+
"epoch": 0.14124181709978179,
|
| 10006 |
+
"grad_norm": 1.8455091714859009,
|
| 10007 |
+
"learning_rate": 5.1123988355503475e-06,
|
| 10008 |
+
"loss": 43.4282,
|
| 10009 |
+
"step": 1424
|
| 10010 |
+
},
|
| 10011 |
+
{
|
| 10012 |
+
"epoch": 0.14134100376909342,
|
| 10013 |
+
"grad_norm": 2.051258087158203,
|
| 10014 |
+
"learning_rate": 5.049626326767365e-06,
|
| 10015 |
+
"loss": 43.3601,
|
| 10016 |
+
"step": 1425
|
| 10017 |
+
},
|
| 10018 |
+
{
|
| 10019 |
+
"epoch": 0.14144019043840508,
|
| 10020 |
+
"grad_norm": 1.9611554145812988,
|
| 10021 |
+
"learning_rate": 4.9872315946798535e-06,
|
| 10022 |
+
"loss": 42.9922,
|
| 10023 |
+
"step": 1426
|
| 10024 |
+
},
|
| 10025 |
+
{
|
| 10026 |
+
"epoch": 0.14153937710771672,
|
| 10027 |
+
"grad_norm": 1.922616958618164,
|
| 10028 |
+
"learning_rate": 4.925214887536167e-06,
|
| 10029 |
+
"loss": 43.211,
|
| 10030 |
+
"step": 1427
|
| 10031 |
+
},
|
| 10032 |
+
{
|
| 10033 |
+
"epoch": 0.14163856377702838,
|
| 10034 |
+
"grad_norm": 1.8477638959884644,
|
| 10035 |
+
"learning_rate": 4.863576452080654e-06,
|
| 10036 |
+
"loss": 43.3722,
|
| 10037 |
+
"step": 1428
|
| 10038 |
+
},
|
| 10039 |
+
{
|
| 10040 |
+
"epoch": 0.14173775044634002,
|
| 10041 |
+
"grad_norm": 1.8967336416244507,
|
| 10042 |
+
"learning_rate": 4.80231653355262e-06,
|
| 10043 |
+
"loss": 43.1327,
|
| 10044 |
+
"step": 1429
|
| 10045 |
+
},
|
| 10046 |
+
{
|
| 10047 |
+
"epoch": 0.14183693711565165,
|
| 10048 |
+
"grad_norm": 1.9159890413284302,
|
| 10049 |
+
"learning_rate": 4.741435375685377e-06,
|
| 10050 |
+
"loss": 43.1812,
|
| 10051 |
+
"step": 1430
|
| 10052 |
+
},
|
| 10053 |
+
{
|
| 10054 |
+
"epoch": 0.14193612378496331,
|
| 10055 |
+
"grad_norm": 1.874843716621399,
|
| 10056 |
+
"learning_rate": 4.680933220705308e-06,
|
| 10057 |
+
"loss": 43.2046,
|
| 10058 |
+
"step": 1431
|
| 10059 |
+
},
|
| 10060 |
+
{
|
| 10061 |
+
"epoch": 0.14203531045427495,
|
| 10062 |
+
"grad_norm": 1.9383811950683594,
|
| 10063 |
+
"learning_rate": 4.620810309330803e-06,
|
| 10064 |
+
"loss": 43.2728,
|
| 10065 |
+
"step": 1432
|
| 10066 |
+
},
|
| 10067 |
+
{
|
| 10068 |
+
"epoch": 0.14213449712358658,
|
| 10069 |
+
"grad_norm": 1.968032717704773,
|
| 10070 |
+
"learning_rate": 4.561066880771392e-06,
|
| 10071 |
+
"loss": 43.2253,
|
| 10072 |
+
"step": 1433
|
| 10073 |
+
},
|
| 10074 |
+
{
|
| 10075 |
+
"epoch": 0.14223368379289825,
|
| 10076 |
+
"grad_norm": 1.9862526655197144,
|
| 10077 |
+
"learning_rate": 4.501703172726812e-06,
|
| 10078 |
+
"loss": 43.0553,
|
| 10079 |
+
"step": 1434
|
| 10080 |
+
},
|
| 10081 |
+
{
|
| 10082 |
+
"epoch": 0.14233287046220988,
|
| 10083 |
+
"grad_norm": 1.8617550134658813,
|
| 10084 |
+
"learning_rate": 4.442719421385922e-06,
|
| 10085 |
+
"loss": 43.5744,
|
| 10086 |
+
"step": 1435
|
| 10087 |
+
},
|
| 10088 |
+
{
|
| 10089 |
+
"epoch": 0.14243205713152152,
|
| 10090 |
+
"grad_norm": 1.9003578424453735,
|
| 10091 |
+
"learning_rate": 4.3841158614259635e-06,
|
| 10092 |
+
"loss": 42.9976,
|
| 10093 |
+
"step": 1436
|
| 10094 |
+
},
|
| 10095 |
+
{
|
| 10096 |
+
"epoch": 0.14253124380083318,
|
| 10097 |
+
"grad_norm": 2.007995128631592,
|
| 10098 |
+
"learning_rate": 4.325892726011427e-06,
|
| 10099 |
+
"loss": 43.184,
|
| 10100 |
+
"step": 1437
|
| 10101 |
+
},
|
| 10102 |
+
{
|
| 10103 |
+
"epoch": 0.14263043047014481,
|
| 10104 |
+
"grad_norm": 1.7672057151794434,
|
| 10105 |
+
"learning_rate": 4.268050246793276e-06,
|
| 10106 |
+
"loss": 43.3004,
|
| 10107 |
+
"step": 1438
|
| 10108 |
+
},
|
| 10109 |
+
{
|
| 10110 |
+
"epoch": 0.14272961713945645,
|
| 10111 |
+
"grad_norm": 1.8758594989776611,
|
| 10112 |
+
"learning_rate": 4.210588653907943e-06,
|
| 10113 |
+
"loss": 43.2087,
|
| 10114 |
+
"step": 1439
|
| 10115 |
+
},
|
| 10116 |
+
{
|
| 10117 |
+
"epoch": 0.1428288038087681,
|
| 10118 |
+
"grad_norm": 1.8925964832305908,
|
| 10119 |
+
"learning_rate": 4.153508175976428e-06,
|
| 10120 |
+
"loss": 43.4412,
|
| 10121 |
+
"step": 1440
|
| 10122 |
+
},
|
| 10123 |
+
{
|
| 10124 |
+
"epoch": 0.14292799047807975,
|
| 10125 |
+
"grad_norm": 2.3640809059143066,
|
| 10126 |
+
"learning_rate": 4.096809040103444e-06,
|
| 10127 |
+
"loss": 43.2102,
|
| 10128 |
+
"step": 1441
|
| 10129 |
+
},
|
| 10130 |
+
{
|
| 10131 |
+
"epoch": 0.14302717714739138,
|
| 10132 |
+
"grad_norm": 2.1208791732788086,
|
| 10133 |
+
"learning_rate": 4.040491471876395e-06,
|
| 10134 |
+
"loss": 43.3119,
|
| 10135 |
+
"step": 1442
|
| 10136 |
+
},
|
| 10137 |
+
{
|
| 10138 |
+
"epoch": 0.14312636381670304,
|
| 10139 |
+
"grad_norm": 1.774800181388855,
|
| 10140 |
+
"learning_rate": 3.984555695364633e-06,
|
| 10141 |
+
"loss": 43.2631,
|
| 10142 |
+
"step": 1443
|
| 10143 |
+
},
|
| 10144 |
+
{
|
| 10145 |
+
"epoch": 0.14322555048601468,
|
| 10146 |
+
"grad_norm": 1.8055626153945923,
|
| 10147 |
+
"learning_rate": 3.9290019331184145e-06,
|
| 10148 |
+
"loss": 43.5828,
|
| 10149 |
+
"step": 1444
|
| 10150 |
+
},
|
| 10151 |
+
{
|
| 10152 |
+
"epoch": 0.14332473715532631,
|
| 10153 |
+
"grad_norm": 1.9461833238601685,
|
| 10154 |
+
"learning_rate": 3.873830406168111e-06,
|
| 10155 |
+
"loss": 43.1233,
|
| 10156 |
+
"step": 1445
|
| 10157 |
+
},
|
| 10158 |
+
{
|
| 10159 |
+
"epoch": 0.14342392382463798,
|
| 10160 |
+
"grad_norm": 1.7962467670440674,
|
| 10161 |
+
"learning_rate": 3.819041334023343e-06,
|
| 10162 |
+
"loss": 43.4959,
|
| 10163 |
+
"step": 1446
|
| 10164 |
+
},
|
| 10165 |
+
{
|
| 10166 |
+
"epoch": 0.1435231104939496,
|
| 10167 |
+
"grad_norm": 2.1074981689453125,
|
| 10168 |
+
"learning_rate": 3.7646349346719955e-06,
|
| 10169 |
+
"loss": 43.4118,
|
| 10170 |
+
"step": 1447
|
| 10171 |
+
},
|
| 10172 |
+
{
|
| 10173 |
+
"epoch": 0.14362229716326125,
|
| 10174 |
+
"grad_norm": 2.183372735977173,
|
| 10175 |
+
"learning_rate": 3.71061142457948e-06,
|
| 10176 |
+
"loss": 43.2801,
|
| 10177 |
+
"step": 1448
|
| 10178 |
+
},
|
| 10179 |
+
{
|
| 10180 |
+
"epoch": 0.1437214838325729,
|
| 10181 |
+
"grad_norm": 1.9834918975830078,
|
| 10182 |
+
"learning_rate": 3.6569710186877937e-06,
|
| 10183 |
+
"loss": 43.2407,
|
| 10184 |
+
"step": 1449
|
| 10185 |
+
},
|
| 10186 |
+
{
|
| 10187 |
+
"epoch": 0.14382067050188455,
|
| 10188 |
+
"grad_norm": 2.0622262954711914,
|
| 10189 |
+
"learning_rate": 3.6037139304146762e-06,
|
| 10190 |
+
"loss": 43.3949,
|
| 10191 |
+
"step": 1450
|
| 10192 |
+
},
|
| 10193 |
+
{
|
| 10194 |
+
"epoch": 0.14391985717119618,
|
| 10195 |
+
"grad_norm": 1.7499358654022217,
|
| 10196 |
+
"learning_rate": 3.5508403716527972e-06,
|
| 10197 |
+
"loss": 43.2971,
|
| 10198 |
+
"step": 1451
|
| 10199 |
+
},
|
| 10200 |
+
{
|
| 10201 |
+
"epoch": 0.14401904384050784,
|
| 10202 |
+
"grad_norm": 2.391040802001953,
|
| 10203 |
+
"learning_rate": 3.4983505527688586e-06,
|
| 10204 |
+
"loss": 43.0924,
|
| 10205 |
+
"step": 1452
|
| 10206 |
+
},
|
| 10207 |
+
{
|
| 10208 |
+
"epoch": 0.14411823050981948,
|
| 10209 |
+
"grad_norm": 2.0308144092559814,
|
| 10210 |
+
"learning_rate": 3.446244682602817e-06,
|
| 10211 |
+
"loss": 43.3327,
|
| 10212 |
+
"step": 1453
|
| 10213 |
+
},
|
| 10214 |
+
{
|
| 10215 |
+
"epoch": 0.1442174171791311,
|
| 10216 |
+
"grad_norm": 1.9284862279891968,
|
| 10217 |
+
"learning_rate": 3.3945229684669843e-06,
|
| 10218 |
+
"loss": 43.3465,
|
| 10219 |
+
"step": 1454
|
| 10220 |
+
},
|
| 10221 |
+
{
|
| 10222 |
+
"epoch": 0.14431660384844278,
|
| 10223 |
+
"grad_norm": 2.1433889865875244,
|
| 10224 |
+
"learning_rate": 3.3431856161452835e-06,
|
| 10225 |
+
"loss": 43.3408,
|
| 10226 |
+
"step": 1455
|
| 10227 |
+
},
|
| 10228 |
+
{
|
| 10229 |
+
"epoch": 0.1444157905177544,
|
| 10230 |
+
"grad_norm": 2.054744005203247,
|
| 10231 |
+
"learning_rate": 3.292232829892361e-06,
|
| 10232 |
+
"loss": 43.3613,
|
| 10233 |
+
"step": 1456
|
| 10234 |
+
},
|
| 10235 |
+
{
|
| 10236 |
+
"epoch": 0.14451497718706605,
|
| 10237 |
+
"grad_norm": 2.4821014404296875,
|
| 10238 |
+
"learning_rate": 3.2416648124327763e-06,
|
| 10239 |
+
"loss": 43.2482,
|
| 10240 |
+
"step": 1457
|
| 10241 |
+
},
|
| 10242 |
+
{
|
| 10243 |
+
"epoch": 0.1446141638563777,
|
| 10244 |
+
"grad_norm": 1.833465576171875,
|
| 10245 |
+
"learning_rate": 3.191481764960269e-06,
|
| 10246 |
+
"loss": 43.2388,
|
| 10247 |
+
"step": 1458
|
| 10248 |
+
},
|
| 10249 |
+
{
|
| 10250 |
+
"epoch": 0.14471335052568934,
|
| 10251 |
+
"grad_norm": 1.9546949863433838,
|
| 10252 |
+
"learning_rate": 3.1416838871368924e-06,
|
| 10253 |
+
"loss": 43.2747,
|
| 10254 |
+
"step": 1459
|
| 10255 |
+
},
|
| 10256 |
+
{
|
| 10257 |
+
"epoch": 0.14481253719500098,
|
| 10258 |
+
"grad_norm": 1.8222929239273071,
|
| 10259 |
+
"learning_rate": 3.092271377092215e-06,
|
| 10260 |
+
"loss": 43.4289,
|
| 10261 |
+
"step": 1460
|
| 10262 |
+
},
|
| 10263 |
+
{
|
| 10264 |
+
"epoch": 0.14491172386431264,
|
| 10265 |
+
"grad_norm": 2.0774927139282227,
|
| 10266 |
+
"learning_rate": 3.043244431422565e-06,
|
| 10267 |
+
"loss": 43.3249,
|
| 10268 |
+
"step": 1461
|
| 10269 |
+
},
|
| 10270 |
+
{
|
| 10271 |
+
"epoch": 0.14501091053362428,
|
| 10272 |
+
"grad_norm": 1.647337794303894,
|
| 10273 |
+
"learning_rate": 2.9946032451902194e-06,
|
| 10274 |
+
"loss": 43.4462,
|
| 10275 |
+
"step": 1462
|
| 10276 |
+
},
|
| 10277 |
+
{
|
| 10278 |
+
"epoch": 0.14511009720293594,
|
| 10279 |
+
"grad_norm": 2.065192461013794,
|
| 10280 |
+
"learning_rate": 2.946348011922673e-06,
|
| 10281 |
+
"loss": 43.1077,
|
| 10282 |
+
"step": 1463
|
| 10283 |
+
},
|
| 10284 |
+
{
|
| 10285 |
+
"epoch": 0.14520928387224757,
|
| 10286 |
+
"grad_norm": 2.2489826679229736,
|
| 10287 |
+
"learning_rate": 2.8984789236118472e-06,
|
| 10288 |
+
"loss": 43.2044,
|
| 10289 |
+
"step": 1464
|
| 10290 |
+
},
|
| 10291 |
+
{
|
| 10292 |
+
"epoch": 0.1453084705415592,
|
| 10293 |
+
"grad_norm": 2.072115659713745,
|
| 10294 |
+
"learning_rate": 2.8509961707132494e-06,
|
| 10295 |
+
"loss": 43.2782,
|
| 10296 |
+
"step": 1465
|
| 10297 |
+
},
|
| 10298 |
+
{
|
| 10299 |
+
"epoch": 0.14540765721087087,
|
| 10300 |
+
"grad_norm": 2.081277370452881,
|
| 10301 |
+
"learning_rate": 2.8038999421453826e-06,
|
| 10302 |
+
"loss": 43.3782,
|
| 10303 |
+
"step": 1466
|
| 10304 |
+
},
|
| 10305 |
+
{
|
| 10306 |
+
"epoch": 0.1455068438801825,
|
| 10307 |
+
"grad_norm": 1.7385681867599487,
|
| 10308 |
+
"learning_rate": 2.7571904252888026e-06,
|
| 10309 |
+
"loss": 43.5454,
|
| 10310 |
+
"step": 1467
|
| 10311 |
+
},
|
| 10312 |
+
{
|
| 10313 |
+
"epoch": 0.14560603054949414,
|
| 10314 |
+
"grad_norm": 1.9023820161819458,
|
| 10315 |
+
"learning_rate": 2.7108678059855065e-06,
|
| 10316 |
+
"loss": 43.317,
|
| 10317 |
+
"step": 1468
|
| 10318 |
+
},
|
| 10319 |
+
{
|
| 10320 |
+
"epoch": 0.1457052172188058,
|
| 10321 |
+
"grad_norm": 1.8666795492172241,
|
| 10322 |
+
"learning_rate": 2.6649322685381783e-06,
|
| 10323 |
+
"loss": 43.2736,
|
| 10324 |
+
"step": 1469
|
| 10325 |
+
},
|
| 10326 |
+
{
|
| 10327 |
+
"epoch": 0.14580440388811744,
|
| 10328 |
+
"grad_norm": 2.1887457370758057,
|
| 10329 |
+
"learning_rate": 2.619383995709368e-06,
|
| 10330 |
+
"loss": 43.4287,
|
| 10331 |
+
"step": 1470
|
| 10332 |
+
},
|
| 10333 |
+
{
|
| 10334 |
+
"epoch": 0.14590359055742907,
|
| 10335 |
+
"grad_norm": 1.7107206583023071,
|
| 10336 |
+
"learning_rate": 2.5742231687209017e-06,
|
| 10337 |
+
"loss": 43.3221,
|
| 10338 |
+
"step": 1471
|
| 10339 |
+
},
|
| 10340 |
+
{
|
| 10341 |
+
"epoch": 0.14600277722674074,
|
| 10342 |
+
"grad_norm": 2.051560878753662,
|
| 10343 |
+
"learning_rate": 2.5294499672529837e-06,
|
| 10344 |
+
"loss": 43.2439,
|
| 10345 |
+
"step": 1472
|
| 10346 |
+
},
|
| 10347 |
+
{
|
| 10348 |
+
"epoch": 0.14610196389605237,
|
| 10349 |
+
"grad_norm": 1.5877865552902222,
|
| 10350 |
+
"learning_rate": 2.4850645694436736e-06,
|
| 10351 |
+
"loss": 43.4726,
|
| 10352 |
+
"step": 1473
|
| 10353 |
+
},
|
| 10354 |
+
{
|
| 10355 |
+
"epoch": 0.146201150565364,
|
| 10356 |
+
"grad_norm": 1.9732997417449951,
|
| 10357 |
+
"learning_rate": 2.4410671518880655e-06,
|
| 10358 |
+
"loss": 43.4365,
|
| 10359 |
+
"step": 1474
|
| 10360 |
+
},
|
| 10361 |
+
{
|
| 10362 |
+
"epoch": 0.14630033723467567,
|
| 10363 |
+
"grad_norm": 1.9900785684585571,
|
| 10364 |
+
"learning_rate": 2.3974578896375553e-06,
|
| 10365 |
+
"loss": 43.2235,
|
| 10366 |
+
"step": 1475
|
| 10367 |
+
},
|
| 10368 |
+
{
|
| 10369 |
+
"epoch": 0.1463995239039873,
|
| 10370 |
+
"grad_norm": 2.4595346450805664,
|
| 10371 |
+
"learning_rate": 2.354236956199263e-06,
|
| 10372 |
+
"loss": 42.9654,
|
| 10373 |
+
"step": 1476
|
| 10374 |
+
},
|
| 10375 |
+
{
|
| 10376 |
+
"epoch": 0.14649871057329894,
|
| 10377 |
+
"grad_norm": 2.1993980407714844,
|
| 10378 |
+
"learning_rate": 2.311404523535243e-06,
|
| 10379 |
+
"loss": 43.3009,
|
| 10380 |
+
"step": 1477
|
| 10381 |
+
},
|
| 10382 |
+
{
|
| 10383 |
+
"epoch": 0.1465978972426106,
|
| 10384 |
+
"grad_norm": 1.9754064083099365,
|
| 10385 |
+
"learning_rate": 2.2689607620618003e-06,
|
| 10386 |
+
"loss": 43.2765,
|
| 10387 |
+
"step": 1478
|
| 10388 |
+
},
|
| 10389 |
+
{
|
| 10390 |
+
"epoch": 0.14669708391192224,
|
| 10391 |
+
"grad_norm": 2.204399585723877,
|
| 10392 |
+
"learning_rate": 2.2269058406489185e-06,
|
| 10393 |
+
"loss": 43.1855,
|
| 10394 |
+
"step": 1479
|
| 10395 |
+
},
|
| 10396 |
+
{
|
| 10397 |
+
"epoch": 0.14679627058123387,
|
| 10398 |
+
"grad_norm": 1.596371054649353,
|
| 10399 |
+
"learning_rate": 2.1852399266194314e-06,
|
| 10400 |
+
"loss": 43.4602,
|
| 10401 |
+
"step": 1480
|
| 10402 |
+
},
|
| 10403 |
+
{
|
| 10404 |
+
"epoch": 0.14689545725054554,
|
| 10405 |
+
"grad_norm": 1.9966042041778564,
|
| 10406 |
+
"learning_rate": 2.1439631857485098e-06,
|
| 10407 |
+
"loss": 43.2429,
|
| 10408 |
+
"step": 1481
|
| 10409 |
+
},
|
| 10410 |
+
{
|
| 10411 |
+
"epoch": 0.14699464391985717,
|
| 10412 |
+
"grad_norm": 2.0307462215423584,
|
| 10413 |
+
"learning_rate": 2.1030757822628757e-06,
|
| 10414 |
+
"loss": 43.1845,
|
| 10415 |
+
"step": 1482
|
| 10416 |
+
},
|
| 10417 |
+
{
|
| 10418 |
+
"epoch": 0.1470938305891688,
|
| 10419 |
+
"grad_norm": 2.302483320236206,
|
| 10420 |
+
"learning_rate": 2.062577878840244e-06,
|
| 10421 |
+
"loss": 43.3822,
|
| 10422 |
+
"step": 1483
|
| 10423 |
+
},
|
| 10424 |
+
{
|
| 10425 |
+
"epoch": 0.14719301725848047,
|
| 10426 |
+
"grad_norm": 1.6022123098373413,
|
| 10427 |
+
"learning_rate": 2.022469636608604e-06,
|
| 10428 |
+
"loss": 43.4723,
|
| 10429 |
+
"step": 1484
|
| 10430 |
+
},
|
| 10431 |
+
{
|
| 10432 |
+
"epoch": 0.1472922039277921,
|
| 10433 |
+
"grad_norm": 2.2793755531311035,
|
| 10434 |
+
"learning_rate": 1.9827512151456173e-06,
|
| 10435 |
+
"loss": 43.0102,
|
| 10436 |
+
"step": 1485
|
| 10437 |
+
},
|
| 10438 |
+
{
|
| 10439 |
+
"epoch": 0.14739139059710374,
|
| 10440 |
+
"grad_norm": 1.7220956087112427,
|
| 10441 |
+
"learning_rate": 1.9434227724779984e-06,
|
| 10442 |
+
"loss": 43.4071,
|
| 10443 |
+
"step": 1486
|
| 10444 |
+
},
|
| 10445 |
+
{
|
| 10446 |
+
"epoch": 0.1474905772664154,
|
| 10447 |
+
"grad_norm": 2.1022887229919434,
|
| 10448 |
+
"learning_rate": 1.904484465080847e-06,
|
| 10449 |
+
"loss": 42.9929,
|
| 10450 |
+
"step": 1487
|
| 10451 |
+
},
|
| 10452 |
+
{
|
| 10453 |
+
"epoch": 0.14758976393572704,
|
| 10454 |
+
"grad_norm": 1.925744891166687,
|
| 10455 |
+
"learning_rate": 1.8659364478770257e-06,
|
| 10456 |
+
"loss": 43.2845,
|
| 10457 |
+
"step": 1488
|
| 10458 |
+
},
|
| 10459 |
+
{
|
| 10460 |
+
"epoch": 0.14768895060503867,
|
| 10461 |
+
"grad_norm": 2.12724232673645,
|
| 10462 |
+
"learning_rate": 1.8277788742365965e-06,
|
| 10463 |
+
"loss": 43.0592,
|
| 10464 |
+
"step": 1489
|
| 10465 |
+
},
|
| 10466 |
+
{
|
| 10467 |
+
"epoch": 0.14778813727435033,
|
| 10468 |
+
"grad_norm": 2.0481293201446533,
|
| 10469 |
+
"learning_rate": 1.790011895976118e-06,
|
| 10470 |
+
"loss": 43.1987,
|
| 10471 |
+
"step": 1490
|
| 10472 |
+
},
|
| 10473 |
+
{
|
| 10474 |
+
"epoch": 0.14788732394366197,
|
| 10475 |
+
"grad_norm": 2.0809874534606934,
|
| 10476 |
+
"learning_rate": 1.7526356633581597e-06,
|
| 10477 |
+
"loss": 43.3618,
|
| 10478 |
+
"step": 1491
|
| 10479 |
+
},
|
| 10480 |
+
{
|
| 10481 |
+
"epoch": 0.1479865106129736,
|
| 10482 |
+
"grad_norm": 1.8907266855239868,
|
| 10483 |
+
"learning_rate": 1.7156503250905898e-06,
|
| 10484 |
+
"loss": 43.1199,
|
| 10485 |
+
"step": 1492
|
| 10486 |
+
},
|
| 10487 |
+
{
|
| 10488 |
+
"epoch": 0.14808569728228527,
|
| 10489 |
+
"grad_norm": 1.9915417432785034,
|
| 10490 |
+
"learning_rate": 1.6790560283260648e-06,
|
| 10491 |
+
"loss": 43.2897,
|
| 10492 |
+
"step": 1493
|
| 10493 |
+
},
|
| 10494 |
+
{
|
| 10495 |
+
"epoch": 0.1481848839515969,
|
| 10496 |
+
"grad_norm": 2.7075445652008057,
|
| 10497 |
+
"learning_rate": 1.6428529186614195e-06,
|
| 10498 |
+
"loss": 43.4989,
|
| 10499 |
+
"step": 1494
|
| 10500 |
+
},
|
| 10501 |
+
{
|
| 10502 |
+
"epoch": 0.14828407062090854,
|
| 10503 |
+
"grad_norm": 2.030494451522827,
|
| 10504 |
+
"learning_rate": 1.6070411401370334e-06,
|
| 10505 |
+
"loss": 43.0845,
|
| 10506 |
+
"step": 1495
|
| 10507 |
+
},
|
| 10508 |
+
{
|
| 10509 |
+
"epoch": 0.1483832572902202,
|
| 10510 |
+
"grad_norm": 2.1013498306274414,
|
| 10511 |
+
"learning_rate": 1.571620835236376e-06,
|
| 10512 |
+
"loss": 43.5171,
|
| 10513 |
+
"step": 1496
|
| 10514 |
+
},
|
| 10515 |
+
{
|
| 10516 |
+
"epoch": 0.14848244395953183,
|
| 10517 |
+
"grad_norm": 2.019134521484375,
|
| 10518 |
+
"learning_rate": 1.5365921448853181e-06,
|
| 10519 |
+
"loss": 43.3562,
|
| 10520 |
+
"step": 1497
|
| 10521 |
+
},
|
| 10522 |
+
{
|
| 10523 |
+
"epoch": 0.14858163062884347,
|
| 10524 |
+
"grad_norm": 2.0027365684509277,
|
| 10525 |
+
"learning_rate": 1.501955208451633e-06,
|
| 10526 |
+
"loss": 43.3551,
|
| 10527 |
+
"step": 1498
|
| 10528 |
+
},
|
| 10529 |
+
{
|
| 10530 |
+
"epoch": 0.14868081729815513,
|
| 10531 |
+
"grad_norm": 1.9935261011123657,
|
| 10532 |
+
"learning_rate": 1.4677101637444734e-06,
|
| 10533 |
+
"loss": 43.0962,
|
| 10534 |
+
"step": 1499
|
| 10535 |
+
},
|
| 10536 |
+
{
|
| 10537 |
+
"epoch": 0.14878000396746677,
|
| 10538 |
+
"grad_norm": 1.6672489643096924,
|
| 10539 |
+
"learning_rate": 1.4338571470137063e-06,
|
| 10540 |
+
"loss": 43.124,
|
| 10541 |
+
"step": 1500
|
| 10542 |
+
},
|
| 10543 |
+
{
|
| 10544 |
+
"epoch": 0.14887919063677843,
|
| 10545 |
+
"grad_norm": 1.8993000984191895,
|
| 10546 |
+
"learning_rate": 1.400396292949513e-06,
|
| 10547 |
+
"loss": 43.2808,
|
| 10548 |
+
"step": 1501
|
| 10549 |
+
},
|
| 10550 |
+
{
|
| 10551 |
+
"epoch": 0.14897837730609007,
|
| 10552 |
+
"grad_norm": 2.341925859451294,
|
| 10553 |
+
"learning_rate": 1.367327734681756e-06,
|
| 10554 |
+
"loss": 43.1186,
|
| 10555 |
+
"step": 1502
|
| 10556 |
+
},
|
| 10557 |
+
{
|
| 10558 |
+
"epoch": 0.1490775639754017,
|
| 10559 |
+
"grad_norm": 2.009824275970459,
|
| 10560 |
+
"learning_rate": 1.334651603779491e-06,
|
| 10561 |
+
"loss": 43.378,
|
| 10562 |
+
"step": 1503
|
| 10563 |
+
},
|
| 10564 |
+
{
|
| 10565 |
+
"epoch": 0.14917675064471336,
|
| 10566 |
+
"grad_norm": 1.925865650177002,
|
| 10567 |
+
"learning_rate": 1.3023680302504338e-06,
|
| 10568 |
+
"loss": 43.632,
|
| 10569 |
+
"step": 1504
|
| 10570 |
+
},
|
| 10571 |
+
{
|
| 10572 |
+
"epoch": 0.149275937314025,
|
| 10573 |
+
"grad_norm": 2.3476693630218506,
|
| 10574 |
+
"learning_rate": 1.2704771425404382e-06,
|
| 10575 |
+
"loss": 43.2814,
|
| 10576 |
+
"step": 1505
|
| 10577 |
+
},
|
| 10578 |
+
{
|
| 10579 |
+
"epoch": 0.14937512398333663,
|
| 10580 |
+
"grad_norm": 2.0322370529174805,
|
| 10581 |
+
"learning_rate": 1.2389790675329748e-06,
|
| 10582 |
+
"loss": 43.3698,
|
| 10583 |
+
"step": 1506
|
| 10584 |
+
},
|
| 10585 |
+
{
|
| 10586 |
+
"epoch": 0.1494743106526483,
|
| 10587 |
+
"grad_norm": 2.102142095565796,
|
| 10588 |
+
"learning_rate": 1.207873930548653e-06,
|
| 10589 |
+
"loss": 43.3187,
|
| 10590 |
+
"step": 1507
|
| 10591 |
+
},
|
| 10592 |
+
{
|
| 10593 |
+
"epoch": 0.14957349732195993,
|
| 10594 |
+
"grad_norm": 2.274484395980835,
|
| 10595 |
+
"learning_rate": 1.1771618553447216e-06,
|
| 10596 |
+
"loss": 43.1627,
|
| 10597 |
+
"step": 1508
|
| 10598 |
+
},
|
| 10599 |
+
{
|
| 10600 |
+
"epoch": 0.14967268399127157,
|
| 10601 |
+
"grad_norm": 1.8524329662322998,
|
| 10602 |
+
"learning_rate": 1.1468429641145472e-06,
|
| 10603 |
+
"loss": 43.1895,
|
| 10604 |
+
"step": 1509
|
| 10605 |
+
},
|
| 10606 |
+
{
|
| 10607 |
+
"epoch": 0.14977187066058323,
|
| 10608 |
+
"grad_norm": 2.1060593128204346,
|
| 10609 |
+
"learning_rate": 1.1169173774871478e-06,
|
| 10610 |
+
"loss": 43.2468,
|
| 10611 |
+
"step": 1510
|
| 10612 |
+
},
|
| 10613 |
+
{
|
| 10614 |
+
"epoch": 0.14987105732989486,
|
| 10615 |
+
"grad_norm": 2.0444247722625732,
|
| 10616 |
+
"learning_rate": 1.0873852145267148e-06,
|
| 10617 |
+
"loss": 43.4034,
|
| 10618 |
+
"step": 1511
|
| 10619 |
+
},
|
| 10620 |
+
{
|
| 10621 |
+
"epoch": 0.1499702439992065,
|
| 10622 |
+
"grad_norm": 1.883766770362854,
|
| 10623 |
+
"learning_rate": 1.0582465927321373e-06,
|
| 10624 |
+
"loss": 43.2545,
|
| 10625 |
+
"step": 1512
|
| 10626 |
+
},
|
| 10627 |
+
{
|
| 10628 |
+
"epoch": 0.15006943066851816,
|
| 10629 |
+
"grad_norm": 1.9731440544128418,
|
| 10630 |
+
"learning_rate": 1.0295016280365112e-06,
|
| 10631 |
+
"loss": 43.3445,
|
| 10632 |
+
"step": 1513
|
| 10633 |
+
},
|
| 10634 |
+
{
|
| 10635 |
+
"epoch": 0.1501686173378298,
|
| 10636 |
+
"grad_norm": 2.490894317626953,
|
| 10637 |
+
"learning_rate": 1.0011504348067303e-06,
|
| 10638 |
+
"loss": 43.4394,
|
| 10639 |
+
"step": 1514
|
| 10640 |
+
},
|
| 10641 |
+
{
|
| 10642 |
+
"epoch": 0.15026780400714143,
|
| 10643 |
+
"grad_norm": 2.189347505569458,
|
| 10644 |
+
"learning_rate": 9.731931258429638e-07,
|
| 10645 |
+
"loss": 43.324,
|
| 10646 |
+
"step": 1515
|
| 10647 |
+
},
|
| 10648 |
+
{
|
| 10649 |
+
"epoch": 0.1503669906764531,
|
| 10650 |
+
"grad_norm": 2.066657066345215,
|
| 10651 |
+
"learning_rate": 9.456298123782902e-07,
|
| 10652 |
+
"loss": 43.1968,
|
| 10653 |
+
"step": 1516
|
| 10654 |
+
},
|
| 10655 |
+
{
|
| 10656 |
+
"epoch": 0.15046617734576473,
|
| 10657 |
+
"grad_norm": 1.8031419515609741,
|
| 10658 |
+
"learning_rate": 9.18460604078164e-07,
|
| 10659 |
+
"loss": 43.6127,
|
| 10660 |
+
"step": 1517
|
| 10661 |
+
},
|
| 10662 |
+
{
|
| 10663 |
+
"epoch": 0.15056536401507636,
|
| 10664 |
+
"grad_norm": 2.2260806560516357,
|
| 10665 |
+
"learning_rate": 8.916856090400383e-07,
|
| 10666 |
+
"loss": 43.149,
|
| 10667 |
+
"step": 1518
|
| 10668 |
+
},
|
| 10669 |
+
{
|
| 10670 |
+
"epoch": 0.15066455068438803,
|
| 10671 |
+
"grad_norm": 2.0802321434020996,
|
| 10672 |
+
"learning_rate": 8.65304933792932e-07,
|
| 10673 |
+
"loss": 43.2184,
|
| 10674 |
+
"step": 1519
|
| 10675 |
+
},
|
| 10676 |
+
{
|
| 10677 |
+
"epoch": 0.15076373735369966,
|
| 10678 |
+
"grad_norm": 2.15700101852417,
|
| 10679 |
+
"learning_rate": 8.393186832969746e-07,
|
| 10680 |
+
"loss": 43.3825,
|
| 10681 |
+
"step": 1520
|
| 10682 |
+
},
|
| 10683 |
+
{
|
| 10684 |
+
"epoch": 0.1508629240230113,
|
| 10685 |
+
"grad_norm": 1.812803864479065,
|
| 10686 |
+
"learning_rate": 8.137269609430176e-07,
|
| 10687 |
+
"loss": 43.4337,
|
| 10688 |
+
"step": 1521
|
| 10689 |
+
},
|
| 10690 |
+
{
|
| 10691 |
+
"epoch": 0.15096211069232296,
|
| 10692 |
+
"grad_norm": 1.882633090019226,
|
| 10693 |
+
"learning_rate": 7.885298685522235e-07,
|
| 10694 |
+
"loss": 42.9118,
|
| 10695 |
+
"step": 1522
|
| 10696 |
+
},
|
| 10697 |
+
{
|
| 10698 |
+
"epoch": 0.1510612973616346,
|
| 10699 |
+
"grad_norm": 2.022883653640747,
|
| 10700 |
+
"learning_rate": 7.637275063756111e-07,
|
| 10701 |
+
"loss": 43.3613,
|
| 10702 |
+
"step": 1523
|
| 10703 |
+
},
|
| 10704 |
+
{
|
| 10705 |
+
"epoch": 0.15116048403094623,
|
| 10706 |
+
"grad_norm": 2.25357985496521,
|
| 10707 |
+
"learning_rate": 7.393199730937439e-07,
|
| 10708 |
+
"loss": 43.1635,
|
| 10709 |
+
"step": 1524
|
| 10710 |
+
},
|
| 10711 |
+
{
|
| 10712 |
+
"epoch": 0.1512596707002579,
|
| 10713 |
+
"grad_norm": 1.771923303604126,
|
| 10714 |
+
"learning_rate": 7.153073658162646e-07,
|
| 10715 |
+
"loss": 43.1556,
|
| 10716 |
+
"step": 1525
|
| 10717 |
+
},
|
| 10718 |
+
{
|
| 10719 |
+
"epoch": 0.15135885736956953,
|
| 10720 |
+
"grad_norm": 2.2379488945007324,
|
| 10721 |
+
"learning_rate": 6.916897800815503e-07,
|
| 10722 |
+
"loss": 43.3821,
|
| 10723 |
+
"step": 1526
|
| 10724 |
+
},
|
| 10725 |
+
{
|
| 10726 |
+
"epoch": 0.15145804403888116,
|
| 10727 |
+
"grad_norm": 1.9636152982711792,
|
| 10728 |
+
"learning_rate": 6.684673098562799e-07,
|
| 10729 |
+
"loss": 43.3311,
|
| 10730 |
+
"step": 1527
|
| 10731 |
+
},
|
| 10732 |
+
{
|
| 10733 |
+
"epoch": 0.15155723070819282,
|
| 10734 |
+
"grad_norm": 1.5640300512313843,
|
| 10735 |
+
"learning_rate": 6.456400475351232e-07,
|
| 10736 |
+
"loss": 43.4346,
|
| 10737 |
+
"step": 1528
|
| 10738 |
+
},
|
| 10739 |
+
{
|
| 10740 |
+
"epoch": 0.15165641737750446,
|
| 10741 |
+
"grad_norm": 2.089597463607788,
|
| 10742 |
+
"learning_rate": 6.232080839403631e-07,
|
| 10743 |
+
"loss": 43.3784,
|
| 10744 |
+
"step": 1529
|
| 10745 |
+
},
|
| 10746 |
+
{
|
| 10747 |
+
"epoch": 0.1517556040468161,
|
| 10748 |
+
"grad_norm": 1.941697120666504,
|
| 10749 |
+
"learning_rate": 6.011715083214741e-07,
|
| 10750 |
+
"loss": 43.24,
|
| 10751 |
+
"step": 1530
|
| 10752 |
+
},
|
| 10753 |
+
{
|
| 10754 |
+
"epoch": 0.15185479071612776,
|
| 10755 |
+
"grad_norm": 1.754721760749817,
|
| 10756 |
+
"learning_rate": 5.795304083548559e-07,
|
| 10757 |
+
"loss": 43.5555,
|
| 10758 |
+
"step": 1531
|
| 10759 |
+
},
|
| 10760 |
+
{
|
| 10761 |
+
"epoch": 0.1519539773854394,
|
| 10762 |
+
"grad_norm": 2.025624990463257,
|
| 10763 |
+
"learning_rate": 5.582848701433885e-07,
|
| 10764 |
+
"loss": 43.1827,
|
| 10765 |
+
"step": 1532
|
| 10766 |
+
},
|
| 10767 |
+
{
|
| 10768 |
+
"epoch": 0.15205316405475103,
|
| 10769 |
+
"grad_norm": 1.8338382244110107,
|
| 10770 |
+
"learning_rate": 5.37434978216178e-07,
|
| 10771 |
+
"loss": 43.4506,
|
| 10772 |
+
"step": 1533
|
| 10773 |
+
},
|
| 10774 |
+
{
|
| 10775 |
+
"epoch": 0.1521523507240627,
|
| 10776 |
+
"grad_norm": 2.0147147178649902,
|
| 10777 |
+
"learning_rate": 5.169808155281786e-07,
|
| 10778 |
+
"loss": 43.1065,
|
| 10779 |
+
"step": 1534
|
| 10780 |
+
},
|
| 10781 |
+
{
|
| 10782 |
+
"epoch": 0.15225153739337433,
|
| 10783 |
+
"grad_norm": 1.9887562990188599,
|
| 10784 |
+
"learning_rate": 4.969224634598591e-07,
|
| 10785 |
+
"loss": 43.3896,
|
| 10786 |
+
"step": 1535
|
| 10787 |
+
},
|
| 10788 |
+
{
|
| 10789 |
+
"epoch": 0.152350724062686,
|
| 10790 |
+
"grad_norm": 2.161041736602783,
|
| 10791 |
+
"learning_rate": 4.772600018168816e-07,
|
| 10792 |
+
"loss": 43.3768,
|
| 10793 |
+
"step": 1536
|
| 10794 |
+
},
|
| 10795 |
+
{
|
| 10796 |
+
"epoch": 0.15244991073199762,
|
| 10797 |
+
"grad_norm": 1.8491780757904053,
|
| 10798 |
+
"learning_rate": 4.579935088298015e-07,
|
| 10799 |
+
"loss": 43.5216,
|
| 10800 |
+
"step": 1537
|
| 10801 |
+
},
|
| 10802 |
+
{
|
| 10803 |
+
"epoch": 0.15254909740130926,
|
| 10804 |
+
"grad_norm": 1.8649126291275024,
|
| 10805 |
+
"learning_rate": 4.3912306115372337e-07,
|
| 10806 |
+
"loss": 43.2825,
|
| 10807 |
+
"step": 1538
|
| 10808 |
+
},
|
| 10809 |
+
{
|
| 10810 |
+
"epoch": 0.15264828407062092,
|
| 10811 |
+
"grad_norm": 2.234464406967163,
|
| 10812 |
+
"learning_rate": 4.2064873386804535e-07,
|
| 10813 |
+
"loss": 43.3656,
|
| 10814 |
+
"step": 1539
|
| 10815 |
+
},
|
| 10816 |
+
{
|
| 10817 |
+
"epoch": 0.15274747073993256,
|
| 10818 |
+
"grad_norm": 2.0051584243774414,
|
| 10819 |
+
"learning_rate": 4.025706004760932e-07,
|
| 10820 |
+
"loss": 43.3919,
|
| 10821 |
+
"step": 1540
|
| 10822 |
+
},
|
| 10823 |
+
{
|
| 10824 |
+
"epoch": 0.1528466574092442,
|
| 10825 |
+
"grad_norm": 1.7327877283096313,
|
| 10826 |
+
"learning_rate": 3.8488873290492e-07,
|
| 10827 |
+
"loss": 43.3121,
|
| 10828 |
+
"step": 1541
|
| 10829 |
+
},
|
| 10830 |
+
{
|
| 10831 |
+
"epoch": 0.15294584407855585,
|
| 10832 |
+
"grad_norm": 2.157071590423584,
|
| 10833 |
+
"learning_rate": 3.676032015048958e-07,
|
| 10834 |
+
"loss": 42.7969,
|
| 10835 |
+
"step": 1542
|
| 10836 |
+
},
|
| 10837 |
+
{
|
| 10838 |
+
"epoch": 0.1530450307478675,
|
| 10839 |
+
"grad_norm": 2.0114893913269043,
|
| 10840 |
+
"learning_rate": 3.50714075049563e-07,
|
| 10841 |
+
"loss": 43.1662,
|
| 10842 |
+
"step": 1543
|
| 10843 |
+
},
|
| 10844 |
+
{
|
| 10845 |
+
"epoch": 0.15314421741717912,
|
| 10846 |
+
"grad_norm": 1.8396193981170654,
|
| 10847 |
+
"learning_rate": 3.342214207352701e-07,
|
| 10848 |
+
"loss": 43.0054,
|
| 10849 |
+
"step": 1544
|
| 10850 |
+
},
|
| 10851 |
+
{
|
| 10852 |
+
"epoch": 0.1532434040864908,
|
| 10853 |
+
"grad_norm": 1.8815864324569702,
|
| 10854 |
+
"learning_rate": 3.1812530418090513e-07,
|
| 10855 |
+
"loss": 43.4512,
|
| 10856 |
+
"step": 1545
|
| 10857 |
+
},
|
| 10858 |
+
{
|
| 10859 |
+
"epoch": 0.15334259075580242,
|
| 10860 |
+
"grad_norm": 1.6995435953140259,
|
| 10861 |
+
"learning_rate": 3.0242578942771825e-07,
|
| 10862 |
+
"loss": 43.5231,
|
| 10863 |
+
"step": 1546
|
| 10864 |
+
},
|
| 10865 |
+
{
|
| 10866 |
+
"epoch": 0.15344177742511406,
|
| 10867 |
+
"grad_norm": 2.0662453174591064,
|
| 10868 |
+
"learning_rate": 2.8712293893896626e-07,
|
| 10869 |
+
"loss": 43.5079,
|
| 10870 |
+
"step": 1547
|
| 10871 |
+
},
|
| 10872 |
+
{
|
| 10873 |
+
"epoch": 0.15354096409442572,
|
| 10874 |
+
"grad_norm": 1.7273828983306885,
|
| 10875 |
+
"learning_rate": 2.7221681359971274e-07,
|
| 10876 |
+
"loss": 43.223,
|
| 10877 |
+
"step": 1548
|
| 10878 |
+
},
|
| 10879 |
+
{
|
| 10880 |
+
"epoch": 0.15364015076373735,
|
| 10881 |
+
"grad_norm": 2.156456232070923,
|
| 10882 |
+
"learning_rate": 2.577074727165951e-07,
|
| 10883 |
+
"loss": 43.4248,
|
| 10884 |
+
"step": 1549
|
| 10885 |
+
},
|
| 10886 |
+
{
|
| 10887 |
+
"epoch": 0.153739337433049,
|
| 10888 |
+
"grad_norm": 1.5300558805465698,
|
| 10889 |
+
"learning_rate": 2.4359497401758024e-07,
|
| 10890 |
+
"loss": 43.3322,
|
| 10891 |
+
"step": 1550
|
| 10892 |
+
},
|
| 10893 |
+
{
|
| 10894 |
+
"epoch": 0.15383852410236065,
|
| 10895 |
+
"grad_norm": 1.8874156475067139,
|
| 10896 |
+
"learning_rate": 2.2987937365169798e-07,
|
| 10897 |
+
"loss": 43.1676,
|
| 10898 |
+
"step": 1551
|
| 10899 |
+
},
|
| 10900 |
+
{
|
| 10901 |
+
"epoch": 0.1539377107716723,
|
| 10902 |
+
"grad_norm": 2.02668833732605,
|
| 10903 |
+
"learning_rate": 2.1656072618887468e-07,
|
| 10904 |
+
"loss": 43.1283,
|
| 10905 |
+
"step": 1552
|
| 10906 |
+
},
|
| 10907 |
+
{
|
| 10908 |
+
"epoch": 0.15403689744098392,
|
| 10909 |
+
"grad_norm": 1.804421305656433,
|
| 10910 |
+
"learning_rate": 2.0363908461967784e-07,
|
| 10911 |
+
"loss": 43.3719,
|
| 10912 |
+
"step": 1553
|
| 10913 |
+
},
|
| 10914 |
+
{
|
| 10915 |
+
"epoch": 0.15413608411029558,
|
| 10916 |
+
"grad_norm": 1.910282850265503,
|
| 10917 |
+
"learning_rate": 1.9111450035513846e-07,
|
| 10918 |
+
"loss": 43.2325,
|
| 10919 |
+
"step": 1554
|
| 10920 |
+
},
|
| 10921 |
+
{
|
| 10922 |
+
"epoch": 0.15423527077960722,
|
| 10923 |
+
"grad_norm": 1.7048523426055908,
|
| 10924 |
+
"learning_rate": 1.7898702322648453e-07,
|
| 10925 |
+
"loss": 43.3534,
|
| 10926 |
+
"step": 1555
|
| 10927 |
+
},
|
| 10928 |
+
{
|
| 10929 |
+
"epoch": 0.15433445744891885,
|
| 10930 |
+
"grad_norm": 2.2636475563049316,
|
| 10931 |
+
"learning_rate": 1.6725670148503015e-07,
|
| 10932 |
+
"loss": 43.2414,
|
| 10933 |
+
"step": 1556
|
| 10934 |
+
},
|
| 10935 |
+
{
|
| 10936 |
+
"epoch": 0.15443364411823052,
|
| 10937 |
+
"grad_norm": 1.868412971496582,
|
| 10938 |
+
"learning_rate": 1.5592358180189782e-07,
|
| 10939 |
+
"loss": 43.3065,
|
| 10940 |
+
"step": 1557
|
| 10941 |
+
},
|
| 10942 |
+
{
|
| 10943 |
+
"epoch": 0.15453283078754215,
|
| 10944 |
+
"grad_norm": 1.893938422203064,
|
| 10945 |
+
"learning_rate": 1.449877092679075e-07,
|
| 10946 |
+
"loss": 43.2891,
|
| 10947 |
+
"step": 1558
|
| 10948 |
+
},
|
| 10949 |
+
{
|
| 10950 |
+
"epoch": 0.1546320174568538,
|
| 10951 |
+
"grad_norm": 1.8168120384216309,
|
| 10952 |
+
"learning_rate": 1.3444912739333237e-07,
|
| 10953 |
+
"loss": 43.2643,
|
| 10954 |
+
"step": 1559
|
| 10955 |
+
},
|
| 10956 |
+
{
|
| 10957 |
+
"epoch": 0.15473120412616545,
|
| 10958 |
+
"grad_norm": 1.7623488903045654,
|
| 10959 |
+
"learning_rate": 1.2430787810776555e-07,
|
| 10960 |
+
"loss": 43.0322,
|
| 10961 |
+
"step": 1560
|
| 10962 |
+
},
|
| 10963 |
+
{
|
| 10964 |
+
"epoch": 0.15483039079547709,
|
| 10965 |
+
"grad_norm": 1.9052917957305908,
|
| 10966 |
+
"learning_rate": 1.1456400175994252e-07,
|
| 10967 |
+
"loss": 43.0037,
|
| 10968 |
+
"step": 1561
|
| 10969 |
+
},
|
| 10970 |
+
{
|
| 10971 |
+
"epoch": 0.15492957746478872,
|
| 10972 |
+
"grad_norm": 1.8220422267913818,
|
| 10973 |
+
"learning_rate": 1.0521753711759674e-07,
|
| 10974 |
+
"loss": 43.1282,
|
| 10975 |
+
"step": 1562
|
| 10976 |
+
},
|
| 10977 |
+
{
|
| 10978 |
+
"epoch": 0.15502876413410038,
|
| 10979 |
+
"grad_norm": 1.9215062856674194,
|
| 10980 |
+
"learning_rate": 9.626852136725984e-08,
|
| 10981 |
+
"loss": 43.1229,
|
| 10982 |
+
"step": 1563
|
| 10983 |
+
},
|
| 10984 |
+
{
|
| 10985 |
+
"epoch": 0.15512795080341202,
|
| 10986 |
+
"grad_norm": 1.848051905632019,
|
| 10987 |
+
"learning_rate": 8.771699011416168e-08,
|
| 10988 |
+
"loss": 42.9966,
|
| 10989 |
+
"step": 1564
|
| 10990 |
+
},
|
| 10991 |
+
{
|
| 10992 |
+
"epoch": 0.15522713747272365,
|
| 10993 |
+
"grad_norm": 2.0624942779541016,
|
| 10994 |
+
"learning_rate": 7.956297738207497e-08,
|
| 10995 |
+
"loss": 43.0968,
|
| 10996 |
+
"step": 1565
|
| 10997 |
+
},
|
| 10998 |
+
{
|
| 10999 |
+
"epoch": 0.15532632414203532,
|
| 11000 |
+
"grad_norm": 2.1140987873077393,
|
| 11001 |
+
"learning_rate": 7.180651561315977e-08,
|
| 11002 |
+
"loss": 43.1196,
|
| 11003 |
+
"step": 1566
|
| 11004 |
+
},
|
| 11005 |
+
{
|
| 11006 |
+
"epoch": 0.15542551081134695,
|
| 11007 |
+
"grad_norm": 1.8665432929992676,
|
| 11008 |
+
"learning_rate": 6.444763566786361e-08,
|
| 11009 |
+
"loss": 43.1564,
|
| 11010 |
+
"step": 1567
|
| 11011 |
+
},
|
| 11012 |
+
{
|
| 11013 |
+
"epoch": 0.15552469748065859,
|
| 11014 |
+
"grad_norm": 2.1170995235443115,
|
| 11015 |
+
"learning_rate": 5.748636682477715e-08,
|
| 11016 |
+
"loss": 43.377,
|
| 11017 |
+
"step": 1568
|
| 11018 |
+
},
|
| 11019 |
+
{
|
| 11020 |
+
"epoch": 0.15562388414997025,
|
| 11021 |
+
"grad_norm": 2.1143836975097656,
|
| 11022 |
+
"learning_rate": 5.092273678052317e-08,
|
| 11023 |
+
"loss": 43.3935,
|
| 11024 |
+
"step": 1569
|
| 11025 |
+
},
|
| 11026 |
+
{
|
| 11027 |
+
"epoch": 0.15572307081928188,
|
| 11028 |
+
"grad_norm": 2.0807676315307617,
|
| 11029 |
+
"learning_rate": 4.475677164966774e-08,
|
| 11030 |
+
"loss": 43.0398,
|
| 11031 |
+
"step": 1570
|
| 11032 |
+
},
|
| 11033 |
+
{
|
| 11034 |
+
"epoch": 0.15582225748859355,
|
| 11035 |
+
"grad_norm": 1.85593843460083,
|
| 11036 |
+
"learning_rate": 3.898849596456478e-08,
|
| 11037 |
+
"loss": 43.3593,
|
| 11038 |
+
"step": 1571
|
| 11039 |
+
},
|
| 11040 |
+
{
|
| 11041 |
+
"epoch": 0.15592144415790518,
|
| 11042 |
+
"grad_norm": 1.962047815322876,
|
| 11043 |
+
"learning_rate": 3.361793267532276e-08,
|
| 11044 |
+
"loss": 43.4117,
|
| 11045 |
+
"step": 1572
|
| 11046 |
+
},
|
| 11047 |
+
{
|
| 11048 |
+
"epoch": 0.15602063082721682,
|
| 11049 |
+
"grad_norm": 2.188950777053833,
|
| 11050 |
+
"learning_rate": 2.86451031496604e-08,
|
| 11051 |
+
"loss": 43.2674,
|
| 11052 |
+
"step": 1573
|
| 11053 |
+
},
|
| 11054 |
+
{
|
| 11055 |
+
"epoch": 0.15611981749652848,
|
| 11056 |
+
"grad_norm": 1.7976933717727661,
|
| 11057 |
+
"learning_rate": 2.4070027172851117e-08,
|
| 11058 |
+
"loss": 43.5485,
|
| 11059 |
+
"step": 1574
|
| 11060 |
+
},
|
| 11061 |
+
{
|
| 11062 |
+
"epoch": 0.15621900416584011,
|
| 11063 |
+
"grad_norm": 1.911242127418518,
|
| 11064 |
+
"learning_rate": 1.9892722947645326e-08,
|
| 11065 |
+
"loss": 43.3611,
|
| 11066 |
+
"step": 1575
|
| 11067 |
+
},
|
| 11068 |
+
{
|
| 11069 |
+
"epoch": 0.15631819083515175,
|
| 11070 |
+
"grad_norm": 2.212848424911499,
|
| 11071 |
+
"learning_rate": 1.6113207094181626e-08,
|
| 11072 |
+
"loss": 43.4165,
|
| 11073 |
+
"step": 1576
|
| 11074 |
+
},
|
| 11075 |
+
{
|
| 11076 |
+
"epoch": 0.1564173775044634,
|
| 11077 |
+
"grad_norm": 1.8893673419952393,
|
| 11078 |
+
"learning_rate": 1.2731494649909081e-08,
|
| 11079 |
+
"loss": 43.1338,
|
| 11080 |
+
"step": 1577
|
| 11081 |
+
},
|
| 11082 |
+
{
|
| 11083 |
+
"epoch": 0.15651656417377505,
|
| 11084 |
+
"grad_norm": 2.001990795135498,
|
| 11085 |
+
"learning_rate": 9.747599069576119e-09,
|
| 11086 |
+
"loss": 43.1382,
|
| 11087 |
+
"step": 1578
|
| 11088 |
+
},
|
| 11089 |
+
{
|
| 11090 |
+
"epoch": 0.15661575084308668,
|
| 11091 |
+
"grad_norm": 1.522186279296875,
|
| 11092 |
+
"learning_rate": 7.161532225130607e-09,
|
| 11093 |
+
"loss": 43.4232,
|
| 11094 |
+
"step": 1579
|
| 11095 |
+
},
|
| 11096 |
+
{
|
| 11097 |
+
"epoch": 0.15671493751239834,
|
| 11098 |
+
"grad_norm": 2.2063798904418945,
|
| 11099 |
+
"learning_rate": 4.973304405697654e-09,
|
| 11100 |
+
"loss": 43.4284,
|
| 11101 |
+
"step": 1580
|
| 11102 |
+
},
|
| 11103 |
+
{
|
| 11104 |
+
"epoch": 0.15681412418170998,
|
| 11105 |
+
"grad_norm": 1.7803454399108887,
|
| 11106 |
+
"learning_rate": 3.182924317512992e-09,
|
| 11107 |
+
"loss": 43.1111,
|
| 11108 |
+
"step": 1581
|
| 11109 |
+
},
|
| 11110 |
+
{
|
| 11111 |
+
"epoch": 0.15691331085102161,
|
| 11112 |
+
"grad_norm": 2.2368690967559814,
|
| 11113 |
+
"learning_rate": 1.7903990839229779e-09,
|
| 11114 |
+
"loss": 43.4775,
|
| 11115 |
+
"step": 1582
|
| 11116 |
+
},
|
| 11117 |
+
{
|
| 11118 |
+
"epoch": 0.15701249752033328,
|
| 11119 |
+
"grad_norm": 1.8889573812484741,
|
| 11120 |
+
"learning_rate": 7.95734245340185e-10,
|
| 11121 |
+
"loss": 43.3141,
|
| 11122 |
+
"step": 1583
|
| 11123 |
+
},
|
| 11124 |
+
{
|
| 11125 |
+
"epoch": 0.1571116841896449,
|
| 11126 |
+
"grad_norm": 1.7237956523895264,
|
| 11127 |
+
"learning_rate": 1.9893375921009595e-10,
|
| 11128 |
+
"loss": 43.4533,
|
| 11129 |
+
"step": 1584
|
| 11130 |
+
},
|
| 11131 |
+
{
|
| 11132 |
+
"epoch": 0.15721087085895655,
|
| 11133 |
+
"grad_norm": 1.6221884489059448,
|
| 11134 |
+
"learning_rate": 0.0,
|
| 11135 |
+
"loss": 43.3652,
|
| 11136 |
+
"step": 1585
|
| 11137 |
}
|
| 11138 |
],
|
| 11139 |
"logging_steps": 1,
|
...
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
+       "should_training_stop": true
      },
      "attributes": {}
    }
  },
+ "total_flos": 15397926469632.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null