Training in progress, step 1588, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:eb863bed6a7c164521191dfb75cad33fc19d20d65710f4f3affbe13cc71f749c
 size 80013120
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:81eed029f4c9c5e39b0aec6fd10d7d589cfc9afb04b3a1f6dcb028a862eac5b7
 size 41120084
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:148820ba0668e5da8846e1393379d9b6e11600d8b4318a9f7156aed90260a3e2
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:defeb9ef8cdd685910c10cc6564d4128ee785f1cb589efa38e65694d45921bec
 size 1064
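The four files above are stored through Git LFS, so the diff only touches their pointer files: the `oid sha256:` line is the SHA-256 digest of the tracked file's full contents, and `size` is its byte length. As a minimal illustration (not part of the commit), the sketch below recomputes that oid for a downloaded checkpoint file; the local path is assumed to mirror the repository layout.

```python
import hashlib
import os

def lfs_oid(path: str) -> str:
    """SHA-256 hex digest of a file's contents -- the value Git LFS
    records on the pointer's "oid sha256:" line."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
    return h.hexdigest()

# Compare against the adapter_model.safetensors pointer shown above:
# expected oid eb863bed...749c and expected size 80013120 bytes.
path = "last-checkpoint/adapter_model.safetensors"
print(lfs_oid(path))
print(os.path.getsize(path))
```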
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.2641713453940528,
   "eval_steps": 397,
-  "global_step":
+  "global_step": 1588,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -8376,6 +8376,2793 @@
       "eval_samples_per_second": 17.881,
       "eval_steps_per_second": 8.944,
       "step": 1191
+    },
+    { "epoch": 0.1982948637970472, "grad_norm": 1.6232105493545532, "learning_rate": 2.9500800504489022e-05, "loss": 5.2932, "step": 1192 },
+    { "epoch": 0.19846121854855478, "grad_norm": 2.141390323638916, "learning_rate": 2.9359744782243302e-05, "loss": 6.3036, "step": 1193 },
+    { "epoch": 0.1986275733000624, "grad_norm": 1.661794662475586, "learning_rate": 2.921896904699539e-05, "loss": 5.5686, "step": 1194 },
+    { "epoch": 0.19879392805156998, "grad_norm": 1.2751030921936035, "learning_rate": 2.9078473856718636e-05, "loss": 5.2277, "step": 1195 },
+    { "epoch": 0.19896028280307756, "grad_norm": 1.3397148847579956, "learning_rate": 2.8938259768274355e-05, "loss": 5.6426, "step": 1196 },
+    { "epoch": 0.19912663755458515, "grad_norm": 1.3956557512283325, "learning_rate": 2.8798327337409658e-05, "loss": 4.9241, "step": 1197 },
+    { "epoch": 0.19929299230609274, "grad_norm": 1.4062037467956543, "learning_rate": 2.8658677118755382e-05, "loss": 5.294, "step": 1198 },
+    { "epoch": 0.19945934705760032, "grad_norm": 1.2887972593307495, "learning_rate": 2.8519309665823734e-05, "loss": 5.8716, "step": 1199 },
+    { "epoch": 0.19962570180910794, "grad_norm": 1.4394385814666748, "learning_rate": 2.8380225531006233e-05, "loss": 6.1407, "step": 1200 },
+    { "epoch": 0.19979205656061552, "grad_norm": 1.6875256299972534, "learning_rate": 2.824142526557142e-05, "loss": 6.1755, "step": 1201 },
+    { "epoch": 0.1999584113121231, "grad_norm": 1.2772835493087769, "learning_rate": 2.8102909419662616e-05, "loss": 6.0966, "step": 1202 },
+    { "epoch": 0.2001247660636307, "grad_norm": 1.3359897136688232, "learning_rate": 2.796467854229594e-05, "loss": 5.5629, "step": 1203 },
+    { "epoch": 0.20029112081513828, "grad_norm": 1.3850102424621582, "learning_rate": 2.7826733181357932e-05, "loss": 5.8447, "step": 1204 },
+    { "epoch": 0.20045747556664587, "grad_norm": 1.6023699045181274, "learning_rate": 2.76890738836036e-05, "loss": 5.8633, "step": 1205 },
+    { "epoch": 0.20062383031815345, "grad_norm": 1.818352460861206, "learning_rate": 2.755170119465402e-05, "loss": 5.1515, "step": 1206 },
+    { "epoch": 0.20079018506966106, "grad_norm": 1.3524521589279175, "learning_rate": 2.741461565899426e-05, "loss": 5.5836, "step": 1207 },
+    { "epoch": 0.20095653982116865, "grad_norm": 1.3133295774459839, "learning_rate": 2.7277817819971242e-05, "loss": 4.9128, "step": 1208 },
+    { "epoch": 0.20112289457267624, "grad_norm": 1.3486051559448242, "learning_rate": 2.7141308219791706e-05, "loss": 5.7566, "step": 1209 },
+    { "epoch": 0.20128924932418382, "grad_norm": 1.273048758506775, "learning_rate": 2.7005087399519835e-05, "loss": 5.9008, "step": 1210 },
+    { "epoch": 0.2014556040756914, "grad_norm": 1.2358012199401855, "learning_rate": 2.6869155899075184e-05, "loss": 4.8449, "step": 1211 },
+    { "epoch": 0.201621958827199, "grad_norm": 1.3681429624557495, "learning_rate": 2.673351425723064e-05, "loss": 6.0803, "step": 1212 },
+    { "epoch": 0.20178831357870658, "grad_norm": 1.7087936401367188, "learning_rate": 2.6598163011610177e-05, "loss": 5.713, "step": 1213 },
+    { "epoch": 0.2019546683302142, "grad_norm": 1.5574787855148315, "learning_rate": 2.6463102698686825e-05, "loss": 5.8689, "step": 1214 },
+    { "epoch": 0.20212102308172178, "grad_norm": 1.4936617612838745, "learning_rate": 2.6328333853780453e-05, "loss": 6.1069, "step": 1215 },
+    { "epoch": 0.20228737783322936, "grad_norm": 1.3248683214187622, "learning_rate": 2.6193857011055622e-05, "loss": 5.6671, "step": 1216 },
+    { "epoch": 0.20245373258473695, "grad_norm": 1.388630986213684, "learning_rate": 2.605967270351959e-05, "loss": 5.5237, "step": 1217 },
+    { "epoch": 0.20262008733624454, "grad_norm": 1.243695855140686, "learning_rate": 2.592578146302008e-05, "loss": 6.5819, "step": 1218 },
+    { "epoch": 0.20278644208775212, "grad_norm": 1.2897069454193115, "learning_rate": 2.5792183820243332e-05, "loss": 6.34, "step": 1219 },
+    { "epoch": 0.2029527968392597, "grad_norm": 1.4721264839172363, "learning_rate": 2.565888030471183e-05, "loss": 5.714, "step": 1220 },
+    { "epoch": 0.20311915159076732, "grad_norm": 1.2373559474945068, "learning_rate": 2.5525871444782177e-05, "loss": 4.4295, "step": 1221 },
+    { "epoch": 0.2032855063422749, "grad_norm": 1.3331725597381592, "learning_rate": 2.5393157767643228e-05, "loss": 5.4134, "step": 1222 },
+    { "epoch": 0.2034518610937825, "grad_norm": 1.482886791229248, "learning_rate": 2.526073979931376e-05, "loss": 5.5239, "step": 1223 },
+    { "epoch": 0.20361821584529008, "grad_norm": 1.459428071975708, "learning_rate": 2.512861806464063e-05, "loss": 5.5429, "step": 1224 },
+    { "epoch": 0.20378457059679767, "grad_norm": 1.3316020965576172, "learning_rate": 2.499679308729639e-05, "loss": 6.2716, "step": 1225 },
+    { "epoch": 0.20395092534830525, "grad_norm": 1.7475497722625732, "learning_rate": 2.486526538977745e-05, "loss": 5.2194, "step": 1226 },
+    { "epoch": 0.20411728009981286, "grad_norm": 1.3625974655151367, "learning_rate": 2.4734035493401953e-05, "loss": 5.4701, "step": 1227 },
+    { "epoch": 0.20428363485132045, "grad_norm": 1.3647065162658691, "learning_rate": 2.4603103918307625e-05, "loss": 5.4991, "step": 1228 },
+    { "epoch": 0.20444998960282804, "grad_norm": 1.9600995779037476, "learning_rate": 2.447247118344992e-05, "loss": 5.5445, "step": 1229 },
+    { "epoch": 0.20461634435433562, "grad_norm": 1.1997805833816528, "learning_rate": 2.434213780659962e-05, "loss": 5.2069, "step": 1230 },
+    { "epoch": 0.2047826991058432, "grad_norm": 1.1434640884399414, "learning_rate": 2.4212104304341122e-05, "loss": 4.7765, "step": 1231 },
+    { "epoch": 0.2049490538573508, "grad_norm": 1.4303185939788818, "learning_rate": 2.408237119207022e-05, "loss": 6.0497, "step": 1232 },
+    { "epoch": 0.20511540860885838, "grad_norm": 1.2942795753479004, "learning_rate": 2.3952938983992055e-05, "loss": 5.3223, "step": 1233 },
+    { "epoch": 0.205281763360366, "grad_norm": 1.442844271659851, "learning_rate": 2.3823808193119178e-05, "loss": 6.1224, "step": 1234 },
+    { "epoch": 0.20544811811187358, "grad_norm": 1.4841268062591553, "learning_rate": 2.3694979331269417e-05, "loss": 6.4459, "step": 1235 },
+    { "epoch": 0.20561447286338116, "grad_norm": 1.2687065601348877, "learning_rate": 2.3566452909063885e-05, "loss": 5.881, "step": 1236 },
+    { "epoch": 0.20578082761488875, "grad_norm": 1.423063039779663, "learning_rate": 2.3438229435924952e-05, "loss": 5.82, "step": 1237 },
+    { "epoch": 0.20594718236639634, "grad_norm": 1.423080563545227, "learning_rate": 2.3310309420074305e-05, "loss": 5.3107, "step": 1238 },
+    { "epoch": 0.20611353711790392, "grad_norm": 1.270208716392517, "learning_rate": 2.3182693368530718e-05, "loss": 5.2911, "step": 1239 },
+    { "epoch": 0.2062798918694115, "grad_norm": 1.176243543624878, "learning_rate": 2.305538178710831e-05, "loss": 5.1497, "step": 1240 },
+    { "epoch": 0.20644624662091912, "grad_norm": 1.4276049137115479, "learning_rate": 2.2928375180414342e-05, "loss": 5.3658, "step": 1241 },
+    { "epoch": 0.2066126013724267, "grad_norm": 1.3212155103683472, "learning_rate": 2.2801674051847298e-05, "loss": 5.5832, "step": 1242 },
+    { "epoch": 0.2067789561239343, "grad_norm": 1.5570040941238403, "learning_rate": 2.2675278903594988e-05, "loss": 5.8335, "step": 1243 },
+    { "epoch": 0.20694531087544188, "grad_norm": 2.0657362937927246, "learning_rate": 2.2549190236632268e-05, "loss": 4.2874, "step": 1244 },
+    { "epoch": 0.20711166562694946, "grad_norm": 1.2278870344161987, "learning_rate": 2.242340855071935e-05, "loss": 5.068, "step": 1245 },
+    { "epoch": 0.20727802037845705, "grad_norm": 1.6545754671096802, "learning_rate": 2.2297934344399695e-05, "loss": 7.2917, "step": 1246 },
+    { "epoch": 0.20744437512996464, "grad_norm": 1.39810311794281, "learning_rate": 2.2172768114998022e-05, "loss": 5.8273, "step": 1247 },
+    { "epoch": 0.20761072988147225, "grad_norm": 1.4509820938110352, "learning_rate": 2.2047910358618405e-05, "loss": 5.7493, "step": 1248 },
+    { "epoch": 0.20777708463297984, "grad_norm": 1.3449739217758179, "learning_rate": 2.192336157014223e-05, "loss": 5.4697, "step": 1249 },
+    { "epoch": 0.20794343938448742, "grad_norm": 1.4670015573501587, "learning_rate": 2.179912224322629e-05, "loss": 5.6578, "step": 1250 },
+    { "epoch": 0.208109794135995, "grad_norm": 1.3220630884170532, "learning_rate": 2.1675192870300797e-05, "loss": 6.6637, "step": 1251 },
+    { "epoch": 0.2082761488875026, "grad_norm": 1.4806134700775146, "learning_rate": 2.155157394256745e-05, "loss": 5.7748, "step": 1252 },
+    { "epoch": 0.20844250363901018, "grad_norm": 1.2802047729492188, "learning_rate": 2.1428265949997463e-05, "loss": 5.7504, "step": 1253 },
+    { "epoch": 0.2086088583905178, "grad_norm": 1.2848188877105713, "learning_rate": 2.130526938132966e-05, "loss": 5.6146, "step": 1254 },
+    { "epoch": 0.20877521314202538, "grad_norm": 1.6816967725753784, "learning_rate": 2.118258472406851e-05, "loss": 6.1801, "step": 1255 },
+    { "epoch": 0.20894156789353296, "grad_norm": 1.5058279037475586, "learning_rate": 2.1060212464482197e-05, "loss": 5.818, "step": 1256 },
+    { "epoch": 0.20910792264504055, "grad_norm": 1.345757246017456, "learning_rate": 2.093815308760071e-05, "loss": 5.48, "step": 1257 },
+    { "epoch": 0.20927427739654814, "grad_norm": 1.3328275680541992, "learning_rate": 2.0816407077213896e-05, "loss": 5.8052, "step": 1258 },
+    { "epoch": 0.20944063214805572, "grad_norm": 1.574367642402649, "learning_rate": 2.0694974915869536e-05, "loss": 5.6451, "step": 1259 },
+    { "epoch": 0.2096069868995633, "grad_norm": 1.4146779775619507, "learning_rate": 2.0573857084871507e-05, "loss": 5.4235, "step": 1260 },
+    { "epoch": 0.20977334165107092, "grad_norm": 1.5349353551864624, "learning_rate": 2.0453054064277756e-05, "loss": 5.4909, "step": 1261 },
+    { "epoch": 0.2099396964025785, "grad_norm": 1.966155767440796, "learning_rate": 2.0332566332898507e-05, "loss": 5.0158, "step": 1262 },
+    { "epoch": 0.2101060511540861, "grad_norm": 1.4259071350097656, "learning_rate": 2.0212394368294286e-05, "loss": 5.0265, "step": 1263 },
+    { "epoch": 0.21027240590559368, "grad_norm": 1.3615598678588867, "learning_rate": 2.0092538646774072e-05, "loss": 6.2554, "step": 1264 },
+    { "epoch": 0.21043876065710126, "grad_norm": 2.0183749198913574, "learning_rate": 1.9972999643393386e-05, "loss": 5.8037, "step": 1265 },
+    { "epoch": 0.21060511540860885, "grad_norm": 1.4518505334854126, "learning_rate": 1.9853777831952437e-05, "loss": 5.9251, "step": 1266 },
+    { "epoch": 0.21077147016011644, "grad_norm": 1.3328274488449097, "learning_rate": 1.9734873684994205e-05, "loss": 5.2111, "step": 1267 },
+    { "epoch": 0.21093782491162405, "grad_norm": 2.285362720489502, "learning_rate": 1.9616287673802568e-05, "loss": 5.3803, "step": 1268 },
+    { "epoch": 0.21110417966313164, "grad_norm": 1.3630973100662231, "learning_rate": 1.949802026840052e-05, "loss": 4.8715, "step": 1269 },
+    { "epoch": 0.21127053441463922, "grad_norm": 1.7096298933029175, "learning_rate": 1.938007193754816e-05, "loss": 6.0174, "step": 1270 },
+    { "epoch": 0.2114368891661468, "grad_norm": 1.2987428903579712, "learning_rate": 1.9262443148740983e-05, "loss": 5.946, "step": 1271 },
+    { "epoch": 0.2116032439176544, "grad_norm": 1.4257386922836304, "learning_rate": 1.9145134368207916e-05, "loss": 4.9977, "step": 1272 },
+    { "epoch": 0.21176959866916198, "grad_norm": 1.9782564640045166, "learning_rate": 1.9028146060909523e-05, "loss": 6.2552, "step": 1273 },
+    { "epoch": 0.21193595342066956, "grad_norm": 1.427051067352295, "learning_rate": 1.8911478690536177e-05, "loss": 5.231, "step": 1274 },
+    { "epoch": 0.21210230817217718, "grad_norm": 1.2214199304580688, "learning_rate": 1.879513271950616e-05, "loss": 4.8163, "step": 1275 },
+    { "epoch": 0.21226866292368476, "grad_norm": 1.1599147319793701, "learning_rate": 1.8679108608963903e-05, "loss": 5.1718, "step": 1276 },
+    { "epoch": 0.21243501767519235, "grad_norm": 1.22577702999115, "learning_rate": 1.8563406818778085e-05, "loss": 5.3885, "step": 1277 },
+    { "epoch": 0.21260137242669994, "grad_norm": 1.2197672128677368, "learning_rate": 1.844802780753989e-05, "loss": 5.5086, "step": 1278 },
+    { "epoch": 0.21276772717820752, "grad_norm": 1.537932276725769, "learning_rate": 1.8332972032561124e-05, "loss": 5.8123, "step": 1279 },
+    { "epoch": 0.2129340819297151, "grad_norm": 1.391782522201538, "learning_rate": 1.8218239949872428e-05, "loss": 5.9071, "step": 1280 },
+    { "epoch": 0.2131004366812227, "grad_norm": 1.4246690273284912, "learning_rate": 1.8103832014221468e-05, "loss": 5.4198, "step": 1281 },
+    { "epoch": 0.2132667914327303, "grad_norm": 2.2898967266082764, "learning_rate": 1.7989748679071138e-05, "loss": 5.5294, "step": 1282 },
+    { "epoch": 0.2134331461842379, "grad_norm": 1.500938892364502, "learning_rate": 1.7875990396597752e-05, "loss": 5.6257, "step": 1283 },
+    { "epoch": 0.21359950093574548, "grad_norm": 1.7106680870056152, "learning_rate": 1.7762557617689267e-05, "loss": 5.7937, "step": 1284 },
+    { "epoch": 0.21376585568725306, "grad_norm": 1.511987328529358, "learning_rate": 1.764945079194349e-05, "loss": 6.334, "step": 1285 },
+    { "epoch": 0.21393221043876065, "grad_norm": 1.2296509742736816, "learning_rate": 1.7536670367666264e-05, "loss": 5.0222, "step": 1286 },
+    { "epoch": 0.21409856519026824, "grad_norm": 1.6023799180984497, "learning_rate": 1.7424216791869763e-05, "loss": 4.9031, "step": 1287 },
+    { "epoch": 0.21426491994177585, "grad_norm": 2.15262508392334, "learning_rate": 1.7312090510270627e-05, "loss": 5.6437, "step": 1288 },
+    { "epoch": 0.21443127469328344, "grad_norm": 1.35624098777771, "learning_rate": 1.7200291967288296e-05, "loss": 6.1928, "step": 1289 },
+    { "epoch": 0.21459762944479102, "grad_norm": 1.326606273651123, "learning_rate": 1.7088821606043148e-05, "loss": 5.3117, "step": 1290 },
+    { "epoch": 0.2147639841962986, "grad_norm": 1.386781096458435, "learning_rate": 1.6977679868354844e-05, "loss": 5.5758, "step": 1291 },
+    { "epoch": 0.2149303389478062, "grad_norm": 1.2637791633605957, "learning_rate": 1.68668671947405e-05, "loss": 4.874, "step": 1292 },
+    { "epoch": 0.21509669369931378, "grad_norm": 1.7423834800720215, "learning_rate": 1.675638402441294e-05, "loss": 4.6793, "step": 1293 },
+    { "epoch": 0.21526304845082136, "grad_norm": 1.198981761932373, "learning_rate": 1.6646230795279026e-05, "loss": 5.1138, "step": 1294 },
+    { "epoch": 0.21542940320232898, "grad_norm": 1.5077391862869263, "learning_rate": 1.653640794393785e-05, "loss": 5.5779, "step": 1295 },
+    { "epoch": 0.21559575795383656, "grad_norm": 2.1747539043426514, "learning_rate": 1.6426915905679053e-05, "loss": 5.4591, "step": 1296 },
+    { "epoch": 0.21576211270534415, "grad_norm": 1.4635599851608276, "learning_rate": 1.6317755114481058e-05, "loss": 7.2079, "step": 1297 },
+    { "epoch": 0.21592846745685174, "grad_norm": 1.2033299207687378, "learning_rate": 1.6208926003009394e-05, "loss": 5.009, "step": 1298 },
+    { "epoch": 0.21609482220835932, "grad_norm": 1.2838571071624756, "learning_rate": 1.610042900261488e-05, "loss": 5.1545, "step": 1299 },
+    { "epoch": 0.2162611769598669, "grad_norm": 1.731187343597412, "learning_rate": 1.5992264543332124e-05, "loss": 6.7372, "step": 1300 },
+    { "epoch": 0.2164275317113745, "grad_norm": 1.2437002658843994, "learning_rate": 1.588443305387759e-05, "loss": 6.0491, "step": 1301 },
+    { "epoch": 0.2165938864628821, "grad_norm": 1.3229238986968994, "learning_rate": 1.5776934961648005e-05, "loss": 5.7803, "step": 1302 },
+    { "epoch": 0.2167602412143897, "grad_norm": 1.2940232753753662, "learning_rate": 1.566977069271872e-05, "loss": 5.758, "step": 1303 },
+    { "epoch": 0.21692659596589728, "grad_norm": 1.3176711797714233, "learning_rate": 1.556294067184182e-05, "loss": 5.6269, "step": 1304 },
+    { "epoch": 0.21709295071740486, "grad_norm": 1.3167920112609863, "learning_rate": 1.5456445322444745e-05, "loss": 5.2948, "step": 1305 },
+    { "epoch": 0.21725930546891245, "grad_norm": 1.2418887615203857, "learning_rate": 1.5350285066628343e-05, "loss": 5.9618, "step": 1306 },
+    { "epoch": 0.21742566022042004, "grad_norm": 1.5438908338546753, "learning_rate": 1.5244460325165311e-05, "loss": 5.5876, "step": 1307 },
+    { "epoch": 0.21759201497192762, "grad_norm": 2.281704902648926, "learning_rate": 1.5138971517498524e-05, "loss": 5.3735, "step": 1308 },
+    { "epoch": 0.21775836972343524, "grad_norm": 1.4625532627105713, "learning_rate": 1.5033819061739373e-05, "loss": 5.7448, "step": 1309 },
+    { "epoch": 0.21792472447494282, "grad_norm": 1.3492189645767212, "learning_rate": 1.4929003374666073e-05, "loss": 6.5311, "step": 1310 },
+    { "epoch": 0.2180910792264504, "grad_norm": 1.2679107189178467, "learning_rate": 1.4824524871722046e-05, "loss": 5.3872, "step": 1311 },
+    { "epoch": 0.218257433977958, "grad_norm": 1.3351328372955322, "learning_rate": 1.4720383967014306e-05, "loss": 5.8061, "step": 1312 },
+    { "epoch": 0.21842378872946558, "grad_norm": 1.3560147285461426, "learning_rate": 1.4616581073311663e-05, "loss": 4.8224, "step": 1313 },
+    { "epoch": 0.21859014348097316, "grad_norm": 1.2801884412765503, "learning_rate": 1.451311660204333e-05, "loss": 5.5258, "step": 1314 },
+    { "epoch": 0.21875649823248078, "grad_norm": 1.2850334644317627, "learning_rate": 1.4409990963297093e-05, "loss": 6.0771, "step": 1315 },
+    { "epoch": 0.21892285298398836, "grad_norm": 1.5097272396087646, "learning_rate": 1.4307204565817755e-05, "loss": 5.6169, "step": 1316 },
+    { "epoch": 0.21908920773549595, "grad_norm": 1.3600345849990845, "learning_rate": 1.4204757817005566e-05, "loss": 5.4378, "step": 1317 },
+    { "epoch": 0.21925556248700354, "grad_norm": 1.2716408967971802, "learning_rate": 1.4102651122914434e-05, "loss": 4.8635, "step": 1318 },
+    { "epoch": 0.21942191723851112, "grad_norm": 1.4233225584030151, "learning_rate": 1.4000884888250598e-05, "loss": 6.2514, "step": 1319 },
+    { "epoch": 0.2195882719900187, "grad_norm": 1.9059834480285645, "learning_rate": 1.3899459516370772e-05, "loss": 5.3482, "step": 1320 },
+    { "epoch": 0.2197546267415263, "grad_norm": 1.2136225700378418, "learning_rate": 1.379837540928065e-05, "loss": 5.3432, "step": 1321 },
+    { "epoch": 0.2199209814930339, "grad_norm": 1.3530998229980469, "learning_rate": 1.3697632967633344e-05, "loss": 5.5692, "step": 1322 },
+    { "epoch": 0.2200873362445415, "grad_norm": 1.283886194229126, "learning_rate": 1.3597232590727638e-05, "loss": 5.6595, "step": 1323 },
+    { "epoch": 0.22025369099604908, "grad_norm": 1.5503801107406616, "learning_rate": 1.3497174676506674e-05, "loss": 5.9143, "step": 1324 },
+    { "epoch": 0.22042004574755666, "grad_norm": 2.6419754028320312, "learning_rate": 1.339745962155613e-05, "loss": 5.4356, "step": 1325 },
+    { "epoch": 0.22058640049906425, "grad_norm": 1.3269411325454712, "learning_rate": 1.3298087821102789e-05, "loss": 5.1117, "step": 1326 },
+    { "epoch": 0.22075275525057184, "grad_norm": 1.2497272491455078, "learning_rate": 1.319905966901286e-05, "loss": 5.5323, "step": 1327 },
+    { "epoch": 0.22091911000207942, "grad_norm": 1.6322333812713623, "learning_rate": 1.310037555779049e-05, "loss": 5.1779, "step": 1328 },
+    { "epoch": 0.22108546475358704, "grad_norm": 1.5000379085540771, "learning_rate": 1.300203587857629e-05, "loss": 5.0426, "step": 1329 },
+    { "epoch": 0.22125181950509462, "grad_norm": 1.2270927429199219, "learning_rate": 1.2904041021145596e-05, "loss": 5.5344, "step": 1330 },
+    { "epoch": 0.2214181742566022, "grad_norm": 1.2086352109909058, "learning_rate": 1.2806391373907089e-05, "loss": 5.087, "step": 1331 },
+    { "epoch": 0.2215845290081098, "grad_norm": 2.7081668376922607, "learning_rate": 1.2709087323901104e-05, "loss": 5.7246, "step": 1332 },
+    { "epoch": 0.22175088375961738, "grad_norm": 1.6704657077789307, "learning_rate": 1.2612129256798221e-05, "loss": 6.1261, "step": 1333 },
+    { "epoch": 0.22191723851112496, "grad_norm": 1.3136470317840576, "learning_rate": 1.2515517556897772e-05, "loss": 5.6954, "step": 1334 },
+    { "epoch": 0.22208359326263255, "grad_norm": 1.248659372329712, "learning_rate": 1.241925260712612e-05, "loss": 6.0346, "step": 1335 },
+    { "epoch": 0.22224994801414016, "grad_norm": 1.389736533164978, "learning_rate": 1.2323334789035367e-05, "loss": 5.6601, "step": 1336 },
+    { "epoch": 0.22241630276564775, "grad_norm": 1.2835826873779297, "learning_rate": 1.2227764482801607e-05, "loss": 5.0758, "step": 1337 },
+    { "epoch": 0.22258265751715534, "grad_norm": 1.2967400550842285, "learning_rate": 1.2132542067223685e-05, "loss": 5.6498, "step": 1338 },
+    { "epoch": 0.22274901226866292, "grad_norm": 1.3642213344573975, "learning_rate": 1.2037667919721506e-05, "loss": 7.0193, "step": 1339 },
+    { "epoch": 0.2229153670201705, "grad_norm": 1.4066728353500366, "learning_rate": 1.1943142416334596e-05, "loss": 5.512, "step": 1340 },
+    { "epoch": 0.2230817217716781, "grad_norm": 1.507125973701477, "learning_rate": 1.1848965931720569e-05, "loss": 5.9659, "step": 1341 },
+    { "epoch": 0.2232480765231857, "grad_norm": 1.264590859413147, "learning_rate": 1.17551388391537e-05, "loss": 5.4538, "step": 1342 },
+    { "epoch": 0.2234144312746933, "grad_norm": 1.489986538887024, "learning_rate": 1.1661661510523502e-05, "loss": 6.089, "step": 1343 },
+    { "epoch": 0.22358078602620088, "grad_norm": 1.177182912826538, "learning_rate": 1.1568534316333101e-05, "loss": 5.433, "step": 1344 },
+    { "epoch": 0.22374714077770846, "grad_norm": 1.4035717248916626, "learning_rate": 1.1475757625697858e-05, "loss": 5.4822, "step": 1345 },
+    { "epoch": 0.22391349552921605, "grad_norm": 1.5316627025604248, "learning_rate": 1.1383331806343878e-05, "loss": 5.1412, "step": 1346 },
+    { "epoch": 0.22407985028072364, "grad_norm": 1.3394558429718018, "learning_rate": 1.1291257224606577e-05, "loss": 5.4539, "step": 1347 },
+    { "epoch": 0.22424620503223122, "grad_norm": 1.6843971014022827, "learning_rate": 1.1199534245429255e-05, "loss": 5.9262, "step": 1348 },
+    { "epoch": 0.22441255978373884, "grad_norm": 1.4021657705307007, "learning_rate": 1.1108163232361602e-05, "loss": 6.4741, "step": 1349 },
+    { "epoch": 0.22457891453524642, "grad_norm": 1.3044997453689575, "learning_rate": 1.1017144547558178e-05, "loss": 5.7573, "step": 1350 },
+    { "epoch": 0.224745269286754, "grad_norm": 1.582127571105957, "learning_rate": 1.0926478551777197e-05, "loss": 5.952, "step": 1351 },
+    { "epoch": 0.2249116240382616, "grad_norm": 4.40924596786499, "learning_rate": 1.0836165604378868e-05, "loss": 5.2732, "step": 1352 },
+    { "epoch": 0.22507797878976918, "grad_norm": 1.3366684913635254, "learning_rate": 1.074620606332416e-05, "loss": 6.1963, "step": 1353 },
+    { "epoch": 0.22524433354127676, "grad_norm": 1.2196955680847168, "learning_rate": 1.0656600285173258e-05, "loss": 5.1985, "step": 1354 },
+    { "epoch": 0.22541068829278435, "grad_norm": 1.2815883159637451, "learning_rate": 1.0567348625084127e-05, "loss": 5.8084, "step": 1355 },
+    { "epoch": 0.22557704304429196, "grad_norm": 1.446434497833252, "learning_rate": 1.0478451436811232e-05, "loss": 5.4819, "step": 1356 },
+    { "epoch": 0.22574339779579955, "grad_norm": 1.7896093130111694, "learning_rate": 1.0389909072704041e-05, "loss": 5.7501, "step": 1357 },
+    { "epoch": 0.22590975254730714, "grad_norm": 1.8210922479629517, "learning_rate": 1.0301721883705683e-05, "loss": 5.7569, "step": 1358 },
+    { "epoch": 0.22607610729881472, "grad_norm": 1.5266560316085815, "learning_rate": 1.0213890219351518e-05, "loss": 5.6647, "step": 1359 },
+    { "epoch": 0.2262424620503223, "grad_norm": 1.5579383373260498, "learning_rate": 1.0126414427767717e-05, "loss": 6.8595, "step": 1360 },
+    { "epoch": 0.2264088168018299, "grad_norm": 1.285667896270752, "learning_rate": 1.0039294855669957e-05, "loss": 5.6548, "step": 1361 },
+    { "epoch": 0.22657517155333748, "grad_norm": 1.2159571647644043, "learning_rate": 9.952531848362057e-06, "loss": 5.1403, "step": 1362 },
+    { "epoch": 0.2267415263048451, "grad_norm": 1.4399012327194214, "learning_rate": 9.866125749734534e-06, "loss": 5.4949, "step": 1363 },
+    { "epoch": 0.22690788105635268, "grad_norm": 1.2201632261276245, "learning_rate": 9.780076902263247e-06, "loss": 6.0457, "step": 1364 },
+    { "epoch": 0.22707423580786026, "grad_norm": 1.8926656246185303, "learning_rate": 9.694385647008108e-06, "loss": 5.7579, "step": 1365 },
+    { "epoch": 0.22724059055936785, "grad_norm": 1.4774664640426636, "learning_rate": 9.609052323611666e-06, "loss": 5.701, "step": 1366 },
+    { "epoch": 0.22740694531087544, "grad_norm": 1.4885610342025757, "learning_rate": 9.524077270297837e-06, "loss": 5.0835, "step": 1367 },
+    { "epoch": 0.22757330006238302, "grad_norm": 1.2831387519836426, "learning_rate": 9.439460823870471e-06, "loss": 5.5437, "step": 1368 },
+    { "epoch": 0.22773965481389064, "grad_norm": 1.364233374595642, "learning_rate": 9.355203319712025e-06, "loss": 5.6714, "step": 1369 },
+    { "epoch": 0.22790600956539822, "grad_norm": 1.4607526063919067, "learning_rate": 9.271305091782312e-06, "loss": 5.6743, "step": 1370 },
+    { "epoch": 0.2280723643169058, "grad_norm": 1.3047035932540894, "learning_rate": 9.187766472617099e-06, "loss": 6.1156, "step": 1371 },
     }
   ],
   "logging_steps": 1,
@@ -8390,12 +11177,12 @@
       "should_evaluate": false,
       "should_log": false,
       "should_save": true,
-      "should_training_stop":
     },
     "attributes": {}
   }
 },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
+
"epoch": 0.2282387190684134,
|
| 9642 |
+
"grad_norm": 1.2246792316436768,
|
| 9643 |
+
"learning_rate": 9.104587793326901e-06,
|
| 9644 |
+
"loss": 5.9471,
|
| 9645 |
+
"step": 1372
|
| 9646 |
+
},
|
| 9647 |
+
{
|
| 9648 |
+
"epoch": 0.22840507381992098,
|
| 9649 |
+
"grad_norm": 1.345313549041748,
|
| 9650 |
+
"learning_rate": 9.021769383595502e-06,
|
| 9651 |
+
"loss": 5.7954,
|
| 9652 |
+
"step": 1373
|
| 9653 |
+
},
|
| 9654 |
+
{
|
| 9655 |
+
"epoch": 0.22857142857142856,
|
| 9656 |
+
"grad_norm": 1.6436344385147095,
|
| 9657 |
+
"learning_rate": 8.939311571678754e-06,
|
| 9658 |
+
"loss": 5.6346,
|
| 9659 |
+
"step": 1374
|
| 9660 |
+
},
|
| 9661 |
+
{
|
| 9662 |
+
"epoch": 0.22873778332293615,
|
| 9663 |
+
"grad_norm": 1.4638996124267578,
|
| 9664 |
+
"learning_rate": 8.85721468440327e-06,
|
| 9665 |
+
"loss": 5.3217,
|
| 9666 |
+
"step": 1375
|
| 9667 |
+
},
|
| 9668 |
+
{
|
| 9669 |
+
"epoch": 0.22890413807444376,
|
| 9670 |
+
"grad_norm": 1.4186853170394897,
|
| 9671 |
+
"learning_rate": 8.775479047165102e-06,
|
| 9672 |
+
"loss": 4.8422,
|
| 9673 |
+
"step": 1376
|
| 9674 |
+
},
|
| 9675 |
+
{
|
| 9676 |
+
"epoch": 0.22907049282595135,
|
| 9677 |
+
"grad_norm": 1.3675562143325806,
|
| 9678 |
+
"learning_rate": 8.69410498392853e-06,
|
| 9679 |
+
"loss": 5.28,
|
| 9680 |
+
"step": 1377
|
| 9681 |
+
},
|
| 9682 |
+
{
|
| 9683 |
+
"epoch": 0.22923684757745894,
|
| 9684 |
+
"grad_norm": 1.3891630172729492,
|
| 9685 |
+
"learning_rate": 8.613092817224611e-06,
|
| 9686 |
+
"loss": 5.3017,
|
| 9687 |
+
"step": 1378
|
| 9688 |
+
},
|
| 9689 |
+
{
|
| 9690 |
+
"epoch": 0.22940320232896652,
|
| 9691 |
+
"grad_norm": 1.227776050567627,
|
| 9692 |
+
"learning_rate": 8.53244286815006e-06,
|
| 9693 |
+
"loss": 4.819,
|
| 9694 |
+
"step": 1379
|
| 9695 |
+
},
|
| 9696 |
+
{
|
| 9697 |
+
"epoch": 0.2295695570804741,
|
| 9698 |
+
"grad_norm": 1.2639137506484985,
|
| 9699 |
+
"learning_rate": 8.452155456365918e-06,
|
| 9700 |
+
"loss": 4.0735,
|
| 9701 |
+
"step": 1380
|
| 9702 |
+
},
|
| 9703 |
+
{
|
| 9704 |
+
"epoch": 0.2297359118319817,
|
| 9705 |
+
"grad_norm": 1.4644341468811035,
|
| 9706 |
+
"learning_rate": 8.372230900096256e-06,
|
| 9707 |
+
"loss": 5.3508,
|
| 9708 |
+
"step": 1381
|
| 9709 |
+
},
|
| 9710 |
+
{
|
| 9711 |
+
"epoch": 0.22990226658348928,
|
| 9712 |
+
"grad_norm": 1.3387385606765747,
|
| 9713 |
+
"learning_rate": 8.292669516127039e-06,
|
| 9714 |
+
"loss": 5.5769,
|
| 9715 |
+
"step": 1382
|
| 9716 |
+
},
|
| 9717 |
+
{
|
| 9718 |
+
"epoch": 0.2300686213349969,
|
| 9719 |
+
"grad_norm": 1.3490971326828003,
|
| 9720 |
+
"learning_rate": 8.213471619804647e-06,
|
| 9721 |
+
"loss": 5.4467,
|
| 9722 |
+
"step": 1383
|
| 9723 |
+
},
|
| 9724 |
+
{
|
| 9725 |
+
"epoch": 0.23023497608650448,
|
| 9726 |
+
"grad_norm": 1.3893576860427856,
|
| 9727 |
+
"learning_rate": 8.134637525034839e-06,
|
| 9728 |
+
"loss": 5.4565,
|
| 9729 |
+
"step": 1384
|
| 9730 |
+
},
|
| 9731 |
+
{
|
| 9732 |
+
"epoch": 0.23040133083801206,
|
| 9733 |
+
"grad_norm": 1.9283785820007324,
|
| 9734 |
+
"learning_rate": 8.056167544281378e-06,
|
| 9735 |
+
"loss": 5.4716,
|
| 9736 |
+
"step": 1385
|
| 9737 |
+
},
|
| 9738 |
+
{
|
| 9739 |
+
"epoch": 0.23056768558951965,
|
| 9740 |
+
"grad_norm": 1.4280201196670532,
|
| 9741 |
+
"learning_rate": 7.978061988564867e-06,
|
| 9742 |
+
"loss": 5.7357,
|
| 9743 |
+
"step": 1386
|
| 9744 |
+
},
|
| 9745 |
+
{
|
| 9746 |
+
"epoch": 0.23073404034102724,
|
| 9747 |
+
"grad_norm": 1.155755877494812,
|
| 9748 |
+
"learning_rate": 7.900321167461523e-06,
|
| 9749 |
+
"loss": 5.3957,
|
| 9750 |
+
"step": 1387
|
| 9751 |
+
},
|
| 9752 |
+
{
|
| 9753 |
+
"epoch": 0.23090039509253482,
|
| 9754 |
+
"grad_norm": 1.4284088611602783,
|
| 9755 |
+
"learning_rate": 7.822945389101832e-06,
|
| 9756 |
+
"loss": 5.8997,
|
| 9757 |
+
"step": 1388
|
| 9758 |
+
},
|
| 9759 |
+
{
|
| 9760 |
+
"epoch": 0.2310667498440424,
|
| 9761 |
+
"grad_norm": 1.5528028011322021,
|
| 9762 |
+
"learning_rate": 7.745934960169454e-06,
|
| 9763 |
+
"loss": 5.5373,
|
| 9764 |
+
"step": 1389
|
| 9765 |
+
},
|
| 9766 |
+
{
|
| 9767 |
+
"epoch": 0.23123310459555002,
|
| 9768 |
+
"grad_norm": 1.3867017030715942,
|
| 9769 |
+
"learning_rate": 7.669290185899946e-06,
|
| 9770 |
+
"loss": 6.7341,
|
| 9771 |
+
"step": 1390
|
| 9772 |
+
},
|
| 9773 |
+
{
|
| 9774 |
+
"epoch": 0.2313994593470576,
|
| 9775 |
+
"grad_norm": 1.635018229484558,
|
| 9776 |
+
"learning_rate": 7.59301137007965e-06,
|
| 9777 |
+
"loss": 6.6894,
|
| 9778 |
+
"step": 1391
|
| 9779 |
+
},
|
| 9780 |
+
{
|
| 9781 |
+
"epoch": 0.2315658140985652,
|
| 9782 |
+
"grad_norm": 1.9656907320022583,
|
| 9783 |
+
"learning_rate": 7.517098815044288e-06,
|
| 9784 |
+
"loss": 5.4418,
|
| 9785 |
+
"step": 1392
|
| 9786 |
+
},
|
| 9787 |
+
{
|
| 9788 |
+
"epoch": 0.23173216885007278,
|
| 9789 |
+
"grad_norm": 2.508104085922241,
|
| 9790 |
+
"learning_rate": 7.441552821677966e-06,
|
| 9791 |
+
"loss": 5.7206,
|
| 9792 |
+
"step": 1393
|
| 9793 |
+
},
|
| 9794 |
+
{
|
| 9795 |
+
"epoch": 0.23189852360158036,
|
| 9796 |
+
"grad_norm": 1.3431934118270874,
|
| 9797 |
+
"learning_rate": 7.366373689411865e-06,
|
| 9798 |
+
"loss": 6.1445,
|
| 9799 |
+
"step": 1394
|
| 9800 |
+
},
|
| 9801 |
+
{
|
| 9802 |
+
"epoch": 0.23206487835308795,
|
| 9803 |
+
"grad_norm": 1.424025535583496,
|
| 9804 |
+
"learning_rate": 7.291561716223094e-06,
|
| 9805 |
+
"loss": 5.0971,
|
| 9806 |
+
"step": 1395
|
| 9807 |
+
},
|
| 9808 |
+
{
|
| 9809 |
+
"epoch": 0.23223123310459556,
|
| 9810 |
+
"grad_norm": 1.2438124418258667,
|
| 9811 |
+
"learning_rate": 7.217117198633561e-06,
|
| 9812 |
+
"loss": 5.4709,
|
| 9813 |
+
"step": 1396
|
| 9814 |
+
},
|
| 9815 |
+
{
|
| 9816 |
+
"epoch": 0.23239758785610315,
|
| 9817 |
+
"grad_norm": 1.3939536809921265,
|
| 9818 |
+
"learning_rate": 7.143040431708625e-06,
|
| 9819 |
+
"loss": 5.659,
|
| 9820 |
+
"step": 1397
|
| 9821 |
+
},
|
| 9822 |
+
{
|
| 9823 |
+
"epoch": 0.23256394260761074,
|
| 9824 |
+
"grad_norm": 1.2948864698410034,
|
| 9825 |
+
"learning_rate": 7.069331709056159e-06,
|
| 9826 |
+
"loss": 5.5306,
|
| 9827 |
+
"step": 1398
|
| 9828 |
+
},
|
| 9829 |
+
{
|
| 9830 |
+
"epoch": 0.23273029735911832,
|
| 9831 |
+
"grad_norm": 1.5987274646759033,
|
| 9832 |
+
"learning_rate": 6.995991322825191e-06,
|
| 9833 |
+
"loss": 5.4539,
|
| 9834 |
+
"step": 1399
|
| 9835 |
+
},
|
| 9836 |
+
{
|
| 9837 |
+
"epoch": 0.2328966521106259,
|
| 9838 |
+
"grad_norm": 1.3524385690689087,
|
| 9839 |
+
"learning_rate": 6.923019563704847e-06,
|
| 9840 |
+
"loss": 6.2608,
|
| 9841 |
+
"step": 1400
|
| 9842 |
+
},
|
| 9843 |
+
{
|
| 9844 |
+
"epoch": 0.2330630068621335,
|
| 9845 |
+
"grad_norm": 1.242741346359253,
|
| 9846 |
+
"learning_rate": 6.8504167209232364e-06,
|
| 9847 |
+
"loss": 4.864,
|
| 9848 |
+
"step": 1401
|
| 9849 |
+
},
|
| 9850 |
+
{
|
| 9851 |
+
"epoch": 0.23322936161364108,
|
| 9852 |
+
"grad_norm": 1.3633655309677124,
|
| 9853 |
+
"learning_rate": 6.778183082246148e-06,
|
| 9854 |
+
"loss": 4.7383,
|
| 9855 |
+
"step": 1402
|
| 9856 |
+
},
|
| 9857 |
+
{
|
| 9858 |
+
"epoch": 0.2333957163651487,
|
| 9859 |
+
"grad_norm": 1.393941879272461,
|
| 9860 |
+
"learning_rate": 6.706318933976064e-06,
|
| 9861 |
+
"loss": 6.5748,
|
| 9862 |
+
"step": 1403
|
| 9863 |
+
},
|
| 9864 |
+
{
|
| 9865 |
+
"epoch": 0.23356207111665628,
|
| 9866 |
+
"grad_norm": 1.8214443922042847,
|
| 9867 |
+
"learning_rate": 6.6348245609509475e-06,
|
| 9868 |
+
"loss": 6.0338,
|
| 9869 |
+
"step": 1404
|
| 9870 |
+
},
|
| 9871 |
+
{
|
| 9872 |
+
"epoch": 0.23372842586816386,
|
| 9873 |
+
"grad_norm": 1.4842872619628906,
|
| 9874 |
+
"learning_rate": 6.563700246543159e-06,
|
| 9875 |
+
"loss": 5.9823,
|
| 9876 |
+
"step": 1405
|
| 9877 |
+
},
|
| 9878 |
+
{
|
| 9879 |
+
"epoch": 0.23389478061967145,
|
| 9880 |
+
"grad_norm": 1.2983368635177612,
|
| 9881 |
+
"learning_rate": 6.492946272658296e-06,
|
| 9882 |
+
"loss": 4.4621,
|
| 9883 |
+
"step": 1406
|
| 9884 |
+
},
|
| 9885 |
+
{
|
| 9886 |
+
"epoch": 0.23406113537117904,
|
| 9887 |
+
"grad_norm": 1.8742061853408813,
|
| 9888 |
+
"learning_rate": 6.422562919734076e-06,
|
| 9889 |
+
"loss": 5.624,
|
| 9890 |
+
"step": 1407
|
| 9891 |
+
},
|
| 9892 |
+
{
|
| 9893 |
+
"epoch": 0.23422749012268662,
|
| 9894 |
+
"grad_norm": 1.5724475383758545,
|
| 9895 |
+
"learning_rate": 6.3525504667392595e-06,
|
| 9896 |
+
"loss": 5.6431,
|
| 9897 |
+
"step": 1408
|
| 9898 |
+
},
|
| 9899 |
+
{
|
| 9900 |
+
"epoch": 0.2343938448741942,
|
| 9901 |
+
"grad_norm": 1.2614651918411255,
|
| 9902 |
+
"learning_rate": 6.282909191172504e-06,
|
| 9903 |
+
"loss": 5.6967,
|
| 9904 |
+
"step": 1409
|
| 9905 |
+
},
|
| 9906 |
+
{
|
| 9907 |
+
"epoch": 0.23456019962570182,
|
| 9908 |
+
"grad_norm": 1.445389747619629,
|
| 9909 |
+
"learning_rate": 6.213639369061297e-06,
|
| 9910 |
+
"loss": 5.6292,
|
| 9911 |
+
"step": 1410
|
| 9912 |
+
},
|
| 9913 |
+
{
|
| 9914 |
+
"epoch": 0.2347265543772094,
|
| 9915 |
+
"grad_norm": 1.9041169881820679,
|
| 9916 |
+
"learning_rate": 6.14474127496083e-06,
|
| 9917 |
+
"loss": 5.863,
|
| 9918 |
+
"step": 1411
|
| 9919 |
+
},
|
| 9920 |
+
{
|
| 9921 |
+
"epoch": 0.234892909128717,
|
| 9922 |
+
"grad_norm": 1.3595950603485107,
|
| 9923 |
+
"learning_rate": 6.076215181952949e-06,
|
| 9924 |
+
"loss": 6.481,
|
| 9925 |
+
"step": 1412
|
| 9926 |
+
},
|
| 9927 |
+
{
|
| 9928 |
+
"epoch": 0.23505926388022458,
|
| 9929 |
+
"grad_norm": 1.4724054336547852,
|
| 9930 |
+
"learning_rate": 6.008061361645034e-06,
|
| 9931 |
+
"loss": 5.7306,
|
| 9932 |
+
"step": 1413
|
| 9933 |
+
},
|
| 9934 |
+
{
|
| 9935 |
+
"epoch": 0.23522561863173216,
|
| 9936 |
+
"grad_norm": 1.3218880891799927,
|
| 9937 |
+
"learning_rate": 5.940280084168947e-06,
|
| 9938 |
+
"loss": 4.827,
|
| 9939 |
+
"step": 1414
|
| 9940 |
+
},
|
| 9941 |
+
{
|
| 9942 |
+
"epoch": 0.23539197338323975,
|
| 9943 |
+
"grad_norm": 1.425896406173706,
|
| 9944 |
+
"learning_rate": 5.872871618179953e-06,
|
| 9945 |
+
"loss": 5.3692,
|
| 9946 |
+
"step": 1415
|
| 9947 |
+
},
|
| 9948 |
+
{
|
| 9949 |
+
"epoch": 0.23555832813474734,
|
| 9950 |
+
"grad_norm": 1.3277006149291992,
|
| 9951 |
+
"learning_rate": 5.805836230855655e-06,
|
| 9952 |
+
"loss": 5.4459,
|
| 9953 |
+
"step": 1416
|
| 9954 |
+
},
|
| 9955 |
+
{
|
| 9956 |
+
"epoch": 0.23572468288625495,
|
| 9957 |
+
"grad_norm": 1.417460322380066,
|
| 9958 |
+
"learning_rate": 5.739174187894925e-06,
|
| 9959 |
+
"loss": 4.9673,
|
| 9960 |
+
"step": 1417
|
| 9961 |
+
},
|
| 9962 |
+
{
|
| 9963 |
+
"epoch": 0.23589103763776254,
|
| 9964 |
+
"grad_norm": 1.5135000944137573,
|
| 9965 |
+
"learning_rate": 5.672885753516876e-06,
|
| 9966 |
+
"loss": 5.1652,
|
| 9967 |
+
"step": 1418
|
| 9968 |
+
},
|
| 9969 |
+
{
|
| 9970 |
+
"epoch": 0.23605739238927012,
|
| 9971 |
+
"grad_norm": 1.2179969549179077,
|
| 9972 |
+
"learning_rate": 5.606971190459809e-06,
|
| 9973 |
+
"loss": 5.2788,
|
| 9974 |
+
"step": 1419
|
| 9975 |
+
},
|
| 9976 |
+
{
|
| 9977 |
+
"epoch": 0.2362237471407777,
|
| 9978 |
+
"grad_norm": 1.2178833484649658,
|
| 9979 |
+
"learning_rate": 5.541430759980138e-06,
|
| 9980 |
+
"loss": 5.04,
|
| 9981 |
+
"step": 1420
|
| 9982 |
+
},
|
| 9983 |
+
{
|
| 9984 |
+
"epoch": 0.2363901018922853,
|
| 9985 |
+
"grad_norm": 1.6833577156066895,
|
| 9986 |
+
"learning_rate": 5.476264721851387e-06,
|
| 9987 |
+
"loss": 5.3247,
|
| 9988 |
+
"step": 1421
|
| 9989 |
+
},
|
| 9990 |
+
{
|
| 9991 |
+
"epoch": 0.23655645664379288,
|
| 9992 |
+
"grad_norm": 2.341933012008667,
|
| 9993 |
+
"learning_rate": 5.411473334363171e-06,
|
| 9994 |
+
"loss": 5.043,
|
| 9995 |
+
"step": 1422
|
| 9996 |
+
},
|
| 9997 |
+
{
|
| 9998 |
+
"epoch": 0.2367228113953005,
|
| 9999 |
+
"grad_norm": 1.5201689004898071,
|
| 10000 |
+
"learning_rate": 5.3470568543201314e-06,
|
| 10001 |
+
"loss": 6.8757,
|
| 10002 |
+
"step": 1423
|
| 10003 |
+
},
|
| 10004 |
+
{
|
| 10005 |
+
"epoch": 0.23688916614680808,
|
| 10006 |
+
"grad_norm": 1.3871538639068604,
|
| 10007 |
+
"learning_rate": 5.283015537040958e-06,
|
| 10008 |
+
"loss": 5.548,
|
| 10009 |
+
"step": 1424
|
| 10010 |
+
},
|
| 10011 |
+
{
|
| 10012 |
+
"epoch": 0.23705552089831566,
|
| 10013 |
+
"grad_norm": 1.1558271646499634,
|
| 10014 |
+
"learning_rate": 5.2193496363573516e-06,
|
| 10015 |
+
"loss": 4.2944,
|
| 10016 |
+
"step": 1425
|
| 10017 |
+
},
|
| 10018 |
+
{
|
| 10019 |
+
"epoch": 0.23722187564982325,
|
| 10020 |
+
"grad_norm": 1.4553383588790894,
|
| 10021 |
+
"learning_rate": 5.1560594046130115e-06,
|
| 10022 |
+
"loss": 5.5066,
|
| 10023 |
+
"step": 1426
|
| 10024 |
+
},
|
| 10025 |
+
{
|
| 10026 |
+
"epoch": 0.23738823040133084,
|
| 10027 |
+
"grad_norm": 1.6435545682907104,
|
| 10028 |
+
"learning_rate": 5.093145092662676e-06,
|
| 10029 |
+
"loss": 6.4555,
|
| 10030 |
+
"step": 1427
|
| 10031 |
+
},
|
| 10032 |
+
{
|
| 10033 |
+
"epoch": 0.23755458515283842,
|
| 10034 |
+
"grad_norm": 1.952124834060669,
|
| 10035 |
+
"learning_rate": 5.0306069498710974e-06,
|
| 10036 |
+
"loss": 6.2579,
|
| 10037 |
+
"step": 1428
|
| 10038 |
+
},
|
| 10039 |
+
{
|
| 10040 |
+
"epoch": 0.237720939904346,
|
| 10041 |
+
"grad_norm": 2.2525391578674316,
|
| 10042 |
+
"learning_rate": 4.968445224112051e-06,
|
| 10043 |
+
"loss": 4.8425,
|
| 10044 |
+
"step": 1429
|
| 10045 |
+
},
|
| 10046 |
+
{
|
| 10047 |
+
"epoch": 0.23788729465585362,
|
| 10048 |
+
"grad_norm": 1.8257288932800293,
|
| 10049 |
+
"learning_rate": 4.9066601617673515e-06,
|
| 10050 |
+
"loss": 5.0168,
|
| 10051 |
+
"step": 1430
|
| 10052 |
+
},
|
| 10053 |
+
{
|
| 10054 |
+
"epoch": 0.2380536494073612,
|
| 10055 |
+
"grad_norm": 1.468742847442627,
|
| 10056 |
+
"learning_rate": 4.845252007725909e-06,
|
| 10057 |
+
"loss": 4.9844,
|
| 10058 |
+
"step": 1431
|
| 10059 |
+
},
|
| 10060 |
+
{
|
| 10061 |
+
"epoch": 0.2382200041588688,
|
| 10062 |
+
"grad_norm": 1.226224660873413,
|
| 10063 |
+
"learning_rate": 4.784221005382705e-06,
|
| 10064 |
+
"loss": 5.3326,
|
| 10065 |
+
"step": 1432
|
| 10066 |
+
},
|
| 10067 |
+
{
|
| 10068 |
+
"epoch": 0.23838635891037638,
|
| 10069 |
+
"grad_norm": 1.5307928323745728,
|
| 10070 |
+
"learning_rate": 4.723567396637873e-06,
|
| 10071 |
+
"loss": 6.0279,
|
| 10072 |
+
"step": 1433
|
| 10073 |
+
},
|
| 10074 |
+
{
|
| 10075 |
+
"epoch": 0.23855271366188396,
|
| 10076 |
+
"grad_norm": 1.4953978061676025,
|
| 10077 |
+
"learning_rate": 4.663291421895743e-06,
|
| 10078 |
+
"loss": 5.8738,
|
| 10079 |
+
"step": 1434
|
| 10080 |
+
},
|
| 10081 |
+
{
|
| 10082 |
+
"epoch": 0.23871906841339155,
|
| 10083 |
+
"grad_norm": 1.2341142892837524,
|
| 10084 |
+
"learning_rate": 4.603393320063831e-06,
|
| 10085 |
+
"loss": 5.036,
|
| 10086 |
+
"step": 1435
|
| 10087 |
+
},
|
| 10088 |
+
{
|
| 10089 |
+
"epoch": 0.23888542316489914,
|
| 10090 |
+
"grad_norm": 1.4910740852355957,
|
| 10091 |
+
"learning_rate": 4.543873328551951e-06,
|
| 10092 |
+
"loss": 5.101,
|
| 10093 |
+
"step": 1436
|
| 10094 |
+
},
|
| 10095 |
+
{
|
| 10096 |
+
"epoch": 0.23905177791640675,
|
| 10097 |
+
"grad_norm": 1.460603952407837,
|
| 10098 |
+
"learning_rate": 4.484731683271259e-06,
|
| 10099 |
+
"loss": 6.2164,
|
| 10100 |
+
"step": 1437
|
| 10101 |
+
},
|
| 10102 |
+
{
|
| 10103 |
+
"epoch": 0.23921813266791433,
|
| 10104 |
+
"grad_norm": 1.2510490417480469,
|
| 10105 |
+
"learning_rate": 4.425968618633292e-06,
|
| 10106 |
+
"loss": 4.843,
|
| 10107 |
+
"step": 1438
|
| 10108 |
+
},
|
| 10109 |
+
{
|
| 10110 |
+
"epoch": 0.23938448741942192,
|
| 10111 |
+
"grad_norm": 1.265286922454834,
|
| 10112 |
+
"learning_rate": 4.367584367549082e-06,
|
| 10113 |
+
"loss": 5.0806,
|
| 10114 |
+
"step": 1439
|
| 10115 |
+
},
|
| 10116 |
+
{
|
| 10117 |
+
"epoch": 0.2395508421709295,
|
| 10118 |
+
"grad_norm": 1.4635465145111084,
|
| 10119 |
+
"learning_rate": 4.309579161428201e-06,
|
| 10120 |
+
"loss": 5.6361,
|
| 10121 |
+
"step": 1440
|
| 10122 |
+
},
|
| 10123 |
+
{
|
| 10124 |
+
"epoch": 0.2397171969224371,
|
| 10125 |
+
"grad_norm": 1.328041434288025,
|
| 10126 |
+
"learning_rate": 4.251953230177852e-06,
|
| 10127 |
+
"loss": 6.0966,
|
| 10128 |
+
"step": 1441
|
| 10129 |
+
},
|
| 10130 |
+
{
|
| 10131 |
+
"epoch": 0.23988355167394468,
|
| 10132 |
+
"grad_norm": 1.447161316871643,
|
| 10133 |
+
"learning_rate": 4.1947068022019646e-06,
|
| 10134 |
+
"loss": 5.2537,
|
| 10135 |
+
"step": 1442
|
| 10136 |
+
},
|
| 10137 |
+
{
|
| 10138 |
+
"epoch": 0.24004990642545226,
|
| 10139 |
+
"grad_norm": 1.4222345352172852,
|
| 10140 |
+
"learning_rate": 4.137840104400259e-06,
|
| 10141 |
+
"loss": 5.5492,
|
| 10142 |
+
"step": 1443
|
| 10143 |
+
},
|
| 10144 |
+
{
|
| 10145 |
+
"epoch": 0.24021626117695988,
|
| 10146 |
+
"grad_norm": 2.0605030059814453,
|
| 10147 |
+
"learning_rate": 4.081353362167406e-06,
|
| 10148 |
+
"loss": 5.6321,
|
| 10149 |
+
"step": 1444
|
| 10150 |
+
},
|
| 10151 |
+
{
|
| 10152 |
+
"epoch": 0.24038261592846746,
|
| 10153 |
+
"grad_norm": 1.3179957866668701,
|
| 10154 |
+
"learning_rate": 4.02524679939208e-06,
|
| 10155 |
+
"loss": 5.6233,
|
| 10156 |
+
"step": 1445
|
| 10157 |
+
},
|
| 10158 |
+
{
|
| 10159 |
+
"epoch": 0.24054897067997505,
|
| 10160 |
+
"grad_norm": 1.488689661026001,
|
| 10161 |
+
"learning_rate": 3.969520638456103e-06,
|
| 10162 |
+
"loss": 5.4363,
|
| 10163 |
+
"step": 1446
|
| 10164 |
+
},
|
| 10165 |
+
{
|
| 10166 |
+
"epoch": 0.24071532543148264,
|
| 10167 |
+
"grad_norm": 1.2847869396209717,
|
| 10168 |
+
"learning_rate": 3.91417510023353e-06,
|
| 10169 |
+
"loss": 5.2523,
|
| 10170 |
+
"step": 1447
|
| 10171 |
+
},
|
| 10172 |
+
{
|
| 10173 |
+
"epoch": 0.24088168018299022,
|
| 10174 |
+
"grad_norm": 1.4571582078933716,
|
| 10175 |
+
"learning_rate": 3.859210404089808e-06,
|
| 10176 |
+
"loss": 5.9545,
|
| 10177 |
+
"step": 1448
|
| 10178 |
+
},
|
| 10179 |
+
{
|
| 10180 |
+
"epoch": 0.2410480349344978,
|
| 10181 |
+
"grad_norm": 1.4029744863510132,
|
| 10182 |
+
"learning_rate": 3.8046267678809034e-06,
|
| 10183 |
+
"loss": 5.8464,
|
| 10184 |
+
"step": 1449
|
| 10185 |
+
},
|
| 10186 |
+
{
|
| 10187 |
+
"epoch": 0.2412143896860054,
|
| 10188 |
+
"grad_norm": 1.1866133213043213,
|
| 10189 |
+
"learning_rate": 3.750424407952402e-06,
|
| 10190 |
+
"loss": 5.1071,
|
| 10191 |
+
"step": 1450
|
| 10192 |
+
},
|
| 10193 |
+
{
|
| 10194 |
+
"epoch": 0.241380744437513,
|
| 10195 |
+
"grad_norm": 2.056365966796875,
|
| 10196 |
+
"learning_rate": 3.696603539138699e-06,
|
| 10197 |
+
"loss": 4.642,
|
| 10198 |
+
"step": 1451
|
| 10199 |
+
},
|
| 10200 |
+
{
|
| 10201 |
+
"epoch": 0.2415470991890206,
|
| 10202 |
+
"grad_norm": 1.5950109958648682,
|
| 10203 |
+
"learning_rate": 3.6431643747621203e-06,
|
| 10204 |
+
"loss": 5.3046,
|
| 10205 |
+
"step": 1452
|
| 10206 |
+
},
|
| 10207 |
+
{
|
| 10208 |
+
"epoch": 0.24171345394052818,
|
| 10209 |
+
"grad_norm": 1.3917522430419922,
|
| 10210 |
+
"learning_rate": 3.5901071266320805e-06,
|
| 10211 |
+
"loss": 4.8896,
|
| 10212 |
+
"step": 1453
|
| 10213 |
+
},
|
| 10214 |
+
{
|
| 10215 |
+
"epoch": 0.24187980869203576,
|
| 10216 |
+
"grad_norm": 1.493078589439392,
|
| 10217 |
+
"learning_rate": 3.53743200504425e-06,
|
| 10218 |
+
"loss": 5.0957,
|
| 10219 |
+
"step": 1454
|
| 10220 |
+
},
|
| 10221 |
+
{
|
| 10222 |
+
"epoch": 0.24204616344354335,
|
| 10223 |
+
"grad_norm": 1.3064144849777222,
|
| 10224 |
+
"learning_rate": 3.4851392187797096e-06,
|
| 10225 |
+
"loss": 5.4055,
|
| 10226 |
+
"step": 1455
|
| 10227 |
+
},
|
| 10228 |
+
{
|
| 10229 |
+
"epoch": 0.24221251819505094,
|
| 10230 |
+
"grad_norm": 1.4962061643600464,
|
| 10231 |
+
"learning_rate": 3.4332289751041526e-06,
|
| 10232 |
+
"loss": 5.1962,
|
| 10233 |
+
"step": 1456
|
| 10234 |
+
},
|
| 10235 |
+
{
|
| 10236 |
+
"epoch": 0.24237887294655855,
|
| 10237 |
+
"grad_norm": 1.4521700143814087,
|
| 10238 |
+
"learning_rate": 3.3817014797669855e-06,
|
| 10239 |
+
"loss": 6.1374,
|
| 10240 |
+
"step": 1457
|
| 10241 |
+
},
|
| 10242 |
+
{
|
| 10243 |
+
"epoch": 0.24254522769806613,
|
| 10244 |
+
"grad_norm": 2.0277411937713623,
|
| 10245 |
+
"learning_rate": 3.3305569370006394e-06,
|
| 10246 |
+
"loss": 5.694,
|
| 10247 |
+
"step": 1458
|
| 10248 |
+
},
|
| 10249 |
+
{
|
| 10250 |
+
"epoch": 0.24271158244957372,
|
| 10251 |
+
"grad_norm": 1.408645510673523,
|
| 10252 |
+
"learning_rate": 3.2797955495196485e-06,
|
| 10253 |
+
"loss": 5.5106,
|
| 10254 |
+
"step": 1459
|
| 10255 |
+
},
|
| 10256 |
+
{
|
| 10257 |
+
"epoch": 0.2428779372010813,
|
| 10258 |
+
"grad_norm": 1.380612850189209,
|
| 10259 |
+
"learning_rate": 3.229417518519884e-06,
|
| 10260 |
+
"loss": 5.329,
|
| 10261 |
+
"step": 1460
|
| 10262 |
+
},
|
| 10263 |
+
{
|
| 10264 |
+
"epoch": 0.2430442919525889,
|
| 10265 |
+
"grad_norm": 1.4219053983688354,
|
| 10266 |
+
"learning_rate": 3.179423043677787e-06,
|
| 10267 |
+
"loss": 5.0802,
|
| 10268 |
+
"step": 1461
|
| 10269 |
+
},
|
| 10270 |
+
{
|
| 10271 |
+
"epoch": 0.24321064670409648,
|
| 10272 |
+
"grad_norm": 1.5316503047943115,
|
| 10273 |
+
"learning_rate": 3.129812323149528e-06,
|
| 10274 |
+
"loss": 6.6139,
|
| 10275 |
+
"step": 1462
|
| 10276 |
+
},
|
| 10277 |
+
{
|
| 10278 |
+
"epoch": 0.24337700145560406,
|
| 10279 |
+
"grad_norm": 1.2716059684753418,
|
| 10280 |
+
"learning_rate": 3.0805855535702478e-06,
|
| 10281 |
+
"loss": 4.9578,
|
| 10282 |
+
"step": 1463
|
| 10283 |
+
},
|
| 10284 |
+
{
|
| 10285 |
+
"epoch": 0.24354335620711168,
|
| 10286 |
+
"grad_norm": 1.3404194116592407,
|
| 10287 |
+
"learning_rate": 3.031742930053283e-06,
|
| 10288 |
+
"loss": 5.57,
|
| 10289 |
+
"step": 1464
|
| 10290 |
+
},
|
| 10291 |
+
{
|
| 10292 |
+
"epoch": 0.24370971095861926,
|
| 10293 |
+
"grad_norm": 1.31460702419281,
|
| 10294 |
+
"learning_rate": 2.983284646189377e-06,
|
| 10295 |
+
"loss": 5.5931,
|
| 10296 |
+
"step": 1465
|
| 10297 |
+
},
|
| 10298 |
+
{
|
| 10299 |
+
"epoch": 0.24387606571012685,
|
| 10300 |
+
"grad_norm": 1.2760391235351562,
|
| 10301 |
+
"learning_rate": 2.9352108940459035e-06,
|
| 10302 |
+
"loss": 5.3554,
|
| 10303 |
+
"step": 1466
|
| 10304 |
+
},
|
| 10305 |
+
{
|
| 10306 |
+
"epoch": 0.24404242046163444,
|
| 10307 |
+
"grad_norm": 1.5211275815963745,
|
| 10308 |
+
"learning_rate": 2.887521864166154e-06,
|
| 10309 |
+
"loss": 4.8912,
|
| 10310 |
+
"step": 1467
|
| 10311 |
+
},
|
| 10312 |
+
{
|
| 10313 |
+
"epoch": 0.24420877521314202,
|
| 10314 |
+
"grad_norm": 1.5410618782043457,
|
| 10315 |
+
"learning_rate": 2.8402177455685296e-06,
|
| 10316 |
+
"loss": 5.3953,
|
| 10317 |
+
"step": 1468
|
| 10318 |
+
},
|
| 10319 |
+
{
|
| 10320 |
+
"epoch": 0.2443751299646496,
|
| 10321 |
+
"grad_norm": 1.3413052558898926,
|
| 10322 |
+
"learning_rate": 2.7932987257458078e-06,
|
| 10323 |
+
"loss": 5.6461,
|
| 10324 |
+
"step": 1469
|
| 10325 |
+
},
|
| 10326 |
+
{
|
| 10327 |
+
"epoch": 0.2445414847161572,
|
| 10328 |
+
"grad_norm": 1.2666797637939453,
|
| 10329 |
+
"learning_rate": 2.74676499066443e-06,
|
| 10330 |
+
"loss": 5.7966,
|
| 10331 |
+
"step": 1470
|
| 10332 |
+
},
|
| 10333 |
+
{
|
| 10334 |
+
"epoch": 0.2447078394676648,
|
| 10335 |
+
"grad_norm": 1.4352855682373047,
|
| 10336 |
+
"learning_rate": 2.7006167247636825e-06,
|
| 10337 |
+
"loss": 5.7694,
|
| 10338 |
+
"step": 1471
|
| 10339 |
+
},
|
| 10340 |
+
{
|
| 10341 |
+
"epoch": 0.2448741942191724,
|
| 10342 |
+
"grad_norm": 1.1607012748718262,
|
| 10343 |
+
"learning_rate": 2.654854110955085e-06,
|
| 10344 |
+
"loss": 5.2412,
|
| 10345 |
+
"step": 1472
|
| 10346 |
+
},
|
| 10347 |
+
{
|
| 10348 |
+
"epoch": 0.24504054897067998,
|
| 10349 |
+
"grad_norm": 1.308740258216858,
|
| 10350 |
+
"learning_rate": 2.609477330621557e-06,
|
| 10351 |
+
"loss": 5.8695,
|
| 10352 |
+
"step": 1473
|
| 10353 |
+
},
|
| 10354 |
+
{
|
| 10355 |
+
"epoch": 0.24520690372218756,
|
| 10356 |
+
"grad_norm": 1.5638188123703003,
|
| 10357 |
+
"learning_rate": 2.564486563616786e-06,
|
| 10358 |
+
"loss": 5.0983,
|
| 10359 |
+
"step": 1474
|
| 10360 |
+
},
|
| 10361 |
+
{
|
| 10362 |
+
"epoch": 0.24537325847369515,
|
| 10363 |
+
"grad_norm": 1.5993167161941528,
|
| 10364 |
+
"learning_rate": 2.5198819882644163e-06,
|
| 10365 |
+
"loss": 5.6609,
|
| 10366 |
+
"step": 1475
|
| 10367 |
+
},
|
| 10368 |
+
{
|
| 10369 |
+
"epoch": 0.24553961322520274,
|
| 10370 |
+
"grad_norm": 1.2868083715438843,
|
| 10371 |
+
"learning_rate": 2.475663781357429e-06,
|
| 10372 |
+
"loss": 5.8212,
|
| 10373 |
+
"step": 1476
|
| 10374 |
+
},
|
| 10375 |
+
{
|
| 10376 |
+
"epoch": 0.24570596797671032,
|
| 10377 |
+
"grad_norm": 1.4746071100234985,
|
| 10378 |
+
"learning_rate": 2.4318321181574287e-06,
|
| 10379 |
+
"loss": 6.0063,
|
| 10380 |
+
"step": 1477
|
| 10381 |
+
},
|
| 10382 |
+
{
|
| 10383 |
+
"epoch": 0.24587232272821793,
|
| 10384 |
+
"grad_norm": 1.905931830406189,
|
| 10385 |
+
"learning_rate": 2.388387172393891e-06,
|
| 10386 |
+
"loss": 5.8212,
|
| 10387 |
+
"step": 1478
|
| 10388 |
+
},
|
| 10389 |
+
{
|
| 10390 |
+
"epoch": 0.24603867747972552,
|
| 10391 |
+
"grad_norm": 1.387951135635376,
|
| 10392 |
+
"learning_rate": 2.3453291162635505e-06,
|
| 10393 |
+
"loss": 5.4859,
|
| 10394 |
+
"step": 1479
|
| 10395 |
+
},
|
| 10396 |
+
{
|
| 10397 |
+
"epoch": 0.2462050322312331,
|
| 10398 |
+
"grad_norm": 1.5619620084762573,
|
| 10399 |
+
"learning_rate": 2.3026581204296347e-06,
|
| 10400 |
+
"loss": 4.5281,
|
| 10401 |
+
"step": 1480
|
| 10402 |
+
},
|
| 10403 |
+
{
|
| 10404 |
+
"epoch": 0.2463713869827407,
|
| 10405 |
+
"grad_norm": 1.381524682044983,
|
| 10406 |
+
"learning_rate": 2.2603743540212664e-06,
|
| 10407 |
+
"loss": 5.6491,
|
| 10408 |
+
"step": 1481
|
| 10409 |
+
},
|
| 10410 |
+
{
|
| 10411 |
+
"epoch": 0.24653774173424828,
|
| 10412 |
+
"grad_norm": 1.264552354812622,
|
| 10413 |
+
"learning_rate": 2.218477984632783e-06,
|
| 10414 |
+
"loss": 5.3561,
|
| 10415 |
+
"step": 1482
|
| 10416 |
+
},
|
| 10417 |
+
{
|
| 10418 |
+
"epoch": 0.24670409648575586,
|
| 10419 |
+
"grad_norm": 1.2778496742248535,
|
| 10420 |
+
"learning_rate": 2.176969178322985e-06,
|
| 10421 |
+
"loss": 5.4601,
|
| 10422 |
+
"step": 1483
|
| 10423 |
+
},
|
| 10424 |
+
{
|
| 10425 |
+
"epoch": 0.24687045123726348,
|
| 10426 |
+
"grad_norm": 1.3864541053771973,
|
| 10427 |
+
"learning_rate": 2.1358480996146237e-06,
|
| 10428 |
+
"loss": 5.7944,
|
| 10429 |
+
"step": 1484
|
| 10430 |
+
},
|
| 10431 |
+
{
|
| 10432 |
+
"epoch": 0.24703680598877106,
|
| 10433 |
+
"grad_norm": 1.352135419845581,
|
| 10434 |
+
"learning_rate": 2.0951149114935674e-06,
|
| 10435 |
+
"loss": 5.1032,
|
| 10436 |
+
"step": 1485
|
| 10437 |
+
},
|
| 10438 |
+
{
|
| 10439 |
+
"epoch": 0.24720316074027865,
|
| 10440 |
+
"grad_norm": 1.2946531772613525,
|
| 10441 |
+
"learning_rate": 2.0547697754083605e-06,
|
| 10442 |
+
"loss": 5.2929,
|
| 10443 |
+
"step": 1486
|
| 10444 |
+
},
|
| 10445 |
+
{
|
| 10446 |
+
"epoch": 0.24736951549178623,
|
| 10447 |
+
"grad_norm": 1.4004677534103394,
|
| 10448 |
+
"learning_rate": 2.0148128512694207e-06,
|
| 10449 |
+
"loss": 5.0623,
|
| 10450 |
+
"step": 1487
|
| 10451 |
+
},
|
| 10452 |
+
{
|
| 10453 |
+
"epoch": 0.24753587024329382,
|
| 10454 |
+
"grad_norm": 1.7179275751113892,
|
| 10455 |
+
"learning_rate": 1.9752442974484644e-06,
|
| 10456 |
+
"loss": 5.6663,
|
| 10457 |
+
"step": 1488
|
| 10458 |
+
},
|
| 10459 |
+
{
|
| 10460 |
+
"epoch": 0.2477022249948014,
|
| 10461 |
+
"grad_norm": 1.3250117301940918,
|
| 10462 |
+
"learning_rate": 1.9360642707779152e-06,
|
| 10463 |
+
"loss": 5.5852,
|
| 10464 |
+
"step": 1489
|
| 10465 |
+
},
|
| 10466 |
+
{
|
| 10467 |
+
"epoch": 0.247868579746309,
|
| 10468 |
+
"grad_norm": 1.4580737352371216,
|
| 10469 |
+
"learning_rate": 1.8972729265501755e-06,
|
| 10470 |
+
"loss": 5.9216,
|
| 10471 |
+
"step": 1490
|
| 10472 |
+
},
|
| 10473 |
+
{
|
| 10474 |
+
"epoch": 0.2480349344978166,
|
| 10475 |
+
"grad_norm": 1.4018464088439941,
|
| 10476 |
+
"learning_rate": 1.8588704185171557e-06,
|
| 10477 |
+
"loss": 4.8588,
|
| 10478 |
+
"step": 1491
|
| 10479 |
+
},
|
| 10480 |
+
{
|
| 10481 |
+
"epoch": 0.2482012892493242,
|
| 10482 |
+
"grad_norm": 1.7278724908828735,
|
| 10483 |
+
"learning_rate": 1.8208568988895558e-06,
|
| 10484 |
+
"loss": 5.9364,
|
| 10485 |
+
"step": 1492
|
| 10486 |
+
},
|
| 10487 |
+
{
|
| 10488 |
+
"epoch": 0.24836764400083178,
|
| 10489 |
+
"grad_norm": 1.3054454326629639,
|
| 10490 |
+
"learning_rate": 1.7832325183363087e-06,
|
| 10491 |
+
"loss": 5.6599,
|
| 10492 |
+
"step": 1493
|
| 10493 |
+
},
|
| 10494 |
+
{
|
| 10495 |
+
"epoch": 0.24853399875233936,
|
| 10496 |
+
"grad_norm": 1.523970127105713,
|
| 10497 |
+
"learning_rate": 1.7459974259839363e-06,
|
| 10498 |
+
"loss": 5.2485,
|
| 10499 |
+
"step": 1494
|
| 10500 |
+
},
|
| 10501 |
+
{
|
| 10502 |
+
"epoch": 0.24870035350384695,
|
| 10503 |
+
"grad_norm": 1.3739322423934937,
|
| 10504 |
+
"learning_rate": 1.7091517694160286e-06,
|
| 10505 |
+
"loss": 6.093,
|
| 10506 |
+
"step": 1495
|
| 10507 |
+
},
|
| 10508 |
+
{
|
| 10509 |
+
"epoch": 0.24886670825535454,
|
| 10510 |
+
"grad_norm": 1.7493211030960083,
|
| 10511 |
+
"learning_rate": 1.6726956946726214e-06,
|
| 10512 |
+
"loss": 5.5142,
|
| 10513 |
+
"step": 1496
|
| 10514 |
+
},
|
| 10515 |
+
{
|
| 10516 |
+
"epoch": 0.24903306300686212,
|
| 10517 |
+
"grad_norm": 1.4517079591751099,
|
| 10518 |
+
"learning_rate": 1.6366293462495963e-06,
|
| 10519 |
+
"loss": 5.9315,
|
| 10520 |
+
"step": 1497
|
| 10521 |
+
},
|
| 10522 |
+
{
|
| 10523 |
+
"epoch": 0.24919941775836973,
|
| 10524 |
+
"grad_norm": 1.3757200241088867,
|
| 10525 |
+
"learning_rate": 1.6009528670981711e-06,
|
| 10526 |
+
"loss": 5.8995,
|
| 10527 |
+
"step": 1498
|
| 10528 |
+
},
|
| 10529 |
+
{
|
| 10530 |
+
"epoch": 0.24936577250987732,
|
| 10531 |
+
"grad_norm": 1.7322890758514404,
|
| 10532 |
+
"learning_rate": 1.5656663986242326e-06,
|
| 10533 |
+
"loss": 5.7356,
|
| 10534 |
+
"step": 1499
|
| 10535 |
+
},
|
| 10536 |
+
{
|
| 10537 |
+
"epoch": 0.2495321272613849,
|
| 10538 |
+
"grad_norm": 1.3153589963912964,
|
| 10539 |
+
"learning_rate": 1.5307700806878821e-06,
|
| 10540 |
+
"loss": 5.0523,
|
| 10541 |
+
"step": 1500
|
| 10542 |
+
},
|
| 10543 |
+
{
|
| 10544 |
+
"epoch": 0.2496984820128925,
|
| 10545 |
+
"grad_norm": 1.4346563816070557,
|
| 10546 |
+
"learning_rate": 1.4962640516028248e-06,
|
| 10547 |
+
"loss": 5.0294,
|
| 10548 |
+
"step": 1501
|
| 10549 |
+
},
|
| 10550 |
+
{
|
| 10551 |
+
"epoch": 0.24986483676440008,
|
| 10552 |
+
"grad_norm": 1.7049684524536133,
|
| 10553 |
+
"learning_rate": 1.462148448135836e-06,
|
| 10554 |
+
"loss": 5.7539,
|
| 10555 |
+
"step": 1502
|
| 10556 |
+
},
|
| 10557 |
+
{
|
| 10558 |
+
"epoch": 0.2500311915159077,
|
| 10559 |
+
"grad_norm": 1.3543156385421753,
|
| 10560 |
+
"learning_rate": 1.4284234055062185e-06,
|
| 10561 |
+
"loss": 4.4511,
|
| 10562 |
+
"step": 1503
|
| 10563 |
+
},
|
| 10564 |
+
{
|
| 10565 |
+
"epoch": 0.25019754626741525,
|
| 10566 |
+
"grad_norm": 1.3412731885910034,
|
| 10567 |
+
"learning_rate": 1.3950890573852126e-06,
|
| 10568 |
+
"loss": 5.7474,
|
| 10569 |
+
"step": 1504
|
| 10570 |
+
},
|
| 10571 |
+
{
|
| 10572 |
+
"epoch": 0.25036390101892286,
|
| 10573 |
+
"grad_norm": 1.474424123764038,
|
| 10574 |
+
"learning_rate": 1.362145535895587e-06,
|
| 10575 |
+
"loss": 5.8365,
|
| 10576 |
+
"step": 1505
|
| 10577 |
+
},
|
| 10578 |
+
{
|
| 10579 |
+
"epoch": 0.2505302557704304,
|
| 10580 |
+
"grad_norm": 1.3611209392547607,
|
| 10581 |
+
"learning_rate": 1.3295929716110267e-06,
|
| 10582 |
+
"loss": 5.0218,
|
| 10583 |
+
"step": 1506
|
| 10584 |
+
},
|
| 10585 |
+
{
|
| 10586 |
+
"epoch": 0.25069661052193803,
|
| 10587 |
+
"grad_norm": 1.6008236408233643,
|
| 10588 |
+
"learning_rate": 1.297431493555612e-06,
|
| 10589 |
+
"loss": 5.613,
|
| 10590 |
+
"step": 1507
|
| 10591 |
+
},
|
| 10592 |
+
{
|
| 10593 |
+
"epoch": 0.25086296527344565,
|
| 10594 |
+
"grad_norm": 1.5239381790161133,
|
| 10595 |
+
"learning_rate": 1.2656612292033187e-06,
|
| 10596 |
+
"loss": 5.007,
|
| 10597 |
+
"step": 1508
|
| 10598 |
+
},
|
| 10599 |
+
{
|
| 10600 |
+
"epoch": 0.2510293200249532,
|
| 10601 |
+
"grad_norm": 1.355724573135376,
|
| 10602 |
+
"learning_rate": 1.2342823044775743e-06,
|
| 10603 |
+
"loss": 4.7736,
|
| 10604 |
+
"step": 1509
|
| 10605 |
+
},
|
| 10606 |
+
{
|
| 10607 |
+
"epoch": 0.2511956747764608,
|
| 10608 |
+
"grad_norm": 1.5540764331817627,
|
| 10609 |
+
"learning_rate": 1.2032948437506576e-06,
|
| 10610 |
+
"loss": 6.3333,
|
| 10611 |
+
"step": 1510
|
| 10612 |
+
},
|
| 10613 |
+
{
|
| 10614 |
+
"epoch": 0.2513620295279684,
|
| 10615 |
+
"grad_norm": 1.4453798532485962,
|
| 10616 |
+
"learning_rate": 1.1726989698432888e-06,
|
| 10617 |
+
"loss": 5.4289,
|
| 10618 |
+
"step": 1511
|
| 10619 |
+
},
|
| 10620 |
+
{
|
| 10621 |
+
"epoch": 0.251528384279476,
|
| 10622 |
+
"grad_norm": 1.5391063690185547,
|
| 10623 |
+
"learning_rate": 1.1424948040240969e-06,
|
| 10624 |
+
"loss": 5.1791,
|
| 10625 |
+
"step": 1512
|
| 10626 |
+
},
|
| 10627 |
+
{
|
| 10628 |
+
"epoch": 0.25169473903098355,
|
| 10629 |
+
"grad_norm": 1.5789345502853394,
|
| 10630 |
+
"learning_rate": 1.1126824660091516e-06,
|
| 10631 |
+
"loss": 4.6393,
|
| 10632 |
+
"step": 1513
|
| 10633 |
+
},
|
| 10634 |
+
{
|
| 10635 |
+
"epoch": 0.25186109378249116,
|
| 10636 |
+
"grad_norm": 1.4001564979553223,
|
| 10637 |
+
"learning_rate": 1.0832620739614663e-06,
|
| 10638 |
+
"loss": 4.8468,
|
| 10639 |
+
"step": 1514
|
| 10640 |
+
},
|
| 10641 |
+
{
|
| 10642 |
+
"epoch": 0.2520274485339988,
|
| 10643 |
+
"grad_norm": 1.2637028694152832,
|
| 10644 |
+
"learning_rate": 1.054233744490607e-06,
|
| 10645 |
+
"loss": 5.2218,
|
| 10646 |
+
"step": 1515
|
| 10647 |
+
},
|
| 10648 |
+
{
|
| 10649 |
+
"epoch": 0.25219380328550633,
|
| 10650 |
+
"grad_norm": 1.2566747665405273,
|
| 10651 |
+
"learning_rate": 1.0255975926521166e-06,
|
| 10652 |
+
"loss": 5.5905,
|
| 10653 |
+
"step": 1516
|
| 10654 |
+
},
|
| 10655 |
+
{
|
| 10656 |
+
"epoch": 0.25236015803701395,
|
| 10657 |
+
"grad_norm": 2.2502365112304688,
|
| 10658 |
+
"learning_rate": 9.973537319471704e-07,
|
| 10659 |
+
"loss": 5.3755,
|
| 10660 |
+
"step": 1517
|
| 10661 |
+
},
|
| 10662 |
+
{
|
| 10663 |
+
"epoch": 0.2525265127885215,
|
| 10664 |
+
"grad_norm": 1.3299554586410522,
|
| 10665 |
+
"learning_rate": 9.695022743220317e-07,
|
| 10666 |
+
"loss": 5.4233,
|
| 10667 |
+
"step": 1518
|
| 10668 |
+
},
|
| 10669 |
+
{
|
| 10670 |
+
"epoch": 0.2526928675400291,
|
| 10671 |
+
"grad_norm": 1.4471224546432495,
|
| 10672 |
+
"learning_rate": 9.420433301676634e-07,
|
| 10673 |
+
"loss": 5.8499,
|
| 10674 |
+
"step": 1519
|
| 10675 |
+
},
|
| 10676 |
+
{
|
| 10677 |
+
"epoch": 0.2528592222915367,
|
| 10678 |
+
"grad_norm": 1.1976372003555298,
|
| 10679 |
+
"learning_rate": 9.149770083192955e-07,
|
| 10680 |
+
"loss": 4.7387,
|
| 10681 |
+
"step": 1520
|
| 10682 |
+
},
|
| 10683 |
+
{
|
| 10684 |
+
"epoch": 0.2530255770430443,
|
| 10685 |
+
"grad_norm": 1.506158709526062,
|
| 10686 |
+
"learning_rate": 8.883034160559467e-07,
|
| 10687 |
+
"loss": 5.1427,
|
| 10688 |
+
"step": 1521
|
| 10689 |
+
},
|
| 10690 |
+
{
|
| 10691 |
+
"epoch": 0.2531919317945519,
|
| 10692 |
+
"grad_norm": 1.331089735031128,
|
| 10693 |
+
"learning_rate": 8.620226591000479e-07,
|
| 10694 |
+
"loss": 5.2828,
|
| 10695 |
+
"step": 1522
|
| 10696 |
+
},
|
| 10697 |
+
{
|
| 10698 |
+
"epoch": 0.25335828654605946,
|
| 10699 |
+
"grad_norm": 1.446223258972168,
|
| 10700 |
+
"learning_rate": 8.361348416169979e-07,
|
| 10701 |
+
"loss": 5.9693,
|
| 10702 |
+
"step": 1523
|
| 10703 |
+
},
|
| 10704 |
+
{
|
| 10705 |
+
"epoch": 0.2535246412975671,
|
| 10706 |
+
"grad_norm": 1.3121943473815918,
|
| 10707 |
+
"learning_rate": 8.106400662147295e-07,
|
| 10708 |
+
"loss": 5.4518,
|
| 10709 |
+
"step": 1524
|
| 10710 |
+
},
|
| 10711 |
+
{
|
| 10712 |
+
"epoch": 0.25369099604907464,
|
| 10713 |
+
"grad_norm": 1.370236873626709,
|
| 10714 |
+
"learning_rate": 7.855384339433891e-07,
|
| 10715 |
+
"loss": 5.4269,
|
| 10716 |
+
"step": 1525
|
| 10717 |
+
},
|
| 10718 |
+
{
|
| 10719 |
+
"epoch": 0.25385735080058225,
|
| 10720 |
+
"grad_norm": 1.2394113540649414,
|
| 10721 |
+
"learning_rate": 7.608300442948358e-07,
|
| 10722 |
+
"loss": 5.6053,
|
| 10723 |
+
"step": 1526
|
| 10724 |
+
},
|
| 10725 |
+
{
|
| 10726 |
+
"epoch": 0.2540237055520898,
|
| 10727 |
+
"grad_norm": 1.2341454029083252,
|
| 10728 |
+
"learning_rate": 7.365149952022643e-07,
|
| 10729 |
+
"loss": 5.8554,
|
| 10730 |
+
"step": 1527
|
| 10731 |
+
},
|
| 10732 |
+
{
|
| 10733 |
+
"epoch": 0.2541900603035974,
|
| 10734 |
+
"grad_norm": 1.4752355813980103,
|
| 10735 |
+
"learning_rate": 7.125933830398945e-07,
|
| 10736 |
+
"loss": 5.8309,
|
| 10737 |
+
"step": 1528
|
| 10738 |
+
},
|
| 10739 |
+
{
|
| 10740 |
+
"epoch": 0.25435641505510503,
|
| 10741 |
+
"grad_norm": 1.3322540521621704,
|
| 10742 |
+
"learning_rate": 6.890653026224935e-07,
|
| 10743 |
+
"loss": 6.5364,
|
| 10744 |
+
"step": 1529
|
| 10745 |
+
},
|
| 10746 |
+
{
|
| 10747 |
+
"epoch": 0.2545227698066126,
|
| 10748 |
+
"grad_norm": 1.3641579151153564,
|
| 10749 |
+
"learning_rate": 6.659308472050651e-07,
|
| 10750 |
+
"loss": 5.4463,
|
| 10751 |
+
"step": 1530
|
| 10752 |
+
},
|
| 10753 |
+
{
|
| 10754 |
+
"epoch": 0.2546891245581202,
|
| 10755 |
+
"grad_norm": 1.4290276765823364,
|
| 10756 |
+
"learning_rate": 6.431901084824499e-07,
|
| 10757 |
+
"loss": 7.1066,
|
| 10758 |
+
"step": 1531
|
| 10759 |
+
},
|
| 10760 |
+
{
|
| 10761 |
+
"epoch": 0.25485547930962776,
|
| 10762 |
+
"grad_norm": 1.2599397897720337,
|
| 10763 |
+
"learning_rate": 6.208431765889477e-07,
|
| 10764 |
+
"loss": 5.4277,
|
| 10765 |
+
"step": 1532
|
| 10766 |
+
},
|
| 10767 |
+
{
|
| 10768 |
+
"epoch": 0.2550218340611354,
|
| 10769 |
+
"grad_norm": 1.1978791952133179,
|
| 10770 |
+
"learning_rate": 5.988901400980073e-07,
|
| 10771 |
+
"loss": 5.4124,
|
| 10772 |
+
"step": 1533
|
| 10773 |
+
},
|
| 10774 |
+
{
|
| 10775 |
+
"epoch": 0.25518818881264294,
|
| 10776 |
+
"grad_norm": 1.4857439994812012,
|
| 10777 |
+
"learning_rate": 5.773310860218373e-07,
|
| 10778 |
+
"loss": 5.5772,
|
| 10779 |
+
"step": 1534
|
| 10780 |
+
},
|
| 10781 |
+
{
|
| 10782 |
+
"epoch": 0.25535454356415055,
|
| 10783 |
+
"grad_norm": 1.432802438735962,
|
| 10784 |
+
"learning_rate": 5.561660998110952e-07,
|
| 10785 |
+
"loss": 6.091,
|
| 10786 |
+
"step": 1535
|
| 10787 |
+
},
|
| 10788 |
+
{
|
| 10789 |
+
"epoch": 0.25552089831565816,
|
| 10790 |
+
"grad_norm": 1.843793272972107,
|
| 10791 |
+
"learning_rate": 5.353952653544769e-07,
|
| 10792 |
+
"loss": 5.428,
|
| 10793 |
+
"step": 1536
|
| 10794 |
+
},
|
| 10795 |
+
{
|
| 10796 |
+
"epoch": 0.2556872530671657,
|
| 10797 |
+
"grad_norm": 1.5614017248153687,
|
| 10798 |
+
"learning_rate": 5.150186649784728e-07,
|
| 10799 |
+
"loss": 4.6047,
|
| 10800 |
+
"step": 1537
|
| 10801 |
+
},
|
| 10802 |
+
{
|
| 10803 |
+
"epoch": 0.25585360781867333,
|
| 10804 |
+
"grad_norm": 2.2080225944519043,
|
| 10805 |
+
"learning_rate": 4.950363794470003e-07,
|
| 10806 |
+
"loss": 5.0737,
|
| 10807 |
+
"step": 1538
|
| 10808 |
+
},
|
| 10809 |
+
{
|
| 10810 |
+
"epoch": 0.2560199625701809,
|
| 10811 |
+
"grad_norm": 1.4205772876739502,
|
| 10812 |
+
"learning_rate": 4.75448487961061e-07,
|
| 10813 |
+
"loss": 6.0652,
|
| 10814 |
+
"step": 1539
|
| 10815 |
+
},
|
| 10816 |
+
{
|
| 10817 |
+
"epoch": 0.2561863173216885,
|
| 10818 |
+
"grad_norm": 1.3975862264633179,
|
| 10819 |
+
"learning_rate": 4.562550681584954e-07,
|
| 10820 |
+
"loss": 5.5197,
|
| 10821 |
+
"step": 1540
|
| 10822 |
+
},
|
| 10823 |
+
{
|
| 10824 |
+
"epoch": 0.25635267207319606,
|
| 10825 |
+
"grad_norm": 1.3709969520568848,
|
| 10826 |
+
"learning_rate": 4.374561961135726e-07,
|
| 10827 |
+
"loss": 5.9445,
|
| 10828 |
+
"step": 1541
|
| 10829 |
+
},
|
| 10830 |
+
{
|
| 10831 |
+
"epoch": 0.2565190268247037,
|
| 10832 |
+
"grad_norm": 1.3202399015426636,
|
| 10833 |
+
"learning_rate": 4.190519463368014e-07,
|
| 10834 |
+
"loss": 6.4695,
|
| 10835 |
+
"step": 1542
|
| 10836 |
+
},
|
| 10837 |
+
{
|
| 10838 |
+
"epoch": 0.2566853815762113,
|
| 10839 |
+
"grad_norm": 1.4632635116577148,
|
| 10840 |
+
"learning_rate": 4.0104239177454206e-07,
|
| 10841 |
+
"loss": 5.0259,
|
| 10842 |
+
"step": 1543
|
| 10843 |
+
},
|
| 10844 |
+
{
|
| 10845 |
+
"epoch": 0.25685173632771885,
|
| 10846 |
+
"grad_norm": 1.4013946056365967,
|
| 10847 |
+
"learning_rate": 3.834276038087836e-07,
|
| 10848 |
+
"loss": 5.2433,
|
| 10849 |
+
"step": 1544
|
| 10850 |
+
},
|
| 10851 |
+
{
|
| 10852 |
+
"epoch": 0.25701809107922646,
|
| 10853 |
+
"grad_norm": 1.3530707359313965,
|
| 10854 |
+
"learning_rate": 3.662076522568225e-07,
|
| 10855 |
+
"loss": 5.617,
|
| 10856 |
+
"step": 1545
|
| 10857 |
+
},
|
| 10858 |
+
{
|
| 10859 |
+
"epoch": 0.257184445830734,
|
| 10860 |
+
"grad_norm": 1.214060664176941,
|
| 10861 |
+
"learning_rate": 3.4938260537098476e-07,
|
| 10862 |
+
"loss": 5.3957,
|
| 10863 |
+
"step": 1546
|
| 10864 |
+
},
|
| 10865 |
+
{
|
| 10866 |
+
"epoch": 0.25735080058224163,
|
| 10867 |
+
"grad_norm": 1.3363624811172485,
|
| 10868 |
+
"learning_rate": 3.3295252983838177e-07,
|
| 10869 |
+
"loss": 5.2809,
|
| 10870 |
+
"step": 1547
|
| 10871 |
+
},
|
| 10872 |
+
{
|
| 10873 |
+
"epoch": 0.2575171553337492,
|
| 10874 |
+
"grad_norm": 1.6313562393188477,
|
| 10875 |
+
"learning_rate": 3.1691749078064384e-07,
|
| 10876 |
+
"loss": 6.0187,
|
| 10877 |
+
"step": 1548
|
| 10878 |
+
},
|
| 10879 |
+
{
|
| 10880 |
+
"epoch": 0.2576835100852568,
|
| 10881 |
+
"grad_norm": 1.3106491565704346,
|
| 10882 |
+
"learning_rate": 3.0127755175362037e-07,
|
| 10883 |
+
"loss": 5.4747,
|
| 10884 |
+
"step": 1549
|
| 10885 |
+
},
|
| 10886 |
+
{
|
| 10887 |
+
"epoch": 0.2578498648367644,
|
| 10888 |
+
"grad_norm": 1.4089770317077637,
|
| 10889 |
+
"learning_rate": 2.8603277474716917e-07,
|
| 10890 |
+
"loss": 5.7946,
|
| 10891 |
+
"step": 1550
|
| 10892 |
+
},
|
| 10893 |
+
{
|
| 10894 |
+
"epoch": 0.258016219588272,
|
| 10895 |
+
"grad_norm": 1.3027589321136475,
|
| 10896 |
+
"learning_rate": 2.711832201849229e-07,
|
| 10897 |
+
"loss": 5.0376,
|
| 10898 |
+
"step": 1551
|
| 10899 |
+
},
|
| 10900 |
+
{
|
| 10901 |
+
"epoch": 0.2581825743397796,
|
| 10902 |
+
"grad_norm": 1.3077501058578491,
|
| 10903 |
+
"learning_rate": 2.567289469239786e-07,
|
| 10904 |
+
"loss": 4.9598,
|
| 10905 |
+
"step": 1552
|
| 10906 |
+
},
|
| 10907 |
+
{
|
| 10908 |
+
"epoch": 0.25834892909128715,
|
| 10909 |
+
"grad_norm": 1.3609598875045776,
|
| 10910 |
+
"learning_rate": 2.4267001225474207e-07,
|
| 10911 |
+
"loss": 5.1608,
|
| 10912 |
+
"step": 1553
|
| 10913 |
+
},
|
| 10914 |
+
{
|
| 10915 |
+
"epoch": 0.25851528384279476,
|
| 10916 |
+
"grad_norm": 1.183615803718567,
|
| 10917 |
+
"learning_rate": 2.2900647190068348e-07,
|
| 10918 |
+
"loss": 5.3375,
|
| 10919 |
+
"step": 1554
|
| 10920 |
+
},
|
| 10921 |
+
{
|
| 10922 |
+
"epoch": 0.2586816385943024,
|
| 10923 |
+
"grad_norm": 1.1442736387252808,
|
| 10924 |
+
"learning_rate": 2.1573838001807123e-07,
|
| 10925 |
+
"loss": 5.3008,
|
| 10926 |
+
"step": 1555
|
| 10927 |
+
},
|
| 10928 |
+
{
|
| 10929 |
+
"epoch": 0.25884799334580993,
|
| 10930 |
+
"grad_norm": 1.2758674621582031,
|
| 10931 |
+
"learning_rate": 2.0286578919581632e-07,
|
| 10932 |
+
"loss": 4.9198,
|
| 10933 |
+
"step": 1556
|
| 10934 |
+
},
|
| 10935 |
+
{
|
| 10936 |
+
"epoch": 0.25901434809731755,
|
| 10937 |
+
"grad_norm": 1.405775785446167,
|
| 10938 |
+
"learning_rate": 1.9038875045520598e-07,
|
| 10939 |
+
"loss": 5.6042,
|
| 10940 |
+
"step": 1557
|
| 10941 |
+
},
|
| 10942 |
+
{
|
| 10943 |
+
"epoch": 0.2591807028488251,
|
| 10944 |
+
"grad_norm": 1.2812808752059937,
|
| 10945 |
+
"learning_rate": 1.7830731324977036e-07,
|
| 10946 |
+
"loss": 5.4369,
|
| 10947 |
+
"step": 1558
|
| 10948 |
+
},
|
| 10949 |
+
{
|
| 10950 |
+
"epoch": 0.2593470576003327,
|
| 10951 |
+
"grad_norm": 2.5925631523132324,
|
| 10952 |
+
"learning_rate": 1.6662152546500499e-07,
|
| 10953 |
+
"loss": 6.1546,
|
| 10954 |
+
"step": 1559
|
| 10955 |
+
},
|
| 10956 |
+
{
|
| 10957 |
+
"epoch": 0.2595134123518403,
|
| 10958 |
+
"grad_norm": 1.2293897867202759,
|
| 10959 |
+
"learning_rate": 1.5533143341827094e-07,
|
| 10960 |
+
"loss": 5.5394,
|
| 10961 |
+
"step": 1560
|
| 10962 |
+
},
|
| 10963 |
+
{
|
| 10964 |
+
"epoch": 0.2596797671033479,
|
| 10965 |
+
"grad_norm": 1.2540256977081299,
|
| 10966 |
+
"learning_rate": 1.4443708185853943e-07,
|
| 10967 |
+
"loss": 6.1312,
|
| 10968 |
+
"step": 1561
|
| 10969 |
+
},
|
| 10970 |
+
{
|
| 10971 |
+
"epoch": 0.2598461218548555,
|
| 10972 |
+
"grad_norm": 1.3341058492660522,
|
| 10973 |
+
"learning_rate": 1.3393851396623634e-07,
|
| 10974 |
+
"loss": 5.0944,
|
| 10975 |
+
"step": 1562
|
| 10976 |
+
},
|
| 10977 |
+
{
|
| 10978 |
+
"epoch": 0.26001247660636306,
|
| 10979 |
+
"grad_norm": 1.4394543170928955,
|
| 10980 |
+
"learning_rate": 1.238357713530869e-07,
|
| 10981 |
+
"loss": 5.9553,
|
| 10982 |
+
"step": 1563
|
| 10983 |
+
},
|
| 10984 |
+
{
|
| 10985 |
+
"epoch": 0.2601788313578707,
|
| 10986 |
+
"grad_norm": 1.5656707286834717,
|
| 10987 |
+
"learning_rate": 1.1412889406192673e-07,
|
| 10988 |
+
"loss": 6.3283,
|
| 10989 |
+
"step": 1564
|
| 10990 |
+
},
|
| 10991 |
+
{
|
| 10992 |
+
"epoch": 0.26034518610937823,
|
| 10993 |
+
"grad_norm": 1.3686655759811401,
|
| 10994 |
+
"learning_rate": 1.0481792056655782e-07,
|
| 10995 |
+
"loss": 5.4796,
|
| 10996 |
+
"step": 1565
|
| 10997 |
+
},
|
| 10998 |
+
{
|
| 10999 |
+
"epoch": 0.26051154086088585,
|
| 11000 |
+
"grad_norm": 1.3350985050201416,
|
| 11001 |
+
"learning_rate": 9.590288777161505e-08,
|
| 11002 |
+
"loss": 5.196,
|
| 11003 |
+
"step": 1566
|
| 11004 |
+
},
|
| 11005 |
+
{
|
| 11006 |
+
"epoch": 0.2606778956123934,
|
| 11007 |
+
"grad_norm": 1.3551819324493408,
|
| 11008 |
+
"learning_rate": 8.738383101235537e-08,
|
| 11009 |
+
"loss": 5.0922,
|
| 11010 |
+
"step": 1567
|
| 11011 |
+
},
|
| 11012 |
+
{
|
| 11013 |
+
"epoch": 0.260844250363901,
|
| 11014 |
+
"grad_norm": 1.3822896480560303,
|
| 11015 |
+
"learning_rate": 7.926078405460224e-08,
|
| 11016 |
+
"loss": 6.8186,
|
| 11017 |
+
"step": 1568
|
| 11018 |
+
},
|
| 11019 |
+
{
|
| 11020 |
+
"epoch": 0.26101060511540863,
|
| 11021 |
+
"grad_norm": 1.3261609077453613,
|
| 11022 |
+
"learning_rate": 7.153377909455694e-08,
|
| 11023 |
+
"loss": 5.2518,
|
| 11024 |
+
"step": 1569
|
| 11025 |
+
},
|
| 11026 |
+
{
|
| 11027 |
+
"epoch": 0.2611769598669162,
|
| 11028 |
+
"grad_norm": 1.5050151348114014,
|
| 11029 |
+
"learning_rate": 6.420284675865418e-08,
|
| 11030 |
+
"loss": 5.4144,
|
| 11031 |
+
"step": 1570
|
| 11032 |
+
},
|
| 11033 |
+
{
|
| 11034 |
+
"epoch": 0.2613433146184238,
|
| 11035 |
+
"grad_norm": 1.3267443180084229,
|
| 11036 |
+
"learning_rate": 5.726801610351773e-08,
|
| 11037 |
+
"loss": 6.5885,
|
| 11038 |
+
"step": 1571
|
| 11039 |
+
},
|
| 11040 |
+
{
|
| 11041 |
+
"epoch": 0.26150966936993136,
|
| 11042 |
+
"grad_norm": 1.5229625701904297,
|
| 11043 |
+
"learning_rate": 5.072931461576058e-08,
|
| 11044 |
+
"loss": 5.5711,
|
| 11045 |
+
"step": 1572
|
| 11046 |
+
},
|
| 11047 |
+
{
|
| 11048 |
+
"epoch": 0.261676024121439,
|
| 11049 |
+
"grad_norm": 1.3895554542541504,
|
| 11050 |
+
"learning_rate": 4.458676821194052e-08,
|
| 11051 |
+
"loss": 5.628,
|
| 11052 |
+
"step": 1573
|
| 11053 |
+
},
|
| 11054 |
+
{
|
| 11055 |
+
"epoch": 0.26184237887294654,
|
| 11056 |
+
"grad_norm": 1.421586036682129,
|
| 11057 |
+
"learning_rate": 3.8840401238415814e-08,
|
| 11058 |
+
"loss": 6.2261,
|
| 11059 |
+
"step": 1574
|
| 11060 |
+
},
|
| 11061 |
+
{
|
| 11062 |
+
"epoch": 0.26200873362445415,
|
| 11063 |
+
"grad_norm": 1.3471659421920776,
|
| 11064 |
+
"learning_rate": 3.3490236471256375e-08,
|
| 11065 |
+
"loss": 4.9101,
|
| 11066 |
+
"step": 1575
|
| 11067 |
+
},
|
| 11068 |
+
{
|
| 11069 |
+
"epoch": 0.26217508837596176,
|
| 11070 |
+
"grad_norm": 1.6335341930389404,
|
| 11071 |
+
"learning_rate": 2.853629511617717e-08,
|
| 11072 |
+
"loss": 5.5529,
|
| 11073 |
+
"step": 1576
|
| 11074 |
+
},
|
| 11075 |
+
{
|
| 11076 |
+
"epoch": 0.2623414431274693,
|
| 11077 |
+
"grad_norm": 1.3969974517822266,
|
| 11078 |
+
"learning_rate": 2.3978596808427177e-08,
|
| 11079 |
+
"loss": 4.9099,
|
| 11080 |
+
"step": 1577
|
| 11081 |
+
},
|
| 11082 |
+
{
|
| 11083 |
+
"epoch": 0.26250779787897693,
|
| 11084 |
+
"grad_norm": 1.2568950653076172,
|
| 11085 |
+
"learning_rate": 1.981715961272279e-08,
|
| 11086 |
+
"loss": 5.4438,
|
| 11087 |
+
"step": 1578
|
| 11088 |
+
},
|
| 11089 |
+
{
|
| 11090 |
+
"epoch": 0.2626741526304845,
|
| 11091 |
+
"grad_norm": 1.9526634216308594,
|
| 11092 |
+
"learning_rate": 1.6052000023192292e-08,
|
| 11093 |
+
"loss": 6.8875,
|
| 11094 |
+
"step": 1579
|
| 11095 |
+
},
|
| 11096 |
+
{
|
| 11097 |
+
"epoch": 0.2628405073819921,
|
| 11098 |
+
"grad_norm": 1.293441891670227,
|
| 11099 |
+
"learning_rate": 1.2683132963253742e-08,
|
| 11100 |
+
"loss": 5.3637,
|
| 11101 |
+
"step": 1580
|
| 11102 |
+
},
|
| 11103 |
+
{
|
| 11104 |
+
"epoch": 0.26300686213349966,
|
| 11105 |
+
"grad_norm": 1.7147483825683594,
|
| 11106 |
+
"learning_rate": 9.710571785626065e-09,
|
| 11107 |
+
"loss": 6.42,
|
| 11108 |
+
"step": 1581
|
| 11109 |
+
},
|
| 11110 |
+
{
|
| 11111 |
+
"epoch": 0.2631732168850073,
|
| 11112 |
+
"grad_norm": 1.2214791774749756,
|
| 11113 |
+
"learning_rate": 7.134328272240254e-09,
|
| 11114 |
+
"loss": 5.3088,
|
| 11115 |
+
"step": 1582
|
| 11116 |
+
},
|
| 11117 |
+
{
|
| 11118 |
+
"epoch": 0.2633395716365149,
|
| 11119 |
+
"grad_norm": 1.3537429571151733,
|
| 11120 |
+
"learning_rate": 4.95441263420604e-09,
|
| 11121 |
+
"loss": 5.455,
|
| 11122 |
+
"step": 1583
|
| 11123 |
+
},
|
| 11124 |
+
{
|
| 11125 |
+
"epoch": 0.26350592638802245,
|
| 11126 |
+
"grad_norm": 1.745126724243164,
|
| 11127 |
+
"learning_rate": 3.170833511734195e-09,
|
| 11128 |
+
"loss": 5.7558,
|
| 11129 |
+
"step": 1584
|
| 11130 |
+
},
|
| 11131 |
+
{
|
| 11132 |
+
"epoch": 0.26367228113953006,
|
| 11133 |
+
"grad_norm": 1.3130848407745361,
|
| 11134 |
+
"learning_rate": 1.7835979741698261e-09,
|
| 11135 |
+
"loss": 4.8823,
|
| 11136 |
+
"step": 1585
|
| 11137 |
+
},
|
| 11138 |
+
{
|
| 11139 |
+
"epoch": 0.2638386358910376,
|
| 11140 |
+
"grad_norm": 1.3555537462234497,
|
| 11141 |
+
"learning_rate": 7.927115198924639e-10,
|
| 11142 |
+
"loss": 5.3676,
|
| 11143 |
+
"step": 1586
|
| 11144 |
+
},
|
| 11145 |
+
{
|
| 11146 |
+
"epoch": 0.26400499064254523,
|
| 11147 |
+
"grad_norm": 1.5286144018173218,
|
| 11148 |
+
"learning_rate": 1.9817807634936457e-10,
|
| 11149 |
+
"loss": 5.9778,
|
| 11150 |
+
"step": 1587
|
| 11151 |
+
},
|
| 11152 |
+
{
|
| 11153 |
+
"epoch": 0.2641713453940528,
|
| 11154 |
+
"grad_norm": 1.5342683792114258,
|
| 11155 |
+
"learning_rate": 0.0,
|
| 11156 |
+
"loss": 5.3142,
|
| 11157 |
+
"step": 1588
|
| 11158 |
+
},
|
| 11159 |
+
{
|
| 11160 |
+
"epoch": 0.2641713453940528,
|
| 11161 |
+
"eval_loss": 1.390223503112793,
|
| 11162 |
+
"eval_runtime": 141.2025,
|
| 11163 |
+
"eval_samples_per_second": 17.925,
|
| 11164 |
+
"eval_steps_per_second": 8.966,
|
| 11165 |
+
"step": 1588
|
| 11166 |
}
|
| 11167 |
],
|
| 11168 |
"logging_steps": 1,
|
|
|
|
| 11177 |
"should_evaluate": false,
|
| 11178 |
"should_log": false,
|
| 11179 |
"should_save": true,
|
| 11180 |
+
"should_training_stop": true
|
| 11181 |
},
|
| 11182 |
"attributes": {}
|
| 11183 |
}
|
| 11184 |
},
|
| 11185 |
+
"total_flos": 4.964020502428385e+17,
|
| 11186 |
"train_batch_size": 2,
|
| 11187 |
"trial_name": null,
|
| 11188 |
"trial_params": null
|
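For anyone inspecting this checkpoint locally, here is a minimal sketch of how the history above can be read back, assuming the standard `transformers` `trainer_state.json` schema (top-level `log_history` list, train entries with `loss`/`step`, eval entries with `eval_loss`) and the `last-checkpoint/` layout used in this commit:

import json

# Load the trainer state saved alongside this checkpoint.
# The path is an assumption based on the file layout in this commit.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Split the log history into per-step training entries and eval entries;
# in this schema, eval entries carry "eval_loss" and no "loss" key.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"logged train steps: {len(train_logs)} (last step {train_logs[-1]['step']})")
print(f"last train loss:    {train_logs[-1]['loss']}")
if eval_logs:
    print(f"last eval loss:     {eval_logs[-1]['eval_loss']}")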