Training in progress, step 3488, checkpoint
last-checkpoint/optimizer.pt CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:097d91326707b22e3d5cfac4a4aae8d055678adc91f959bab3c6dc8aecd60992
 size 686648325
last-checkpoint/pytorch_model.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5926c6173c1851bd141d5a8447d9b3983fb41372508d6b4ef7bed418dccc4e69
 size 343308717
last-checkpoint/scheduler.pt CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cfeb26fd6ea91c8f011a03fd3b1e7297c674b281055de3840ccbc68620dd4b97
 size 627
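The three binaries above are stored through Git LFS, so each diff only touches the pointer file: the "oid sha256:" line and the byte "size" change when a new checkpoint is written. A minimal sketch of checking such a pointer against a downloaded blob, assuming both files are available locally (the paths in the last line are hypothetical):

    import hashlib
    import os

    def verify_lfs_pointer(pointer_path, blob_path):
        # Parse the three pointer lines: version, "oid sha256:<hash>", "size <bytes>".
        fields = {}
        with open(pointer_path, "r", encoding="utf-8") as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        expected_oid = fields["oid"].split(":", 1)[1]
        expected_size = int(fields["size"])

        # Hash the blob in 1 MiB chunks so a multi-hundred-MB checkpoint fits in memory.
        digest = hashlib.sha256()
        with open(blob_path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)

        return digest.hexdigest() == expected_oid and os.path.getsize(blob_path) == expected_size

    # Hypothetical local paths; adjust to wherever the pointer and blob were downloaded.
    print(verify_lfs_pointer("last-checkpoint/optimizer.pt.pointer", "last-checkpoint/optimizer.pt"))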
last-checkpoint/trainer_state.json CHANGED

@@ -1,9 +1,9 @@

 1       {
 2         "best_metric": null,
 3         "best_model_checkpoint": null,
 4   -     "epoch": 3.
 5         "eval_steps": 500,
 6   -     "global_step":
 7         "is_hyper_param_search": false,
 8         "is_local_process_zero": true,
 9         "is_world_process_zero": true,

@@ -19660,13 +19660,1321 @@

 19660      "learning_rate": 0.00012976190476190477,
 19661      "loss": 0.5561,
 19662      "step": 3270
 19663      }
 19664      ],
 19665      "logging_steps": 1,
 19666      "max_steps": 4360,
 19667      "num_train_epochs": 5,
 19668      "save_steps": 218,
 19669 -    "total_flos": 6.
 19670      "trial_name": null,
 19671      "trial_params": null
 19672      }

 1       {
 2         "best_metric": null,
 3         "best_model_checkpoint": null,
 4   +     "epoch": 3.9967772545789844,
 5         "eval_steps": 500,
 6   +     "global_step": 3488,
 7         "is_hyper_param_search": false,
 8         "is_local_process_zero": true,
 9         "is_world_process_zero": true,

 19660      "learning_rate": 0.00012976190476190477,
 19661      "loss": 0.5561,
 19662      "step": 3270
| 19663 |
+
},
|
| 19664 |
+
{
|
| 19665 |
+
"epoch": 3.75,
|
| 19666 |
+
"learning_rate": 0.00012964285714285714,
|
| 19667 |
+
"loss": 0.5627,
|
| 19668 |
+
"step": 3271
|
| 19669 |
+
},
|
| 19670 |
+
{
|
| 19671 |
+
"epoch": 3.75,
|
| 19672 |
+
"learning_rate": 0.00012952380952380954,
|
| 19673 |
+
"loss": 0.4891,
|
| 19674 |
+
"step": 3272
|
| 19675 |
+
},
|
| 19676 |
+
{
|
| 19677 |
+
"epoch": 3.75,
|
| 19678 |
+
"learning_rate": 0.00012940476190476192,
|
| 19679 |
+
"loss": 0.5672,
|
| 19680 |
+
"step": 3273
|
| 19681 |
+
},
|
| 19682 |
+
{
|
| 19683 |
+
"epoch": 3.75,
|
| 19684 |
+
"learning_rate": 0.0001292857142857143,
|
| 19685 |
+
"loss": 0.521,
|
| 19686 |
+
"step": 3274
|
| 19687 |
+
},
|
| 19688 |
+
{
|
| 19689 |
+
"epoch": 3.75,
|
| 19690 |
+
"learning_rate": 0.00012916666666666667,
|
| 19691 |
+
"loss": 0.5842,
|
| 19692 |
+
"step": 3275
|
| 19693 |
+
},
|
| 19694 |
+
{
|
| 19695 |
+
"epoch": 3.75,
|
| 19696 |
+
"learning_rate": 0.00012904761904761907,
|
| 19697 |
+
"loss": 0.5464,
|
| 19698 |
+
"step": 3276
|
| 19699 |
+
},
|
| 19700 |
+
{
|
| 19701 |
+
"epoch": 3.75,
|
| 19702 |
+
"learning_rate": 0.00012892857142857145,
|
| 19703 |
+
"loss": 0.5761,
|
| 19704 |
+
"step": 3277
|
| 19705 |
+
},
|
| 19706 |
+
{
|
| 19707 |
+
"epoch": 3.76,
|
| 19708 |
+
"learning_rate": 0.00012880952380952382,
|
| 19709 |
+
"loss": 0.5626,
|
| 19710 |
+
"step": 3278
|
| 19711 |
+
},
|
| 19712 |
+
{
|
| 19713 |
+
"epoch": 3.76,
|
| 19714 |
+
"learning_rate": 0.0001286904761904762,
|
| 19715 |
+
"loss": 0.5643,
|
| 19716 |
+
"step": 3279
|
| 19717 |
+
},
|
| 19718 |
+
{
|
| 19719 |
+
"epoch": 3.76,
|
| 19720 |
+
"learning_rate": 0.00012857142857142858,
|
| 19721 |
+
"loss": 0.5404,
|
| 19722 |
+
"step": 3280
|
| 19723 |
+
},
|
| 19724 |
+
{
|
| 19725 |
+
"epoch": 3.76,
|
| 19726 |
+
"learning_rate": 0.00012845238095238098,
|
| 19727 |
+
"loss": 0.5833,
|
| 19728 |
+
"step": 3281
|
| 19729 |
+
},
|
| 19730 |
+
{
|
| 19731 |
+
"epoch": 3.76,
|
| 19732 |
+
"learning_rate": 0.00012833333333333335,
|
| 19733 |
+
"loss": 0.563,
|
| 19734 |
+
"step": 3282
|
| 19735 |
+
},
|
| 19736 |
+
{
|
| 19737 |
+
"epoch": 3.76,
|
| 19738 |
+
"learning_rate": 0.00012821428571428573,
|
| 19739 |
+
"loss": 0.5612,
|
| 19740 |
+
"step": 3283
|
| 19741 |
+
},
|
| 19742 |
+
{
|
| 19743 |
+
"epoch": 3.76,
|
| 19744 |
+
"learning_rate": 0.00012809523809523808,
|
| 19745 |
+
"loss": 0.5565,
|
| 19746 |
+
"step": 3284
|
| 19747 |
+
},
|
| 19748 |
+
{
|
| 19749 |
+
"epoch": 3.76,
|
| 19750 |
+
"learning_rate": 0.00012797619047619048,
|
| 19751 |
+
"loss": 0.554,
|
| 19752 |
+
"step": 3285
|
| 19753 |
+
},
|
| 19754 |
+
{
|
| 19755 |
+
"epoch": 3.77,
|
| 19756 |
+
"learning_rate": 0.00012785714285714286,
|
| 19757 |
+
"loss": 0.5603,
|
| 19758 |
+
"step": 3286
|
| 19759 |
+
},
|
| 19760 |
+
{
|
| 19761 |
+
"epoch": 3.77,
|
| 19762 |
+
"learning_rate": 0.00012773809523809523,
|
| 19763 |
+
"loss": 0.5539,
|
| 19764 |
+
"step": 3287
|
| 19765 |
+
},
|
| 19766 |
+
{
|
| 19767 |
+
"epoch": 3.77,
|
| 19768 |
+
"learning_rate": 0.0001276190476190476,
|
| 19769 |
+
"loss": 0.5602,
|
| 19770 |
+
"step": 3288
|
| 19771 |
+
},
|
| 19772 |
+
{
|
| 19773 |
+
"epoch": 3.77,
|
| 19774 |
+
"learning_rate": 0.0001275,
|
| 19775 |
+
"loss": 0.5153,
|
| 19776 |
+
"step": 3289
|
| 19777 |
+
},
|
| 19778 |
+
{
|
| 19779 |
+
"epoch": 3.77,
|
| 19780 |
+
"learning_rate": 0.00012738095238095238,
|
| 19781 |
+
"loss": 0.5646,
|
| 19782 |
+
"step": 3290
|
| 19783 |
+
},
|
| 19784 |
+
{
|
| 19785 |
+
"epoch": 3.77,
|
| 19786 |
+
"learning_rate": 0.00012726190476190476,
|
| 19787 |
+
"loss": 0.5287,
|
| 19788 |
+
"step": 3291
|
| 19789 |
+
},
|
| 19790 |
+
{
|
| 19791 |
+
"epoch": 3.77,
|
| 19792 |
+
"learning_rate": 0.00012714285714285714,
|
| 19793 |
+
"loss": 0.5712,
|
| 19794 |
+
"step": 3292
|
| 19795 |
+
},
|
| 19796 |
+
{
|
| 19797 |
+
"epoch": 3.77,
|
| 19798 |
+
"learning_rate": 0.0001270238095238095,
|
| 19799 |
+
"loss": 0.5944,
|
| 19800 |
+
"step": 3293
|
| 19801 |
+
},
|
| 19802 |
+
{
|
| 19803 |
+
"epoch": 3.77,
|
| 19804 |
+
"learning_rate": 0.00012690476190476191,
|
| 19805 |
+
"loss": 0.5344,
|
| 19806 |
+
"step": 3294
|
| 19807 |
+
},
|
| 19808 |
+
{
|
| 19809 |
+
"epoch": 3.78,
|
| 19810 |
+
"learning_rate": 0.0001267857142857143,
|
| 19811 |
+
"loss": 0.5199,
|
| 19812 |
+
"step": 3295
|
| 19813 |
+
},
|
| 19814 |
+
{
|
| 19815 |
+
"epoch": 3.78,
|
| 19816 |
+
"learning_rate": 0.00012666666666666666,
|
| 19817 |
+
"loss": 0.5239,
|
| 19818 |
+
"step": 3296
|
| 19819 |
+
},
|
| 19820 |
+
{
|
| 19821 |
+
"epoch": 3.78,
|
| 19822 |
+
"learning_rate": 0.00012654761904761904,
|
| 19823 |
+
"loss": 0.5981,
|
| 19824 |
+
"step": 3297
|
| 19825 |
+
},
|
| 19826 |
+
{
|
| 19827 |
+
"epoch": 3.78,
|
| 19828 |
+
"learning_rate": 0.00012642857142857144,
|
| 19829 |
+
"loss": 0.5663,
|
| 19830 |
+
"step": 3298
|
| 19831 |
+
},
|
| 19832 |
+
{
|
| 19833 |
+
"epoch": 3.78,
|
| 19834 |
+
"learning_rate": 0.00012630952380952382,
|
| 19835 |
+
"loss": 0.5413,
|
| 19836 |
+
"step": 3299
|
| 19837 |
+
},
|
| 19838 |
+
{
|
| 19839 |
+
"epoch": 3.78,
|
| 19840 |
+
"learning_rate": 0.0001261904761904762,
|
| 19841 |
+
"loss": 0.5619,
|
| 19842 |
+
"step": 3300
|
| 19843 |
+
},
|
| 19844 |
+
{
|
| 19845 |
+
"epoch": 3.78,
|
| 19846 |
+
"learning_rate": 0.00012607142857142857,
|
| 19847 |
+
"loss": 0.5107,
|
| 19848 |
+
"step": 3301
|
| 19849 |
+
},
|
| 19850 |
+
{
|
| 19851 |
+
"epoch": 3.78,
|
| 19852 |
+
"learning_rate": 0.00012595238095238094,
|
| 19853 |
+
"loss": 0.5318,
|
| 19854 |
+
"step": 3302
|
| 19855 |
+
},
|
| 19856 |
+
{
|
| 19857 |
+
"epoch": 3.78,
|
| 19858 |
+
"learning_rate": 0.00012583333333333335,
|
| 19859 |
+
"loss": 0.5652,
|
| 19860 |
+
"step": 3303
|
| 19861 |
+
},
|
| 19862 |
+
{
|
| 19863 |
+
"epoch": 3.79,
|
| 19864 |
+
"learning_rate": 0.00012571428571428572,
|
| 19865 |
+
"loss": 0.5161,
|
| 19866 |
+
"step": 3304
|
| 19867 |
+
},
|
| 19868 |
+
{
|
| 19869 |
+
"epoch": 3.79,
|
| 19870 |
+
"learning_rate": 0.0001255952380952381,
|
| 19871 |
+
"loss": 0.5231,
|
| 19872 |
+
"step": 3305
|
| 19873 |
+
},
|
| 19874 |
+
{
|
| 19875 |
+
"epoch": 3.79,
|
| 19876 |
+
"learning_rate": 0.00012547619047619047,
|
| 19877 |
+
"loss": 0.6096,
|
| 19878 |
+
"step": 3306
|
| 19879 |
+
},
|
| 19880 |
+
{
|
| 19881 |
+
"epoch": 3.79,
|
| 19882 |
+
"learning_rate": 0.00012535714285714285,
|
| 19883 |
+
"loss": 0.5764,
|
| 19884 |
+
"step": 3307
|
| 19885 |
+
},
|
| 19886 |
+
{
|
| 19887 |
+
"epoch": 3.79,
|
| 19888 |
+
"learning_rate": 0.00012523809523809525,
|
| 19889 |
+
"loss": 0.5406,
|
| 19890 |
+
"step": 3308
|
| 19891 |
+
},
|
| 19892 |
+
{
|
| 19893 |
+
"epoch": 3.79,
|
| 19894 |
+
"learning_rate": 0.00012511904761904763,
|
| 19895 |
+
"loss": 0.535,
|
| 19896 |
+
"step": 3309
|
| 19897 |
+
},
|
| 19898 |
+
{
|
| 19899 |
+
"epoch": 3.79,
|
| 19900 |
+
"learning_rate": 0.000125,
|
| 19901 |
+
"loss": 0.5547,
|
| 19902 |
+
"step": 3310
|
| 19903 |
+
},
|
| 19904 |
+
{
|
| 19905 |
+
"epoch": 3.79,
|
| 19906 |
+
"learning_rate": 0.00012488095238095238,
|
| 19907 |
+
"loss": 0.5922,
|
| 19908 |
+
"step": 3311
|
| 19909 |
+
},
|
| 19910 |
+
{
|
| 19911 |
+
"epoch": 3.8,
|
| 19912 |
+
"learning_rate": 0.00012476190476190478,
|
| 19913 |
+
"loss": 0.5049,
|
| 19914 |
+
"step": 3312
|
| 19915 |
+
},
|
| 19916 |
+
{
|
| 19917 |
+
"epoch": 3.8,
|
| 19918 |
+
"learning_rate": 0.00012464285714285716,
|
| 19919 |
+
"loss": 0.5783,
|
| 19920 |
+
"step": 3313
|
| 19921 |
+
},
|
| 19922 |
+
{
|
| 19923 |
+
"epoch": 3.8,
|
| 19924 |
+
"learning_rate": 0.00012452380952380953,
|
| 19925 |
+
"loss": 0.5598,
|
| 19926 |
+
"step": 3314
|
| 19927 |
+
},
|
| 19928 |
+
{
|
| 19929 |
+
"epoch": 3.8,
|
| 19930 |
+
"learning_rate": 0.0001244047619047619,
|
| 19931 |
+
"loss": 0.5645,
|
| 19932 |
+
"step": 3315
|
| 19933 |
+
},
|
| 19934 |
+
{
|
| 19935 |
+
"epoch": 3.8,
|
| 19936 |
+
"learning_rate": 0.00012428571428571428,
|
| 19937 |
+
"loss": 0.5337,
|
| 19938 |
+
"step": 3316
|
| 19939 |
+
},
|
| 19940 |
+
{
|
| 19941 |
+
"epoch": 3.8,
|
| 19942 |
+
"learning_rate": 0.00012416666666666669,
|
| 19943 |
+
"loss": 0.5235,
|
| 19944 |
+
"step": 3317
|
| 19945 |
+
},
|
| 19946 |
+
{
|
| 19947 |
+
"epoch": 3.8,
|
| 19948 |
+
"learning_rate": 0.00012404761904761906,
|
| 19949 |
+
"loss": 0.5892,
|
| 19950 |
+
"step": 3318
|
| 19951 |
+
},
|
| 19952 |
+
{
|
| 19953 |
+
"epoch": 3.8,
|
| 19954 |
+
"learning_rate": 0.00012392857142857144,
|
| 19955 |
+
"loss": 0.5443,
|
| 19956 |
+
"step": 3319
|
| 19957 |
+
},
|
| 19958 |
+
{
|
| 19959 |
+
"epoch": 3.8,
|
| 19960 |
+
"learning_rate": 0.0001238095238095238,
|
| 19961 |
+
"loss": 0.523,
|
| 19962 |
+
"step": 3320
|
| 19963 |
+
},
|
| 19964 |
+
{
|
| 19965 |
+
"epoch": 3.81,
|
| 19966 |
+
"learning_rate": 0.00012369047619047621,
|
| 19967 |
+
"loss": 0.5298,
|
| 19968 |
+
"step": 3321
|
| 19969 |
+
},
|
| 19970 |
+
{
|
| 19971 |
+
"epoch": 3.81,
|
| 19972 |
+
"learning_rate": 0.0001235714285714286,
|
| 19973 |
+
"loss": 0.542,
|
| 19974 |
+
"step": 3322
|
| 19975 |
+
},
|
| 19976 |
+
{
|
| 19977 |
+
"epoch": 3.81,
|
| 19978 |
+
"learning_rate": 0.00012345238095238097,
|
| 19979 |
+
"loss": 0.5393,
|
| 19980 |
+
"step": 3323
|
| 19981 |
+
},
|
| 19982 |
+
{
|
| 19983 |
+
"epoch": 3.81,
|
| 19984 |
+
"learning_rate": 0.00012333333333333334,
|
| 19985 |
+
"loss": 0.5885,
|
| 19986 |
+
"step": 3324
|
| 19987 |
+
},
|
| 19988 |
+
{
|
| 19989 |
+
"epoch": 3.81,
|
| 19990 |
+
"learning_rate": 0.00012321428571428572,
|
| 19991 |
+
"loss": 0.5862,
|
| 19992 |
+
"step": 3325
|
| 19993 |
+
},
|
| 19994 |
+
{
|
| 19995 |
+
"epoch": 3.81,
|
| 19996 |
+
"learning_rate": 0.00012309523809523812,
|
| 19997 |
+
"loss": 0.5234,
|
| 19998 |
+
"step": 3326
|
| 19999 |
+
},
|
| 20000 |
+
{
|
| 20001 |
+
"epoch": 3.81,
|
| 20002 |
+
"learning_rate": 0.0001229761904761905,
|
| 20003 |
+
"loss": 0.5646,
|
| 20004 |
+
"step": 3327
|
| 20005 |
+
},
|
| 20006 |
+
{
|
| 20007 |
+
"epoch": 3.81,
|
| 20008 |
+
"learning_rate": 0.00012285714285714287,
|
| 20009 |
+
"loss": 0.5288,
|
| 20010 |
+
"step": 3328
|
| 20011 |
+
},
|
| 20012 |
+
{
|
| 20013 |
+
"epoch": 3.81,
|
| 20014 |
+
"learning_rate": 0.00012273809523809525,
|
| 20015 |
+
"loss": 0.6347,
|
| 20016 |
+
"step": 3329
|
| 20017 |
+
},
|
| 20018 |
+
{
|
| 20019 |
+
"epoch": 3.82,
|
| 20020 |
+
"learning_rate": 0.00012261904761904762,
|
| 20021 |
+
"loss": 0.5096,
|
| 20022 |
+
"step": 3330
|
| 20023 |
+
},
|
| 20024 |
+
{
|
| 20025 |
+
"epoch": 3.82,
|
| 20026 |
+
"learning_rate": 0.00012250000000000002,
|
| 20027 |
+
"loss": 0.5294,
|
| 20028 |
+
"step": 3331
|
| 20029 |
+
},
|
| 20030 |
+
{
|
| 20031 |
+
"epoch": 3.82,
|
| 20032 |
+
"learning_rate": 0.0001223809523809524,
|
| 20033 |
+
"loss": 0.5694,
|
| 20034 |
+
"step": 3332
|
| 20035 |
+
},
|
| 20036 |
+
{
|
| 20037 |
+
"epoch": 3.82,
|
| 20038 |
+
"learning_rate": 0.00012226190476190477,
|
| 20039 |
+
"loss": 0.5453,
|
| 20040 |
+
"step": 3333
|
| 20041 |
+
},
|
| 20042 |
+
{
|
| 20043 |
+
"epoch": 3.82,
|
| 20044 |
+
"learning_rate": 0.00012214285714285715,
|
| 20045 |
+
"loss": 0.5998,
|
| 20046 |
+
"step": 3334
|
| 20047 |
+
},
|
| 20048 |
+
{
|
| 20049 |
+
"epoch": 3.82,
|
| 20050 |
+
"learning_rate": 0.00012202380952380954,
|
| 20051 |
+
"loss": 0.5548,
|
| 20052 |
+
"step": 3335
|
| 20053 |
+
},
|
| 20054 |
+
{
|
| 20055 |
+
"epoch": 3.82,
|
| 20056 |
+
"learning_rate": 0.00012190476190476193,
|
| 20057 |
+
"loss": 0.5072,
|
| 20058 |
+
"step": 3336
|
| 20059 |
+
},
|
| 20060 |
+
{
|
| 20061 |
+
"epoch": 3.82,
|
| 20062 |
+
"learning_rate": 0.00012178571428571428,
|
| 20063 |
+
"loss": 0.5302,
|
| 20064 |
+
"step": 3337
|
| 20065 |
+
},
|
| 20066 |
+
{
|
| 20067 |
+
"epoch": 3.82,
|
| 20068 |
+
"learning_rate": 0.00012166666666666667,
|
| 20069 |
+
"loss": 0.5195,
|
| 20070 |
+
"step": 3338
|
| 20071 |
+
},
|
| 20072 |
+
{
|
| 20073 |
+
"epoch": 3.83,
|
| 20074 |
+
"learning_rate": 0.00012154761904761904,
|
| 20075 |
+
"loss": 0.5753,
|
| 20076 |
+
"step": 3339
|
| 20077 |
+
},
|
| 20078 |
+
{
|
| 20079 |
+
"epoch": 3.83,
|
| 20080 |
+
"learning_rate": 0.00012142857142857143,
|
| 20081 |
+
"loss": 0.5509,
|
| 20082 |
+
"step": 3340
|
| 20083 |
+
},
|
| 20084 |
+
{
|
| 20085 |
+
"epoch": 3.83,
|
| 20086 |
+
"learning_rate": 0.0001213095238095238,
|
| 20087 |
+
"loss": 0.558,
|
| 20088 |
+
"step": 3341
|
| 20089 |
+
},
|
| 20090 |
+
{
|
| 20091 |
+
"epoch": 3.83,
|
| 20092 |
+
"learning_rate": 0.0001211904761904762,
|
| 20093 |
+
"loss": 0.551,
|
| 20094 |
+
"step": 3342
|
| 20095 |
+
},
|
| 20096 |
+
{
|
| 20097 |
+
"epoch": 3.83,
|
| 20098 |
+
"learning_rate": 0.00012107142857142857,
|
| 20099 |
+
"loss": 0.5268,
|
| 20100 |
+
"step": 3343
|
| 20101 |
+
},
|
| 20102 |
+
{
|
| 20103 |
+
"epoch": 3.83,
|
| 20104 |
+
"learning_rate": 0.00012095238095238095,
|
| 20105 |
+
"loss": 0.5207,
|
| 20106 |
+
"step": 3344
|
| 20107 |
+
},
|
| 20108 |
+
{
|
| 20109 |
+
"epoch": 3.83,
|
| 20110 |
+
"learning_rate": 0.00012083333333333333,
|
| 20111 |
+
"loss": 0.5635,
|
| 20112 |
+
"step": 3345
|
| 20113 |
+
},
|
| 20114 |
+
{
|
| 20115 |
+
"epoch": 3.83,
|
| 20116 |
+
"learning_rate": 0.00012071428571428571,
|
| 20117 |
+
"loss": 0.5585,
|
| 20118 |
+
"step": 3346
|
| 20119 |
+
},
|
| 20120 |
+
{
|
| 20121 |
+
"epoch": 3.84,
|
| 20122 |
+
"learning_rate": 0.0001205952380952381,
|
| 20123 |
+
"loss": 0.559,
|
| 20124 |
+
"step": 3347
|
| 20125 |
+
},
|
| 20126 |
+
{
|
| 20127 |
+
"epoch": 3.84,
|
| 20128 |
+
"learning_rate": 0.00012047619047619047,
|
| 20129 |
+
"loss": 0.5477,
|
| 20130 |
+
"step": 3348
|
| 20131 |
+
},
|
| 20132 |
+
{
|
| 20133 |
+
"epoch": 3.84,
|
| 20134 |
+
"learning_rate": 0.00012035714285714286,
|
| 20135 |
+
"loss": 0.5624,
|
| 20136 |
+
"step": 3349
|
| 20137 |
+
},
|
| 20138 |
+
{
|
| 20139 |
+
"epoch": 3.84,
|
| 20140 |
+
"learning_rate": 0.00012023809523809524,
|
| 20141 |
+
"loss": 0.5879,
|
| 20142 |
+
"step": 3350
|
| 20143 |
+
},
|
| 20144 |
+
{
|
| 20145 |
+
"epoch": 3.84,
|
| 20146 |
+
"learning_rate": 0.00012011904761904761,
|
| 20147 |
+
"loss": 0.573,
|
| 20148 |
+
"step": 3351
|
| 20149 |
+
},
|
| 20150 |
+
{
|
| 20151 |
+
"epoch": 3.84,
|
| 20152 |
+
"learning_rate": 0.00012,
|
| 20153 |
+
"loss": 0.5383,
|
| 20154 |
+
"step": 3352
|
| 20155 |
+
},
|
| 20156 |
+
{
|
| 20157 |
+
"epoch": 3.84,
|
| 20158 |
+
"learning_rate": 0.00011988095238095238,
|
| 20159 |
+
"loss": 0.5769,
|
| 20160 |
+
"step": 3353
|
| 20161 |
+
},
|
| 20162 |
+
{
|
| 20163 |
+
"epoch": 3.84,
|
| 20164 |
+
"learning_rate": 0.00011976190476190477,
|
| 20165 |
+
"loss": 0.5484,
|
| 20166 |
+
"step": 3354
|
| 20167 |
+
},
|
| 20168 |
+
{
|
| 20169 |
+
"epoch": 3.84,
|
| 20170 |
+
"learning_rate": 0.00011964285714285714,
|
| 20171 |
+
"loss": 0.5337,
|
| 20172 |
+
"step": 3355
|
| 20173 |
+
},
|
| 20174 |
+
{
|
| 20175 |
+
"epoch": 3.85,
|
| 20176 |
+
"learning_rate": 0.00011952380952380953,
|
| 20177 |
+
"loss": 0.5039,
|
| 20178 |
+
"step": 3356
|
| 20179 |
+
},
|
| 20180 |
+
{
|
| 20181 |
+
"epoch": 3.85,
|
| 20182 |
+
"learning_rate": 0.00011940476190476191,
|
| 20183 |
+
"loss": 0.5783,
|
| 20184 |
+
"step": 3357
|
| 20185 |
+
},
|
| 20186 |
+
{
|
| 20187 |
+
"epoch": 3.85,
|
| 20188 |
+
"learning_rate": 0.00011928571428571428,
|
| 20189 |
+
"loss": 0.5593,
|
| 20190 |
+
"step": 3358
|
| 20191 |
+
},
|
| 20192 |
+
{
|
| 20193 |
+
"epoch": 3.85,
|
| 20194 |
+
"learning_rate": 0.00011916666666666667,
|
| 20195 |
+
"loss": 0.5418,
|
| 20196 |
+
"step": 3359
|
| 20197 |
+
},
|
| 20198 |
+
{
|
| 20199 |
+
"epoch": 3.85,
|
| 20200 |
+
"learning_rate": 0.00011904761904761905,
|
| 20201 |
+
"loss": 0.5513,
|
| 20202 |
+
"step": 3360
|
| 20203 |
+
},
|
| 20204 |
+
{
|
| 20205 |
+
"epoch": 3.85,
|
| 20206 |
+
"learning_rate": 0.00011892857142857144,
|
| 20207 |
+
"loss": 0.5405,
|
| 20208 |
+
"step": 3361
|
| 20209 |
+
},
|
| 20210 |
+
{
|
| 20211 |
+
"epoch": 3.85,
|
| 20212 |
+
"learning_rate": 0.00011880952380952381,
|
| 20213 |
+
"loss": 0.5328,
|
| 20214 |
+
"step": 3362
|
| 20215 |
+
},
|
| 20216 |
+
{
|
| 20217 |
+
"epoch": 3.85,
|
| 20218 |
+
"learning_rate": 0.0001186904761904762,
|
| 20219 |
+
"loss": 0.5687,
|
| 20220 |
+
"step": 3363
|
| 20221 |
+
},
|
| 20222 |
+
{
|
| 20223 |
+
"epoch": 3.85,
|
| 20224 |
+
"learning_rate": 0.00011857142857142858,
|
| 20225 |
+
"loss": 0.5104,
|
| 20226 |
+
"step": 3364
|
| 20227 |
+
},
|
| 20228 |
+
{
|
| 20229 |
+
"epoch": 3.86,
|
| 20230 |
+
"learning_rate": 0.00011845238095238097,
|
| 20231 |
+
"loss": 0.5461,
|
| 20232 |
+
"step": 3365
|
| 20233 |
+
},
|
| 20234 |
+
{
|
| 20235 |
+
"epoch": 3.86,
|
| 20236 |
+
"learning_rate": 0.00011833333333333334,
|
| 20237 |
+
"loss": 0.5474,
|
| 20238 |
+
"step": 3366
|
| 20239 |
+
},
|
| 20240 |
+
{
|
| 20241 |
+
"epoch": 3.86,
|
| 20242 |
+
"learning_rate": 0.00011821428571428572,
|
| 20243 |
+
"loss": 0.5646,
|
| 20244 |
+
"step": 3367
|
| 20245 |
+
},
|
| 20246 |
+
{
|
| 20247 |
+
"epoch": 3.86,
|
| 20248 |
+
"learning_rate": 0.0001180952380952381,
|
| 20249 |
+
"loss": 0.5622,
|
| 20250 |
+
"step": 3368
|
| 20251 |
+
},
|
| 20252 |
+
{
|
| 20253 |
+
"epoch": 3.86,
|
| 20254 |
+
"learning_rate": 0.00011797619047619048,
|
| 20255 |
+
"loss": 0.5355,
|
| 20256 |
+
"step": 3369
|
| 20257 |
+
},
|
| 20258 |
+
{
|
| 20259 |
+
"epoch": 3.86,
|
| 20260 |
+
"learning_rate": 0.00011785714285714287,
|
| 20261 |
+
"loss": 0.5475,
|
| 20262 |
+
"step": 3370
|
| 20263 |
+
},
|
| 20264 |
+
{
|
| 20265 |
+
"epoch": 3.86,
|
| 20266 |
+
"learning_rate": 0.00011773809523809525,
|
| 20267 |
+
"loss": 0.508,
|
| 20268 |
+
"step": 3371
|
| 20269 |
+
},
|
| 20270 |
+
{
|
| 20271 |
+
"epoch": 3.86,
|
| 20272 |
+
"learning_rate": 0.00011761904761904763,
|
| 20273 |
+
"loss": 0.5676,
|
| 20274 |
+
"step": 3372
|
| 20275 |
+
},
|
| 20276 |
+
{
|
| 20277 |
+
"epoch": 3.87,
|
| 20278 |
+
"learning_rate": 0.00011750000000000001,
|
| 20279 |
+
"loss": 0.5331,
|
| 20280 |
+
"step": 3373
|
| 20281 |
+
},
|
| 20282 |
+
{
|
| 20283 |
+
"epoch": 3.87,
|
| 20284 |
+
"learning_rate": 0.00011738095238095239,
|
| 20285 |
+
"loss": 0.5334,
|
| 20286 |
+
"step": 3374
|
| 20287 |
+
},
|
| 20288 |
+
{
|
| 20289 |
+
"epoch": 3.87,
|
| 20290 |
+
"learning_rate": 0.00011726190476190477,
|
| 20291 |
+
"loss": 0.5539,
|
| 20292 |
+
"step": 3375
|
| 20293 |
+
},
|
| 20294 |
+
{
|
| 20295 |
+
"epoch": 3.87,
|
| 20296 |
+
"learning_rate": 0.00011714285714285715,
|
| 20297 |
+
"loss": 0.5234,
|
| 20298 |
+
"step": 3376
|
| 20299 |
+
},
|
| 20300 |
+
{
|
| 20301 |
+
"epoch": 3.87,
|
| 20302 |
+
"learning_rate": 0.00011702380952380954,
|
| 20303 |
+
"loss": 0.5701,
|
| 20304 |
+
"step": 3377
|
| 20305 |
+
},
|
| 20306 |
+
{
|
| 20307 |
+
"epoch": 3.87,
|
| 20308 |
+
"learning_rate": 0.00011690476190476191,
|
| 20309 |
+
"loss": 0.5423,
|
| 20310 |
+
"step": 3378
|
| 20311 |
+
},
|
| 20312 |
+
{
|
| 20313 |
+
"epoch": 3.87,
|
| 20314 |
+
"learning_rate": 0.0001167857142857143,
|
| 20315 |
+
"loss": 0.5564,
|
| 20316 |
+
"step": 3379
|
| 20317 |
+
},
|
| 20318 |
+
{
|
| 20319 |
+
"epoch": 3.87,
|
| 20320 |
+
"learning_rate": 0.00011666666666666668,
|
| 20321 |
+
"loss": 0.5805,
|
| 20322 |
+
"step": 3380
|
| 20323 |
+
},
|
| 20324 |
+
{
|
| 20325 |
+
"epoch": 3.87,
|
| 20326 |
+
"learning_rate": 0.00011654761904761905,
|
| 20327 |
+
"loss": 0.5378,
|
| 20328 |
+
"step": 3381
|
| 20329 |
+
},
|
| 20330 |
+
{
|
| 20331 |
+
"epoch": 3.88,
|
| 20332 |
+
"learning_rate": 0.00011642857142857144,
|
| 20333 |
+
"loss": 0.5864,
|
| 20334 |
+
"step": 3382
|
| 20335 |
+
},
|
| 20336 |
+
{
|
| 20337 |
+
"epoch": 3.88,
|
| 20338 |
+
"learning_rate": 0.00011630952380952382,
|
| 20339 |
+
"loss": 0.5432,
|
| 20340 |
+
"step": 3383
|
| 20341 |
+
},
|
| 20342 |
+
{
|
| 20343 |
+
"epoch": 3.88,
|
| 20344 |
+
"learning_rate": 0.00011619047619047621,
|
| 20345 |
+
"loss": 0.5579,
|
| 20346 |
+
"step": 3384
|
| 20347 |
+
},
|
| 20348 |
+
{
|
| 20349 |
+
"epoch": 3.88,
|
| 20350 |
+
"learning_rate": 0.00011607142857142858,
|
| 20351 |
+
"loss": 0.563,
|
| 20352 |
+
"step": 3385
|
| 20353 |
+
},
|
| 20354 |
+
{
|
| 20355 |
+
"epoch": 3.88,
|
| 20356 |
+
"learning_rate": 0.00011595238095238097,
|
| 20357 |
+
"loss": 0.5067,
|
| 20358 |
+
"step": 3386
|
| 20359 |
+
},
|
| 20360 |
+
{
|
| 20361 |
+
"epoch": 3.88,
|
| 20362 |
+
"learning_rate": 0.00011583333333333335,
|
| 20363 |
+
"loss": 0.5605,
|
| 20364 |
+
"step": 3387
|
| 20365 |
+
},
|
| 20366 |
+
{
|
| 20367 |
+
"epoch": 3.88,
|
| 20368 |
+
"learning_rate": 0.00011571428571428574,
|
| 20369 |
+
"loss": 0.5559,
|
| 20370 |
+
"step": 3388
|
| 20371 |
+
},
|
| 20372 |
+
{
|
| 20373 |
+
"epoch": 3.88,
|
| 20374 |
+
"learning_rate": 0.00011559523809523809,
|
| 20375 |
+
"loss": 0.5608,
|
| 20376 |
+
"step": 3389
|
| 20377 |
+
},
|
| 20378 |
+
{
|
| 20379 |
+
"epoch": 3.88,
|
| 20380 |
+
"learning_rate": 0.00011547619047619047,
|
| 20381 |
+
"loss": 0.5378,
|
| 20382 |
+
"step": 3390
|
| 20383 |
+
},
|
| 20384 |
+
{
|
| 20385 |
+
"epoch": 3.89,
|
| 20386 |
+
"learning_rate": 0.00011535714285714285,
|
| 20387 |
+
"loss": 0.568,
|
| 20388 |
+
"step": 3391
|
| 20389 |
+
},
|
| 20390 |
+
{
|
| 20391 |
+
"epoch": 3.89,
|
| 20392 |
+
"learning_rate": 0.00011523809523809524,
|
| 20393 |
+
"loss": 0.5588,
|
| 20394 |
+
"step": 3392
|
| 20395 |
+
},
|
| 20396 |
+
{
|
| 20397 |
+
"epoch": 3.89,
|
| 20398 |
+
"learning_rate": 0.00011511904761904761,
|
| 20399 |
+
"loss": 0.5437,
|
| 20400 |
+
"step": 3393
|
| 20401 |
+
},
|
| 20402 |
+
{
|
| 20403 |
+
"epoch": 3.89,
|
| 20404 |
+
"learning_rate": 0.00011499999999999999,
|
| 20405 |
+
"loss": 0.5587,
|
| 20406 |
+
"step": 3394
|
| 20407 |
+
},
|
| 20408 |
+
{
|
| 20409 |
+
"epoch": 3.89,
|
| 20410 |
+
"learning_rate": 0.00011488095238095238,
|
| 20411 |
+
"loss": 0.554,
|
| 20412 |
+
"step": 3395
|
| 20413 |
+
},
|
| 20414 |
+
{
|
| 20415 |
+
"epoch": 3.89,
|
| 20416 |
+
"learning_rate": 0.00011476190476190475,
|
| 20417 |
+
"loss": 0.5555,
|
| 20418 |
+
"step": 3396
|
| 20419 |
+
},
|
| 20420 |
+
{
|
| 20421 |
+
"epoch": 3.89,
|
| 20422 |
+
"learning_rate": 0.00011464285714285714,
|
| 20423 |
+
"loss": 0.5172,
|
| 20424 |
+
"step": 3397
|
| 20425 |
+
},
|
| 20426 |
+
{
|
| 20427 |
+
"epoch": 3.89,
|
| 20428 |
+
"learning_rate": 0.00011452380952380952,
|
| 20429 |
+
"loss": 0.5415,
|
| 20430 |
+
"step": 3398
|
| 20431 |
+
},
|
| 20432 |
+
{
|
| 20433 |
+
"epoch": 3.89,
|
| 20434 |
+
"learning_rate": 0.00011440476190476191,
|
| 20435 |
+
"loss": 0.6094,
|
| 20436 |
+
"step": 3399
|
| 20437 |
+
},
|
| 20438 |
+
{
|
| 20439 |
+
"epoch": 3.9,
|
| 20440 |
+
"learning_rate": 0.00011428571428571428,
|
| 20441 |
+
"loss": 0.5242,
|
| 20442 |
+
"step": 3400
|
| 20443 |
+
},
|
| 20444 |
+
{
|
| 20445 |
+
"epoch": 3.9,
|
| 20446 |
+
"learning_rate": 0.00011416666666666667,
|
| 20447 |
+
"loss": 0.5363,
|
| 20448 |
+
"step": 3401
|
| 20449 |
+
},
|
| 20450 |
+
{
|
| 20451 |
+
"epoch": 3.9,
|
| 20452 |
+
"learning_rate": 0.00011404761904761905,
|
| 20453 |
+
"loss": 0.5712,
|
| 20454 |
+
"step": 3402
|
| 20455 |
+
},
|
| 20456 |
+
{
|
| 20457 |
+
"epoch": 3.9,
|
| 20458 |
+
"learning_rate": 0.00011392857142857142,
|
| 20459 |
+
"loss": 0.5653,
|
| 20460 |
+
"step": 3403
|
| 20461 |
+
},
|
| 20462 |
+
{
|
| 20463 |
+
"epoch": 3.9,
|
| 20464 |
+
"learning_rate": 0.00011380952380952381,
|
| 20465 |
+
"loss": 0.5509,
|
| 20466 |
+
"step": 3404
|
| 20467 |
+
},
|
| 20468 |
+
{
|
| 20469 |
+
"epoch": 3.9,
|
| 20470 |
+
"learning_rate": 0.00011369047619047619,
|
| 20471 |
+
"loss": 0.565,
|
| 20472 |
+
"step": 3405
|
| 20473 |
+
},
|
| 20474 |
+
{
|
| 20475 |
+
"epoch": 3.9,
|
| 20476 |
+
"learning_rate": 0.00011357142857142858,
|
| 20477 |
+
"loss": 0.5598,
|
| 20478 |
+
"step": 3406
|
| 20479 |
+
},
|
| 20480 |
+
{
|
| 20481 |
+
"epoch": 3.9,
|
| 20482 |
+
"learning_rate": 0.00011345238095238095,
|
| 20483 |
+
"loss": 0.6088,
|
| 20484 |
+
"step": 3407
|
| 20485 |
+
},
|
| 20486 |
+
{
|
| 20487 |
+
"epoch": 3.91,
|
| 20488 |
+
"learning_rate": 0.00011333333333333334,
|
| 20489 |
+
"loss": 0.5468,
|
| 20490 |
+
"step": 3408
|
| 20491 |
+
},
|
| 20492 |
+
{
|
| 20493 |
+
"epoch": 3.91,
|
| 20494 |
+
"learning_rate": 0.00011321428571428572,
|
| 20495 |
+
"loss": 0.5478,
|
| 20496 |
+
"step": 3409
|
| 20497 |
+
},
|
| 20498 |
+
{
|
| 20499 |
+
"epoch": 3.91,
|
| 20500 |
+
"learning_rate": 0.00011309523809523809,
|
| 20501 |
+
"loss": 0.5713,
|
| 20502 |
+
"step": 3410
|
| 20503 |
+
},
|
| 20504 |
+
{
|
| 20505 |
+
"epoch": 3.91,
|
| 20506 |
+
"learning_rate": 0.00011297619047619048,
|
| 20507 |
+
"loss": 0.5141,
|
| 20508 |
+
"step": 3411
|
| 20509 |
+
},
|
| 20510 |
+
{
|
| 20511 |
+
"epoch": 3.91,
|
| 20512 |
+
"learning_rate": 0.00011285714285714286,
|
| 20513 |
+
"loss": 0.5523,
|
| 20514 |
+
"step": 3412
|
| 20515 |
+
},
|
| 20516 |
+
{
|
| 20517 |
+
"epoch": 3.91,
|
| 20518 |
+
"learning_rate": 0.00011273809523809525,
|
| 20519 |
+
"loss": 0.5656,
|
| 20520 |
+
"step": 3413
|
| 20521 |
+
},
|
| 20522 |
+
{
|
| 20523 |
+
"epoch": 3.91,
|
| 20524 |
+
"learning_rate": 0.00011261904761904762,
|
| 20525 |
+
"loss": 0.5465,
|
| 20526 |
+
"step": 3414
|
| 20527 |
+
},
|
| 20528 |
+
{
|
| 20529 |
+
"epoch": 3.91,
|
| 20530 |
+
"learning_rate": 0.00011250000000000001,
|
| 20531 |
+
"loss": 0.5425,
|
| 20532 |
+
"step": 3415
|
| 20533 |
+
},
|
| 20534 |
+
{
|
| 20535 |
+
"epoch": 3.91,
|
| 20536 |
+
"learning_rate": 0.00011238095238095239,
|
| 20537 |
+
"loss": 0.5587,
|
| 20538 |
+
"step": 3416
|
| 20539 |
+
},
|
| 20540 |
+
{
|
| 20541 |
+
"epoch": 3.92,
|
| 20542 |
+
"learning_rate": 0.00011226190476190476,
|
| 20543 |
+
"loss": 0.5277,
|
| 20544 |
+
"step": 3417
|
| 20545 |
+
},
|
| 20546 |
+
{
|
| 20547 |
+
"epoch": 3.92,
|
| 20548 |
+
"learning_rate": 0.00011214285714285715,
|
| 20549 |
+
"loss": 0.5795,
|
| 20550 |
+
"step": 3418
|
| 20551 |
+
},
|
| 20552 |
+
{
|
| 20553 |
+
"epoch": 3.92,
|
| 20554 |
+
"learning_rate": 0.00011202380952380953,
|
| 20555 |
+
"loss": 0.57,
|
| 20556 |
+
"step": 3419
|
| 20557 |
+
},
|
| 20558 |
+
{
|
| 20559 |
+
"epoch": 3.92,
|
| 20560 |
+
"learning_rate": 0.00011190476190476191,
|
| 20561 |
+
"loss": 0.5737,
|
| 20562 |
+
"step": 3420
|
| 20563 |
+
},
|
| 20564 |
+
{
|
| 20565 |
+
"epoch": 3.92,
|
| 20566 |
+
"learning_rate": 0.00011178571428571429,
|
| 20567 |
+
"loss": 0.549,
|
| 20568 |
+
"step": 3421
|
| 20569 |
+
},
|
| 20570 |
+
{
|
| 20571 |
+
"epoch": 3.92,
|
| 20572 |
+
"learning_rate": 0.00011166666666666668,
|
| 20573 |
+
"loss": 0.5521,
|
| 20574 |
+
"step": 3422
|
| 20575 |
+
},
|
| 20576 |
+
{
|
| 20577 |
+
"epoch": 3.92,
|
| 20578 |
+
"learning_rate": 0.00011154761904761905,
|
| 20579 |
+
"loss": 0.564,
|
| 20580 |
+
"step": 3423
|
| 20581 |
+
},
|
| 20582 |
+
{
|
| 20583 |
+
"epoch": 3.92,
|
| 20584 |
+
"learning_rate": 0.00011142857142857144,
|
| 20585 |
+
"loss": 0.5637,
|
| 20586 |
+
"step": 3424
|
| 20587 |
+
},
|
| 20588 |
+
{
|
| 20589 |
+
"epoch": 3.92,
|
| 20590 |
+
"learning_rate": 0.00011130952380952382,
|
| 20591 |
+
"loss": 0.5492,
|
| 20592 |
+
"step": 3425
|
| 20593 |
+
},
|
| 20594 |
+
{
|
| 20595 |
+
"epoch": 3.93,
|
| 20596 |
+
"learning_rate": 0.0001111904761904762,
|
| 20597 |
+
"loss": 0.5814,
|
| 20598 |
+
"step": 3426
|
| 20599 |
+
},
|
| 20600 |
+
{
|
| 20601 |
+
"epoch": 3.93,
|
| 20602 |
+
"learning_rate": 0.00011107142857142858,
|
| 20603 |
+
"loss": 0.5465,
|
| 20604 |
+
"step": 3427
|
| 20605 |
+
},
|
| 20606 |
+
{
|
| 20607 |
+
"epoch": 3.93,
|
| 20608 |
+
"learning_rate": 0.00011095238095238096,
|
| 20609 |
+
"loss": 0.541,
|
| 20610 |
+
"step": 3428
|
| 20611 |
+
},
|
| 20612 |
+
{
|
| 20613 |
+
"epoch": 3.93,
|
| 20614 |
+
"learning_rate": 0.00011083333333333335,
|
| 20615 |
+
"loss": 0.561,
|
| 20616 |
+
"step": 3429
|
| 20617 |
+
},
|
| 20618 |
+
{
|
| 20619 |
+
"epoch": 3.93,
|
| 20620 |
+
"learning_rate": 0.00011071428571428572,
|
| 20621 |
+
"loss": 0.5535,
|
| 20622 |
+
"step": 3430
|
| 20623 |
+
},
|
| 20624 |
+
{
|
| 20625 |
+
"epoch": 3.93,
|
| 20626 |
+
"learning_rate": 0.00011059523809523811,
|
| 20627 |
+
"loss": 0.569,
|
| 20628 |
+
"step": 3431
|
| 20629 |
+
},
|
| 20630 |
+
{
|
| 20631 |
+
"epoch": 3.93,
|
| 20632 |
+
"learning_rate": 0.00011047619047619049,
|
| 20633 |
+
"loss": 0.585,
|
| 20634 |
+
"step": 3432
|
| 20635 |
+
},
|
| 20636 |
+
{
|
| 20637 |
+
"epoch": 3.93,
|
| 20638 |
+
"learning_rate": 0.00011035714285714286,
|
| 20639 |
+
"loss": 0.5395,
|
| 20640 |
+
"step": 3433
|
| 20641 |
+
},
|
| 20642 |
+
{
|
| 20643 |
+
"epoch": 3.93,
|
| 20644 |
+
"learning_rate": 0.00011023809523809525,
|
| 20645 |
+
"loss": 0.5352,
|
| 20646 |
+
"step": 3434
|
| 20647 |
+
},
|
| 20648 |
+
{
|
| 20649 |
+
"epoch": 3.94,
|
| 20650 |
+
"learning_rate": 0.00011011904761904763,
|
| 20651 |
+
"loss": 0.5539,
|
| 20652 |
+
"step": 3435
|
| 20653 |
+
},
|
| 20654 |
+
{
|
| 20655 |
+
"epoch": 3.94,
|
| 20656 |
+
"learning_rate": 0.00011000000000000002,
|
| 20657 |
+
"loss": 0.5268,
|
| 20658 |
+
"step": 3436
|
| 20659 |
+
},
|
| 20660 |
+
{
|
| 20661 |
+
"epoch": 3.94,
|
| 20662 |
+
"learning_rate": 0.00010988095238095239,
|
| 20663 |
+
"loss": 0.5269,
|
| 20664 |
+
"step": 3437
|
| 20665 |
+
},
|
| 20666 |
+
{
|
| 20667 |
+
"epoch": 3.94,
|
| 20668 |
+
"learning_rate": 0.00010976190476190478,
|
| 20669 |
+
"loss": 0.5493,
|
| 20670 |
+
"step": 3438
|
| 20671 |
+
},
|
| 20672 |
+
{
|
| 20673 |
+
"epoch": 3.94,
|
| 20674 |
+
"learning_rate": 0.00010964285714285716,
|
| 20675 |
+
"loss": 0.5784,
|
| 20676 |
+
"step": 3439
|
| 20677 |
+
},
|
| 20678 |
+
{
|
| 20679 |
+
"epoch": 3.94,
|
| 20680 |
+
"learning_rate": 0.00010952380952380953,
|
| 20681 |
+
"loss": 0.5296,
|
| 20682 |
+
"step": 3440
|
| 20683 |
+
},
|
| 20684 |
+
{
|
| 20685 |
+
"epoch": 3.94,
|
| 20686 |
+
"learning_rate": 0.00010940476190476192,
|
| 20687 |
+
"loss": 0.5315,
|
| 20688 |
+
"step": 3441
|
| 20689 |
+
},
|
| 20690 |
+
{
|
| 20691 |
+
"epoch": 3.94,
|
| 20692 |
+
"learning_rate": 0.00010928571428571428,
|
| 20693 |
+
"loss": 0.5258,
|
| 20694 |
+
"step": 3442
|
| 20695 |
+
},
|
| 20696 |
+
{
|
| 20697 |
+
"epoch": 3.95,
|
| 20698 |
+
"learning_rate": 0.00010916666666666666,
|
| 20699 |
+
"loss": 0.5278,
|
| 20700 |
+
"step": 3443
|
| 20701 |
+
},
|
| 20702 |
+
{
|
| 20703 |
+
"epoch": 3.95,
|
| 20704 |
+
"learning_rate": 0.00010904761904761905,
|
| 20705 |
+
"loss": 0.5659,
|
| 20706 |
+
"step": 3444
|
| 20707 |
+
},
|
| 20708 |
+
{
|
| 20709 |
+
"epoch": 3.95,
|
| 20710 |
+
"learning_rate": 0.00010892857142857142,
|
| 20711 |
+
"loss": 0.5762,
|
| 20712 |
+
"step": 3445
|
| 20713 |
+
},
|
| 20714 |
+
{
|
| 20715 |
+
"epoch": 3.95,
|
| 20716 |
+
"learning_rate": 0.0001088095238095238,
|
| 20717 |
+
"loss": 0.5747,
|
| 20718 |
+
"step": 3446
|
| 20719 |
+
},
|
| 20720 |
+
{
|
| 20721 |
+
"epoch": 3.95,
|
| 20722 |
+
"learning_rate": 0.00010869047619047619,
|
| 20723 |
+
"loss": 0.5378,
|
| 20724 |
+
"step": 3447
|
| 20725 |
+
},
|
| 20726 |
+
{
|
| 20727 |
+
"epoch": 3.95,
|
| 20728 |
+
"learning_rate": 0.00010857142857142856,
|
| 20729 |
+
"loss": 0.5867,
|
| 20730 |
+
"step": 3448
|
| 20731 |
+
},
|
| 20732 |
+
{
|
| 20733 |
+
"epoch": 3.95,
|
| 20734 |
+
"learning_rate": 0.00010845238095238095,
|
| 20735 |
+
"loss": 0.5225,
|
| 20736 |
+
"step": 3449
|
| 20737 |
+
},
|
| 20738 |
+
{
|
| 20739 |
+
"epoch": 3.95,
|
| 20740 |
+
"learning_rate": 0.00010833333333333333,
|
| 20741 |
+
"loss": 0.5508,
|
| 20742 |
+
"step": 3450
|
| 20743 |
+
},
|
| 20744 |
+
{
|
| 20745 |
+
"epoch": 3.95,
|
| 20746 |
+
"learning_rate": 0.00010821428571428572,
|
| 20747 |
+
"loss": 0.5089,
|
| 20748 |
+
"step": 3451
|
| 20749 |
+
},
|
| 20750 |
+
{
|
| 20751 |
+
"epoch": 3.96,
|
| 20752 |
+
"learning_rate": 0.00010809523809523809,
|
| 20753 |
+
"loss": 0.5721,
|
| 20754 |
+
"step": 3452
|
| 20755 |
+
},
|
| 20756 |
+
{
|
| 20757 |
+
"epoch": 3.96,
|
| 20758 |
+
"learning_rate": 0.00010797619047619048,
|
| 20759 |
+
"loss": 0.5763,
|
| 20760 |
+
"step": 3453
|
| 20761 |
+
},
|
| 20762 |
+
{
|
| 20763 |
+
"epoch": 3.96,
|
| 20764 |
+
"learning_rate": 0.00010785714285714286,
|
| 20765 |
+
"loss": 0.5732,
|
| 20766 |
+
"step": 3454
|
| 20767 |
+
},
|
| 20768 |
+
{
|
| 20769 |
+
"epoch": 3.96,
|
| 20770 |
+
"learning_rate": 0.00010773809523809523,
|
| 20771 |
+
"loss": 0.5181,
|
| 20772 |
+
"step": 3455
|
| 20773 |
+
},
|
| 20774 |
+
{
|
| 20775 |
+
"epoch": 3.96,
|
| 20776 |
+
"learning_rate": 0.00010761904761904762,
|
| 20777 |
+
"loss": 0.5731,
|
| 20778 |
+
"step": 3456
|
| 20779 |
+
},
|
| 20780 |
+
{
|
| 20781 |
+
"epoch": 3.96,
|
| 20782 |
+
"learning_rate": 0.0001075,
|
| 20783 |
+
"loss": 0.5341,
|
| 20784 |
+
"step": 3457
|
| 20785 |
+
},
|
| 20786 |
+
{
|
| 20787 |
+
"epoch": 3.96,
|
| 20788 |
+
"learning_rate": 0.00010738095238095239,
|
| 20789 |
+
"loss": 0.5768,
|
| 20790 |
+
"step": 3458
|
| 20791 |
+
},
|
| 20792 |
+
{
|
| 20793 |
+
"epoch": 3.96,
|
| 20794 |
+
"learning_rate": 0.00010726190476190476,
|
| 20795 |
+
"loss": 0.5666,
|
| 20796 |
+
"step": 3459
|
| 20797 |
+
},
|
| 20798 |
+
{
|
| 20799 |
+
"epoch": 3.96,
|
| 20800 |
+
"learning_rate": 0.00010714285714285715,
|
| 20801 |
+
"loss": 0.5368,
|
| 20802 |
+
"step": 3460
|
| 20803 |
+
},
|
| 20804 |
+
{
|
| 20805 |
+
"epoch": 3.97,
|
| 20806 |
+
"learning_rate": 0.00010702380952380953,
|
| 20807 |
+
"loss": 0.5522,
|
| 20808 |
+
"step": 3461
|
| 20809 |
+
},
|
| 20810 |
+
{
|
| 20811 |
+
"epoch": 3.97,
|
| 20812 |
+
"learning_rate": 0.0001069047619047619,
|
| 20813 |
+
"loss": 0.5912,
|
| 20814 |
+
"step": 3462
|
| 20815 |
+
},
|
| 20816 |
+
{
|
| 20817 |
+
"epoch": 3.97,
|
| 20818 |
+
"learning_rate": 0.00010678571428571429,
|
| 20819 |
+
"loss": 0.5751,
|
| 20820 |
+
"step": 3463
|
| 20821 |
+
},
|
| 20822 |
+
{
|
| 20823 |
+
"epoch": 3.97,
|
| 20824 |
+
"learning_rate": 0.00010666666666666667,
|
| 20825 |
+
"loss": 0.561,
|
| 20826 |
+
"step": 3464
|
| 20827 |
+
},
|
| 20828 |
+
{
|
| 20829 |
+
"epoch": 3.97,
|
| 20830 |
+
"learning_rate": 0.00010654761904761906,
|
| 20831 |
+
"loss": 0.5452,
|
| 20832 |
+
"step": 3465
|
| 20833 |
+
},
|
| 20834 |
+
{
|
| 20835 |
+
"epoch": 3.97,
|
| 20836 |
+
"learning_rate": 0.00010642857142857143,
|
| 20837 |
+
"loss": 0.5413,
|
| 20838 |
+
"step": 3466
|
| 20839 |
+
},
|
| 20840 |
+
{
|
| 20841 |
+
"epoch": 3.97,
|
| 20842 |
+
"learning_rate": 0.00010630952380952382,
|
| 20843 |
+
"loss": 0.5632,
|
| 20844 |
+
"step": 3467
|
| 20845 |
+
},
|
| 20846 |
+
{
|
| 20847 |
+
"epoch": 3.97,
|
| 20848 |
+
"learning_rate": 0.0001061904761904762,
|
| 20849 |
+
"loss": 0.5021,
|
| 20850 |
+
"step": 3468
|
| 20851 |
+
},
|
| 20852 |
+
{
|
| 20853 |
+
"epoch": 3.98,
|
| 20854 |
+
"learning_rate": 0.00010607142857142857,
|
| 20855 |
+
"loss": 0.5845,
|
| 20856 |
+
"step": 3469
|
| 20857 |
+
},
|
| 20858 |
+
{
|
| 20859 |
+
"epoch": 3.98,
|
| 20860 |
+
"learning_rate": 0.00010595238095238096,
|
| 20861 |
+
"loss": 0.535,
|
| 20862 |
+
"step": 3470
|
| 20863 |
+
},
|
| 20864 |
+
{
|
| 20865 |
+
"epoch": 3.98,
|
| 20866 |
+
"learning_rate": 0.00010583333333333334,
|
| 20867 |
+
"loss": 0.532,
|
| 20868 |
+
"step": 3471
|
| 20869 |
+
},
|
| 20870 |
+
{
|
| 20871 |
+
"epoch": 3.98,
|
| 20872 |
+
"learning_rate": 0.00010571428571428572,
|
| 20873 |
+
"loss": 0.536,
|
| 20874 |
+
"step": 3472
|
| 20875 |
+
},
|
| 20876 |
+
{
|
| 20877 |
+
"epoch": 3.98,
|
| 20878 |
+
"learning_rate": 0.0001055952380952381,
|
| 20879 |
+
"loss": 0.5485,
|
| 20880 |
+
"step": 3473
|
| 20881 |
+
},
|
| 20882 |
+
{
|
| 20883 |
+
"epoch": 3.98,
|
| 20884 |
+
"learning_rate": 0.00010547619047619049,
|
| 20885 |
+
"loss": 0.5117,
|
| 20886 |
+
"step": 3474
|
| 20887 |
+
},
|
| 20888 |
+
{
|
| 20889 |
+
"epoch": 3.98,
|
| 20890 |
+
"learning_rate": 0.00010535714285714286,
|
| 20891 |
+
"loss": 0.5416,
|
| 20892 |
+
"step": 3475
|
| 20893 |
+
},
|
| 20894 |
+
{
|
| 20895 |
+
"epoch": 3.98,
|
| 20896 |
+
"learning_rate": 0.00010523809523809525,
|
| 20897 |
+
"loss": 0.5257,
|
| 20898 |
+
"step": 3476
|
| 20899 |
+
},
|
| 20900 |
+
{
|
| 20901 |
+
"epoch": 3.98,
|
| 20902 |
+
"learning_rate": 0.00010511904761904763,
|
| 20903 |
+
"loss": 0.5464,
|
| 20904 |
+
"step": 3477
|
| 20905 |
+
},
|
| 20906 |
+
{
|
| 20907 |
+
"epoch": 3.99,
|
| 20908 |
+
"learning_rate": 0.000105,
|
| 20909 |
+
"loss": 0.5275,
|
| 20910 |
+
"step": 3478
|
| 20911 |
+
},
|
| 20912 |
+
{
|
| 20913 |
+
"epoch": 3.99,
|
| 20914 |
+
"learning_rate": 0.0001048809523809524,
|
| 20915 |
+
"loss": 0.5764,
|
| 20916 |
+
"step": 3479
|
| 20917 |
+
},
|
| 20918 |
+
{
|
| 20919 |
+
"epoch": 3.99,
|
| 20920 |
+
"learning_rate": 0.00010476190476190477,
|
| 20921 |
+
"loss": 0.5743,
|
| 20922 |
+
"step": 3480
|
| 20923 |
+
},
|
| 20924 |
+
{
|
| 20925 |
+
"epoch": 3.99,
|
| 20926 |
+
"learning_rate": 0.00010464285714285716,
|
| 20927 |
+
"loss": 0.5315,
|
| 20928 |
+
"step": 3481
|
| 20929 |
+
},
|
| 20930 |
+
{
|
| 20931 |
+
"epoch": 3.99,
|
| 20932 |
+
"learning_rate": 0.00010452380952380953,
|
| 20933 |
+
"loss": 0.5622,
|
| 20934 |
+
"step": 3482
|
| 20935 |
+
},
|
| 20936 |
+
{
|
| 20937 |
+
"epoch": 3.99,
|
| 20938 |
+
"learning_rate": 0.00010440476190476192,
|
| 20939 |
+
"loss": 0.5292,
|
| 20940 |
+
"step": 3483
|
| 20941 |
+
},
|
| 20942 |
+
{
|
| 20943 |
+
"epoch": 3.99,
|
| 20944 |
+
"learning_rate": 0.0001042857142857143,
|
| 20945 |
+
"loss": 0.5818,
|
| 20946 |
+
"step": 3484
|
| 20947 |
+
},
|
| 20948 |
+
{
|
| 20949 |
+
"epoch": 3.99,
|
| 20950 |
+
"learning_rate": 0.00010416666666666667,
|
| 20951 |
+
"loss": 0.5681,
|
| 20952 |
+
"step": 3485
|
| 20953 |
+
},
|
| 20954 |
+
{
|
| 20955 |
+
"epoch": 3.99,
|
| 20956 |
+
"learning_rate": 0.00010404761904761906,
|
| 20957 |
+
"loss": 0.6092,
|
| 20958 |
+
"step": 3486
|
| 20959 |
+
},
|
| 20960 |
+
{
|
| 20961 |
+
"epoch": 4.0,
|
| 20962 |
+
"learning_rate": 0.00010392857142857144,
|
| 20963 |
+
"loss": 0.538,
|
| 20964 |
+
"step": 3487
|
| 20965 |
+
},
|
| 20966 |
+
{
|
| 20967 |
+
"epoch": 4.0,
|
| 20968 |
+
"learning_rate": 0.00010380952380952383,
|
| 20969 |
+
"loss": 0.557,
|
| 20970 |
+
"step": 3488
|
 20971      }
 20972      ],
 20973      "logging_steps": 1,
 20974      "max_steps": 4360,
 20975      "num_train_epochs": 5,
 20976      "save_steps": 218,
 20977 +    "total_flos": 6.920282952314546e+19,
 20978      "trial_name": null,
 20979      "trial_params": null
 20980      }
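For reference, the fields shown in the trainer_state.json hunks above (global_step, epoch, max_steps, and the per-step loss and learning_rate entries) can be read straight back out of the saved checkpoint; in transformers the logged step entries live under the log_history key. A small sketch, assuming the checkpoint directory has been downloaded to a local path:

    import json

    # Hypothetical local path to the downloaded checkpoint directory.
    with open("last-checkpoint/trainer_state.json", "r", encoding="utf-8") as f:
        state = json.load(f)

    print(state["global_step"], state["epoch"], state["max_steps"])  # 3488  3.9967...  4360

    # One dict per logged optimizer step (logging_steps is 1 in this run), e.g. the last entry:
    # {"epoch": 4.0, "learning_rate": 0.00010380952380952383, "loss": 0.557, "step": 3488}
    for entry in state["log_history"][-3:]:
        print(entry["step"], entry["loss"], entry["learning_rate"])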