Training in progress, step 1385, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d26493a5ca60fca0062f30d3405b846650609c8c785e1f3a6455db1063be8ef7
 size 80013120
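The adapter_model.safetensors pointer above tracks a roughly 80 MB weight file, which is consistent with a PEFT/LoRA adapter saved alongside a frozen base model rather than a full model dump. A minimal sketch of applying it, assuming this is a standard PEFT adapter checkpoint and that BASE_MODEL (not recorded anywhere in this diff) names the base model it was trained from:

from peft import PeftModel
from transformers import AutoModelForCausalLM

BASE_MODEL = "..."  # hypothetical: the base model id is not part of this commit

# Load the frozen base weights, then apply the adapter from the checkpoint
# directory; PeftModel.from_pretrained reads adapter_model.safetensors there.
base = AutoModelForCausalLM.from_pretrained(BASE_MODEL)
model = PeftModel.from_pretrained(base, "last-checkpoint")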
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fc41aaae2b7ab8d47d9b5a727a60d6e0d0e58e8272b313d73bd352640a7ed73c
 size 41120084
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5e635a96928847cf573055c1fd5602b4297c94f6635a571521fb152406081a50
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:345b087a89a94b4e04adce8f5400cfa20e0daa5fd60e9c52298f716df1f64454
 size 1064
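All four files above are stored with Git LFS, so the commit only rewrites their pointer files: each pointer carries the spec version, the sha256 oid of the blob, and its byte size, and only the oid line changes between checkpoints. A minimal sketch, using only the standard library, of verifying a fetched blob against the oid and size shown in its pointer:

import hashlib

def verify_lfs_blob(path: str, expected_oid: str, expected_size: int) -> bool:
    """Hash the file in 1 MiB chunks and compare against the pointer's oid/size."""
    sha = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == expected_oid and size == expected_size

# Values copied from the scheduler.pt pointer in this commit.
print(verify_lfs_blob(
    "last-checkpoint/scheduler.pt",
    "345b087a89a94b4e04adce8f5400cfa20e0daa5fd60e9c52298f716df1f64454",
    1064,
))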
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.383284903832849,
   "eval_steps": 347,
-  "global_step":
+  "global_step": 1385,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7326,6 +7326,2414 @@
   "eval_samples_per_second": 34.471,
   "eval_steps_per_second": 17.236,
   "step": 1041
+    },
+    { "epoch": 0.28836308288363083, "grad_norm": 0.6717551350593567, "learning_rate": 2.916825622375794e-05, "loss": 1.2481, "step": 1042 },
+    { "epoch": 0.2886398228863982, "grad_norm": 0.668138325214386, "learning_rate": 2.9007158950005985e-05, "loss": 1.2469, "step": 1043 },
+    { "epoch": 0.2889165628891656, "grad_norm": 0.7524674534797668, "learning_rate": 2.8846432279071467e-05, "loss": 1.287, "step": 1044 },
+    { "epoch": 0.289193302891933, "grad_norm": 0.5722724199295044, "learning_rate": 2.8686077049993287e-05, "loss": 1.396, "step": 1045 },
+    { "epoch": 0.2894700428947004, "grad_norm": 0.6176677942276001, "learning_rate": 2.8526094099871315e-05, "loss": 1.2608, "step": 1046 },
+    { "epoch": 0.2897467828974678, "grad_norm": 0.6025077104568481, "learning_rate": 2.8366484263862083e-05, "loss": 1.5375, "step": 1047 },
+    { "epoch": 0.29002352290023525, "grad_norm": 0.8081961274147034, "learning_rate": 2.82072483751743e-05, "loss": 1.4041, "step": 1048 },
+    { "epoch": 0.29030026290300265, "grad_norm": 0.6383569836616516, "learning_rate": 2.8048387265064556e-05, "loss": 1.1895, "step": 1049 },
+    { "epoch": 0.29057700290577004, "grad_norm": 0.6521155834197998, "learning_rate": 2.7889901762833083e-05, "loss": 1.9584, "step": 1050 },
+    { "epoch": 0.29085374290853744, "grad_norm": 0.4987170994281769, "learning_rate": 2.7731792695819225e-05, "loss": 1.0366, "step": 1051 },
+    { "epoch": 0.29113048291130483, "grad_norm": 0.6860690116882324, "learning_rate": 2.7574060889397257e-05, "loss": 1.1016, "step": 1052 },
+    { "epoch": 0.29140722291407223, "grad_norm": 0.6685996055603027, "learning_rate": 2.7416707166972112e-05, "loss": 1.059, "step": 1053 },
+    { "epoch": 0.2916839629168396, "grad_norm": 0.6234917044639587, "learning_rate": 2.7259732349974898e-05, "loss": 1.3718, "step": 1054 },
+    { "epoch": 0.291960702919607, "grad_norm": 0.5685679316520691, "learning_rate": 2.7103137257858868e-05, "loss": 1.5001, "step": 1055 },
+    { "epoch": 0.2922374429223744, "grad_norm": 0.4766806662082672, "learning_rate": 2.694692270809487e-05, "loss": 1.0419, "step": 1056 },
+    { "epoch": 0.2925141829251418, "grad_norm": 0.5562795996665955, "learning_rate": 2.679108951616729e-05, "loss": 1.0802, "step": 1057 },
+    { "epoch": 0.2927909229279092, "grad_norm": 0.6752498745918274, "learning_rate": 2.663563849556976e-05, "loss": 1.1668, "step": 1058 },
+    { "epoch": 0.29306766293067665, "grad_norm": 0.5150704979896545, "learning_rate": 2.6480570457800745e-05, "loss": 1.5995, "step": 1059 },
+    { "epoch": 0.29334440293344405, "grad_norm": 0.6578889489173889, "learning_rate": 2.6325886212359498e-05, "loss": 1.0849, "step": 1060 },
+    { "epoch": 0.29362114293621144, "grad_norm": 0.5256742835044861, "learning_rate": 2.6171586566741814e-05, "loss": 1.5569, "step": 1061 },
+    { "epoch": 0.29389788293897884, "grad_norm": 0.6316726207733154, "learning_rate": 2.6017672326435683e-05, "loss": 1.1035, "step": 1062 },
+    { "epoch": 0.29417462294174623, "grad_norm": 0.5075963139533997, "learning_rate": 2.586414429491718e-05, "loss": 1.3896, "step": 1063 },
+    { "epoch": 0.29445136294451363, "grad_norm": 0.7691916823387146, "learning_rate": 2.571100327364634e-05, "loss": 1.2896, "step": 1064 },
+    { "epoch": 0.294728102947281, "grad_norm": 0.70186448097229, "learning_rate": 2.5558250062062828e-05, "loss": 1.1655, "step": 1065 },
+    { "epoch": 0.2950048429500484, "grad_norm": 0.48962098360061646, "learning_rate": 2.540588545758179e-05, "loss": 1.5383, "step": 1066 },
+    { "epoch": 0.2952815829528158, "grad_norm": 0.6058703660964966, "learning_rate": 2.5253910255589864e-05, "loss": 1.0533, "step": 1067 },
+    { "epoch": 0.2955583229555832, "grad_norm": 0.5107095837593079, "learning_rate": 2.510232524944076e-05, "loss": 1.9488, "step": 1068 },
+    { "epoch": 0.2958350629583506, "grad_norm": 0.5227340459823608, "learning_rate": 2.4951131230451363e-05, "loss": 1.0348, "step": 1069 },
+    { "epoch": 0.29611180296111805, "grad_norm": 0.6033306121826172, "learning_rate": 2.4800328987897427e-05, "loss": 1.2017, "step": 1070 },
+    { "epoch": 0.29638854296388545, "grad_norm": 0.6987389922142029, "learning_rate": 2.4649919309009496e-05, "loss": 1.5657, "step": 1071 },
+    { "epoch": 0.29666528296665284, "grad_norm": 0.5803182125091553, "learning_rate": 2.4499902978968935e-05, "loss": 1.7115, "step": 1072 },
+    { "epoch": 0.29694202296942024, "grad_norm": 0.46591827273368835, "learning_rate": 2.435028078090358e-05, "loss": 1.4757, "step": 1073 },
+    { "epoch": 0.29721876297218763, "grad_norm": 0.7896290421485901, "learning_rate": 2.420105349588391e-05, "loss": 1.3003, "step": 1074 },
+    { "epoch": 0.29749550297495503, "grad_norm": 0.626768171787262, "learning_rate": 2.4052221902918725e-05, "loss": 1.1198, "step": 1075 },
+    { "epoch": 0.2977722429777224, "grad_norm": 0.6310368180274963, "learning_rate": 2.390378677895132e-05, "loss": 1.1947, "step": 1076 },
+    { "epoch": 0.2980489829804898, "grad_norm": 0.580138087272644, "learning_rate": 2.37557488988552e-05, "loss": 1.5959, "step": 1077 },
+    { "epoch": 0.2983257229832572, "grad_norm": 0.8005403280258179, "learning_rate": 2.360810903543025e-05, "loss": 1.8946, "step": 1078 },
+    { "epoch": 0.2986024629860246, "grad_norm": 0.5405651926994324, "learning_rate": 2.346086795939847e-05, "loss": 1.5686, "step": 1079 },
+    { "epoch": 0.298879202988792, "grad_norm": 0.5966051816940308, "learning_rate": 2.3314026439400217e-05, "loss": 1.4241, "step": 1080 },
+    { "epoch": 0.29915594299155945, "grad_norm": 0.5318151116371155, "learning_rate": 2.316758524198994e-05, "loss": 1.5096, "step": 1081 },
+    { "epoch": 0.29943268299432685, "grad_norm": 0.6126238107681274, "learning_rate": 2.3021545131632314e-05, "loss": 1.235, "step": 1082 },
+    { "epoch": 0.29970942299709424, "grad_norm": 0.5007676482200623, "learning_rate": 2.2875906870698294e-05, "loss": 1.1472, "step": 1083 },
+    { "epoch": 0.29998616299986164, "grad_norm": 0.6831592321395874, "learning_rate": 2.2730671219460985e-05, "loss": 1.2188, "step": 1084 },
+    { "epoch": 0.30026290300262903, "grad_norm": 0.5667163729667664, "learning_rate": 2.2585838936091754e-05, "loss": 1.2013, "step": 1085 },
+    { "epoch": 0.30053964300539643, "grad_norm": 0.6512935161590576, "learning_rate": 2.244141077665637e-05, "loss": 1.1401, "step": 1086 },
+    { "epoch": 0.3008163830081638, "grad_norm": 0.5757260918617249, "learning_rate": 2.2297387495110855e-05, "loss": 2.0484, "step": 1087 },
+    { "epoch": 0.3010931230109312, "grad_norm": 0.5581321120262146, "learning_rate": 2.2153769843297667e-05, "loss": 1.3487, "step": 1088 },
+    { "epoch": 0.3013698630136986, "grad_norm": 0.6298123002052307, "learning_rate": 2.201055857094184e-05, "loss": 1.2346, "step": 1089 },
+    { "epoch": 0.301646603016466, "grad_norm": 0.472779780626297, "learning_rate": 2.1867754425646926e-05, "loss": 1.4843, "step": 1090 },
+    { "epoch": 0.3019233430192334, "grad_norm": 0.5135462284088135, "learning_rate": 2.172535815289113e-05, "loss": 1.9218, "step": 1091 },
+    { "epoch": 0.30220008302200085, "grad_norm": 0.5767914056777954, "learning_rate": 2.1583370496023536e-05, "loss": 1.3602, "step": 1092 },
+    { "epoch": 0.30247682302476825, "grad_norm": 0.6168928742408752, "learning_rate": 2.1441792196260048e-05, "loss": 1.1344, "step": 1093 },
+    { "epoch": 0.30275356302753564, "grad_norm": 0.7433022260665894, "learning_rate": 2.130062399267966e-05, "loss": 1.5016, "step": 1094 },
+    { "epoch": 0.30303030303030304, "grad_norm": 0.6746364235877991, "learning_rate": 2.115986662222058e-05, "loss": 1.5802, "step": 1095 },
+    { "epoch": 0.30330704303307043, "grad_norm": 0.6017640233039856, "learning_rate": 2.1019520819676252e-05, "loss": 1.3884, "step": 1096 },
+    { "epoch": 0.30358378303583783, "grad_norm": 0.6644181609153748, "learning_rate": 2.0879587317691708e-05, "loss": 2.0865, "step": 1097 },
+    { "epoch": 0.3038605230386052, "grad_norm": 0.6440253853797913, "learning_rate": 2.0740066846759608e-05, "loss": 1.1766, "step": 1098 },
+    { "epoch": 0.3041372630413726, "grad_norm": 0.6029407978057861, "learning_rate": 2.0600960135216462e-05, "loss": 1.422, "step": 1099 },
+    { "epoch": 0.30441400304414, "grad_norm": 0.5927180647850037, "learning_rate": 2.0462267909238896e-05, "loss": 1.1925, "step": 1100 },
+    { "epoch": 0.3046907430469074, "grad_norm": 0.4097791612148285, "learning_rate": 2.032399089283975e-05, "loss": 1.6373, "step": 1101 },
+    { "epoch": 0.3049674830496748, "grad_norm": 0.5585319399833679, "learning_rate": 2.018612980786435e-05, "loss": 1.8651, "step": 1102 },
+    { "epoch": 0.30524422305244225, "grad_norm": 0.6618626117706299, "learning_rate": 2.0048685373986797e-05, "loss": 1.5504, "step": 1103 },
+    { "epoch": 0.30552096305520965, "grad_norm": 0.5894961953163147, "learning_rate": 1.9911658308706104e-05, "loss": 1.9009, "step": 1104 },
+    { "epoch": 0.30579770305797704, "grad_norm": 0.47861436009407043, "learning_rate": 1.9775049327342486e-05, "loss": 1.1817, "step": 1105 },
+    { "epoch": 0.30607444306074444, "grad_norm": 0.5674930214881897, "learning_rate": 1.9638859143033728e-05, "loss": 1.4227, "step": 1106 },
+    { "epoch": 0.30635118306351183, "grad_norm": 0.5324965715408325, "learning_rate": 1.9503088466731268e-05, "loss": 1.1567, "step": 1107 },
+    { "epoch": 0.30662792306627923, "grad_norm": 0.6152980923652649, "learning_rate": 1.9367738007196678e-05, "loss": 1.1969, "step": 1108 },
+    { "epoch": 0.3069046630690466, "grad_norm": 0.6439808011054993, "learning_rate": 1.9232808470997842e-05, "loss": 1.371, "step": 1109 },
+    { "epoch": 0.307181403071814, "grad_norm": 0.6107545495033264, "learning_rate": 1.9098300562505266e-05, "loss": 1.4488, "step": 1110 },
+    { "epoch": 0.3074581430745814, "grad_norm": 0.5732201933860779, "learning_rate": 1.896421498388853e-05, "loss": 1.2317, "step": 1111 },
+    { "epoch": 0.3077348830773488, "grad_norm": 0.5754370093345642, "learning_rate": 1.8830552435112447e-05, "loss": 1.44, "step": 1112 },
+    { "epoch": 0.3080116230801162, "grad_norm": 0.4767928719520569, "learning_rate": 1.8697313613933553e-05, "loss": 1.3422, "step": 1113 },
+    { "epoch": 0.30828836308288365, "grad_norm": 0.5344167351722717, "learning_rate": 1.8564499215896357e-05, "loss": 1.5493, "step": 1114 },
+    { "epoch": 0.30856510308565105, "grad_norm": 0.5752341151237488, "learning_rate": 1.8432109934329834e-05, "loss": 1.053, "step": 1115 },
+    { "epoch": 0.30884184308841844, "grad_norm": 0.5791290998458862, "learning_rate": 1.8300146460343603e-05, "loss": 1.1179, "step": 1116 },
+    { "epoch": 0.30911858309118584, "grad_norm": 0.8187316060066223, "learning_rate": 1.8168609482824594e-05, "loss": 1.0562, "step": 1117 },
+    { "epoch": 0.30939532309395323, "grad_norm": 0.506911039352417, "learning_rate": 1.8037499688433203e-05, "loss": 1.2228, "step": 1118 },
+    { "epoch": 0.30967206309672063, "grad_norm": 0.6524040102958679, "learning_rate": 1.7906817761599814e-05, "loss": 1.3115, "step": 1119 },
+    { "epoch": 0.309948803099488, "grad_norm": 0.6368042826652527, "learning_rate": 1.777656438452129e-05, "loss": 1.2892, "step": 1120 },
+    { "epoch": 0.3102255431022554, "grad_norm": 0.5353922843933105, "learning_rate": 1.7646740237157256e-05, "loss": 1.5011, "step": 1121 },
+    { "epoch": 0.3105022831050228, "grad_norm": 0.7182748317718506, "learning_rate": 1.751734599722672e-05, "loss": 1.5112, "step": 1122 },
+    { "epoch": 0.3107790231077902, "grad_norm": 0.5622672438621521, "learning_rate": 1.7388382340204378e-05, "loss": 1.825, "step": 1123 },
+    { "epoch": 0.3110557631105576, "grad_norm": 0.6315118074417114, "learning_rate": 1.7259849939317185e-05, "loss": 1.1928, "step": 1124 },
+    { "epoch": 0.31133250311332505, "grad_norm": 0.6838198304176331, "learning_rate": 1.713174946554086e-05, "loss": 1.0741, "step": 1125 },
+    { "epoch": 0.31160924311609245, "grad_norm": 0.7279945611953735, "learning_rate": 1.7004081587596266e-05, "loss": 1.2042, "step": 1126 },
+    { "epoch": 0.31188598311885984, "grad_norm": 0.4747895300388336, "learning_rate": 1.6876846971946024e-05, "loss": 1.8794, "step": 1127 },
+    { "epoch": 0.31216272312162724, "grad_norm": 0.6691758036613464, "learning_rate": 1.6750046282791053e-05, "loss": 1.1888, "step": 1128 },
+    { "epoch": 0.31243946312439463, "grad_norm": 0.46834006905555725, "learning_rate": 1.662368018206698e-05, "loss": 1.6689, "step": 1129 },
+    { "epoch": 0.31271620312716203, "grad_norm": 0.5793126225471497, "learning_rate": 1.649774932944075e-05, "loss": 1.4305, "step": 1130 },
+    { "epoch": 0.3129929431299294, "grad_norm": 0.5778106451034546, "learning_rate": 1.637225438230726e-05, "loss": 1.2552, "step": 1131 },
+    { "epoch": 0.3132696831326968, "grad_norm": 0.6928858757019043, "learning_rate": 1.6247195995785837e-05, "loss": 1.2229, "step": 1132 },
+    { "epoch": 0.3135464231354642, "grad_norm": 0.5719850063323975, "learning_rate": 1.6122574822716775e-05, "loss": 1.1756, "step": 1133 },
+    { "epoch": 0.3138231631382316, "grad_norm": 0.5407165884971619, "learning_rate": 1.5998391513658073e-05, "loss": 1.3599, "step": 1134 },
+    { "epoch": 0.314099903140999, "grad_norm": 0.5962265729904175, "learning_rate": 1.587464671688187e-05, "loss": 1.475, "step": 1135 },
+    { "epoch": 0.31437664314376645, "grad_norm": 0.6834406852722168, "learning_rate": 1.575134107837125e-05, "loss": 1.246, "step": 1136 },
+    { "epoch": 0.31465338314653385, "grad_norm": 0.5959248542785645, "learning_rate": 1.5628475241816688e-05, "loss": 1.2491, "step": 1137 },
+    { "epoch": 0.31493012314930124, "grad_norm": 0.5962563753128052, "learning_rate": 1.5506049848612746e-05, "loss": 1.5915, "step": 1138 },
+    { "epoch": 0.31520686315206864, "grad_norm": 0.5222712755203247, "learning_rate": 1.538406553785484e-05, "loss": 1.1464, "step": 1139 },
+    { "epoch": 0.31548360315483603, "grad_norm": 0.46961209177970886, "learning_rate": 1.5262522946335755e-05, "loss": 1.0375, "step": 1140 },
+    { "epoch": 0.31576034315760343, "grad_norm": 0.6175217628479004, "learning_rate": 1.5141422708542341e-05, "loss": 1.5189, "step": 1141 },
+    { "epoch": 0.3160370831603708, "grad_norm": 0.5901622772216797, "learning_rate": 1.5020765456652319e-05, "loss": 1.5087, "step": 1142 },
+    { "epoch": 0.3163138231631382, "grad_norm": 0.7963405251502991, "learning_rate": 1.4900551820530828e-05, "loss": 1.3666, "step": 1143 },
+    { "epoch": 0.3165905631659056, "grad_norm": 0.6354588270187378, "learning_rate": 1.4780782427727225e-05, "loss": 1.1931, "step": 1144 },
+    { "epoch": 0.316867303168673, "grad_norm": 0.6304844617843628, "learning_rate": 1.466145790347183e-05, "loss": 1.2578, "step": 1145 },
+    { "epoch": 0.3171440431714404, "grad_norm": 0.6053752899169922, "learning_rate": 1.4542578870672575e-05, "loss": 1.3069, "step": 1146 },
+    { "epoch": 0.31742078317420785, "grad_norm": 0.5192694664001465, "learning_rate": 1.4424145949911783e-05, "loss": 1.3658, "step": 1147 },
+    { "epoch": 0.31769752317697525, "grad_norm": 0.6608409881591797, "learning_rate": 1.4306159759443027e-05, "loss": 1.4061, "step": 1148 },
+    { "epoch": 0.31797426317974264, "grad_norm": 0.712081253528595, "learning_rate": 1.4188620915187734e-05, "loss": 1.2588, "step": 1149 },
+    { "epoch": 0.31825100318251004, "grad_norm": 0.5852346420288086, "learning_rate": 1.4071530030732095e-05, "loss": 1.5917, "step": 1150 },
+    { "epoch": 0.31852774318527743, "grad_norm": 0.5140519738197327, "learning_rate": 1.3954887717323872e-05, "loss": 1.5548, "step": 1151 },
+    { "epoch": 0.31880448318804483, "grad_norm": 0.6490625739097595, "learning_rate": 1.3838694583869039e-05, "loss": 1.1887, "step": 1152 },
+    { "epoch": 0.3190812231908122, "grad_norm": 0.5928090810775757, "learning_rate": 1.3722951236928861e-05, "loss": 1.3794, "step": 1153 },
+    { "epoch": 0.3193579631935796, "grad_norm": 0.7151668071746826, "learning_rate": 1.3607658280716473e-05, "loss": 1.9474, "step": 1154 },
+    { "epoch": 0.319634703196347, "grad_norm": 0.4617656171321869, "learning_rate": 1.3492816317093893e-05, "loss": 1.1726, "step": 1155 },
+    { "epoch": 0.3199114431991144, "grad_norm": 0.4614858329296112, "learning_rate": 1.3378425945568851e-05, "loss": 2.0029, "step": 1156 },
+    { "epoch": 0.32018818320188186, "grad_norm": 0.5248904228210449, "learning_rate": 1.326448776329161e-05, "loss": 0.951, "step": 1157 },
+    { "epoch": 0.32046492320464925, "grad_norm": 0.5982460379600525, "learning_rate": 1.3151002365051846e-05, "loss": 1.7557, "step": 1158 },
+    { "epoch": 0.32074166320741665, "grad_norm": 0.5183411836624146, "learning_rate": 1.3037970343275653e-05, "loss": 1.7663, "step": 1159 },
+    { "epoch": 0.32101840321018404, "grad_norm": 0.5864819884300232, "learning_rate": 1.2925392288022298e-05, "loss": 1.6409, "step": 1160 },
+    { "epoch": 0.32129514321295144, "grad_norm": 0.5099895596504211, "learning_rate": 1.2813268786981236e-05, "loss": 1.0785, "step": 1161 },
+    { "epoch": 0.32157188321571883, "grad_norm": 0.4891745150089264, "learning_rate": 1.2701600425469063e-05, "loss": 1.65, "step": 1162 },
+    { "epoch": 0.32184862321848623, "grad_norm": 0.6248778104782104, "learning_rate": 1.2590387786426327e-05, "loss": 1.882, "step": 1163 },
+    { "epoch": 0.3221253632212536, "grad_norm": 0.6037426590919495, "learning_rate": 1.2479631450414675e-05, "loss": 1.7136, "step": 1164 },
+    { "epoch": 0.322402103224021, "grad_norm": 0.6828086376190186, "learning_rate": 1.2369331995613665e-05, "loss": 1.445, "step": 1165 },
+    { "epoch": 0.3226788432267884, "grad_norm": 0.5470522046089172, "learning_rate": 1.2259489997817775e-05, "loss": 1.2489, "step": 1166 },
+    { "epoch": 0.3229555832295558, "grad_norm": 0.5869041085243225, "learning_rate": 1.2150106030433517e-05, "loss": 1.7443, "step": 1167 },
+    { "epoch": 0.32323232323232326, "grad_norm": 0.6043540835380554, "learning_rate": 1.204118066447627e-05, "loss": 1.1914, "step": 1168 },
+    { "epoch": 0.32350906323509065, "grad_norm": 0.592818558216095, "learning_rate": 1.1932714468567463e-05, "loss": 1.3923, "step": 1169 },
+    { "epoch": 0.32378580323785805, "grad_norm": 0.5842121839523315, "learning_rate": 1.1824708008931418e-05, "loss": 1.6263, "step": 1170 },
+    { "epoch": 0.32406254324062544, "grad_norm": 0.7161816954612732, "learning_rate": 1.1717161849392632e-05, "loss": 2.0403, "step": 1171 },
+    { "epoch": 0.32433928324339284, "grad_norm": 0.44865351915359497, "learning_rate": 1.1610076551372584e-05, "loss": 1.443, "step": 1172 },
+    { "epoch": 0.32461602324616023, "grad_norm": 0.6594038009643555, "learning_rate": 1.1503452673887017e-05, "loss": 1.2372, "step": 1173 },
+    { "epoch": 0.32489276324892763, "grad_norm": 0.5554863214492798, "learning_rate": 1.1397290773542857e-05, "loss": 1.5005, "step": 1174 },
+    { "epoch": 0.325169503251695, "grad_norm": 0.6297761797904968, "learning_rate": 1.1291591404535462e-05, "loss": 1.9467, "step": 1175 },
+    { "epoch": 0.3254462432544624, "grad_norm": 0.5325335264205933, "learning_rate": 1.1186355118645554e-05, "loss": 1.5856, "step": 1176 },
+    { "epoch": 0.3257229832572298, "grad_norm": 0.5684744715690613, "learning_rate": 1.1081582465236461e-05, "loss": 1.1486, "step": 1177 },
+    { "epoch": 0.3259997232599972, "grad_norm": 0.5970451831817627, "learning_rate": 1.0977273991251269e-05, "loss": 1.0152, "step": 1178 },
+    { "epoch": 0.32627646326276466, "grad_norm": 0.45452234148979187, "learning_rate": 1.0873430241209847e-05, "loss": 1.3523, "step": 1179 },
+    { "epoch": 0.32655320326553205, "grad_norm": 0.5517742037773132, "learning_rate": 1.0770051757206079e-05, "loss": 1.2846, "step": 1180 },
+    { "epoch": 0.32682994326829945, "grad_norm": 0.5488953590393066, "learning_rate": 1.0667139078905086e-05, "loss": 1.0169, "step": 1181 },
+    { "epoch": 0.32710668327106684, "grad_norm": 0.5730831623077393, "learning_rate": 1.0564692743540294e-05, "loss": 1.0986, "step": 1182 },
+    { "epoch": 0.32738342327383424, "grad_norm": 0.5135382413864136, "learning_rate": 1.0462713285910685e-05, "loss": 1.2177, "step": 1183 },
+    { "epoch": 0.32766016327660163, "grad_norm": 0.5023612380027771, "learning_rate": 1.0361201238378071e-05, "loss": 1.6559, "step": 1184 },
+    { "epoch": 0.32793690327936903, "grad_norm": 0.5821080803871155, "learning_rate": 1.026015713086418e-05, "loss": 1.6826, "step": 1185 },
+    { "epoch": 0.3282136432821364, "grad_norm": 0.5665150880813599, "learning_rate": 1.015958149084797e-05, "loss": 1.0641, "step": 1186 },
+    { "epoch": 0.3284903832849038, "grad_norm": 0.5677096247673035, "learning_rate": 1.0059474843362892e-05, "loss": 1.1591, "step": 1187 },
+    { "epoch": 0.3287671232876712, "grad_norm": 0.541776180267334, "learning_rate": 9.959837710994147e-06, "loss": 1.9488, "step": 1188 },
+    { "epoch": 0.3290438632904386, "grad_norm": 0.5750060677528381, "learning_rate": 9.860670613875844e-06, "loss": 1.1069, "step": 1189 },
+    { "epoch": 0.32932060329320606, "grad_norm": 0.5637800693511963, "learning_rate": 9.761974069688461e-06, "loss": 1.5834, "step": 1190 },
+    { "epoch": 0.32959734329597346, "grad_norm": 0.5580936074256897, "learning_rate": 9.663748593656007e-06, "loss": 1.0507, "step": 1191 },
+    { "epoch": 0.32987408329874085, "grad_norm": 0.5923212170600891, "learning_rate": 9.565994698543435e-06, "loss": 1.3001, "step": 1192 },
+    { "epoch": 0.33015082330150824, "grad_norm": 0.4363764524459839, "learning_rate": 9.46871289465383e-06, "loss": 1.4202, "step": 1193 },
+    { "epoch": 0.33042756330427564, "grad_norm": 0.5848160982131958, "learning_rate": 9.371903689825878e-06, "loss": 1.0789, "step": 1194 },
+    { "epoch": 0.33070430330704303, "grad_norm": 0.5929882526397705, "learning_rate": 9.275567589431178e-06, "loss": 1.087, "step": 1195 },
+    { "epoch": 0.33098104330981043, "grad_norm": 0.560107409954071, "learning_rate": 9.17970509637156e-06, "loss": 1.9408, "step": 1196 },
+    { "epoch": 0.3312577833125778, "grad_norm": 0.534440279006958, "learning_rate": 9.084316711076469e-06, "loss": 1.9992, "step": 1197 },
+    { "epoch": 0.3315345233153452, "grad_norm": 0.6312685608863831, "learning_rate": 8.989402931500434e-06, "loss": 1.1609, "step": 1198 },
+    { "epoch": 0.3318112633181126, "grad_norm": 0.6232370138168335, "learning_rate": 8.894964253120375e-06, "loss": 1.4713, "step": 1199 },
+    { "epoch": 0.33208800332088, "grad_norm": 0.6147283911705017, "learning_rate": 8.80100116893301e-06, "loss": 1.0985, "step": 1200 },
+    { "epoch": 0.33236474332364746, "grad_norm": 0.5115309953689575, "learning_rate": 8.707514169452401e-06, "loss": 1.7642, "step": 1201 },
+    { "epoch": 0.33264148332641486, "grad_norm": 0.5633198022842407, "learning_rate": 8.614503742707237e-06, "loss": 1.8282, "step": 1202 },
+    { "epoch": 0.33291822332918225, "grad_norm": 0.4428837299346924, "learning_rate": 8.521970374238408e-06, "loss": 1.2076, "step": 1203 },
+    { "epoch": 0.33319496333194965, "grad_norm": 0.493984580039978, "learning_rate": 8.429914547096418e-06, "loss": 1.0828, "step": 1204 },
+    { "epoch": 0.33347170333471704, "grad_norm": 0.46048974990844727, "learning_rate": 8.338336741838838e-06, "loss": 1.2034, "step": 1205 },
+    { "epoch": 0.33374844333748444, "grad_norm": 0.6456495523452759, "learning_rate": 8.247237436527876e-06, "loss": 1.0909, "step": 1206 },
+    { "epoch": 0.33402518334025183, "grad_norm": 0.5162082314491272, "learning_rate": 8.156617106727782e-06, "loss": 1.4601, "step": 1207 },
+    { "epoch": 0.3343019233430192, "grad_norm": 0.5720396637916565, "learning_rate": 8.066476225502462e-06, "loss": 1.4244, "step": 1208 },
+    { "epoch": 0.3345786633457866, "grad_norm": 0.49543076753616333, "learning_rate": 7.976815263412963e-06, "loss": 1.3309, "step": 1209 },
+    { "epoch": 0.334855403348554, "grad_norm": 0.4815376102924347, "learning_rate": 7.887634688515e-06, "loss": 1.4334, "step": 1210 },
+    { "epoch": 0.3351321433513214, "grad_norm": 0.6054530739784241, "learning_rate": 7.798934966356487e-06, "loss": 1.5325, "step": 1211 },
+    { "epoch": 0.33540888335408886, "grad_norm": 0.6637771725654602, "learning_rate": 7.710716559975261e-06, "loss": 1.4843, "step": 1212 },
+    { "epoch": 0.33568562335685626, "grad_norm": 0.5178202390670776, "learning_rate": 7.6229799298964565e-06, "loss": 1.1405, "step": 1213 },
+    { "epoch": 0.33596236335962365, "grad_norm": 0.43340736627578735, "learning_rate": 7.535725534130211e-06, "loss": 1.1406, "step": 1214 },
+    { "epoch": 0.33623910336239105, "grad_norm": 0.5664601922035217, "learning_rate": 7.448953828169314e-06, "loss": 1.2705, "step": 1215 },
+    { "epoch": 0.33651584336515844, "grad_norm": 0.4454247057437897, "learning_rate": 7.362665264986723e-06, "loss": 1.2973, "step": 1216 },
+    { "epoch": 0.33679258336792584, "grad_norm": 0.6069696545600891, "learning_rate": 7.2768602950333056e-06, "loss": 1.2495, "step": 1217 },
+    { "epoch": 0.33706932337069323, "grad_norm": 0.5403276085853577, "learning_rate": 7.191539366235378e-06, "loss": 1.6271, "step": 1218 },
+    { "epoch": 0.3373460633734606, "grad_norm": 0.6068305373191833, "learning_rate": 7.106702923992437e-06, "loss": 1.2768, "step": 1219 },
+    { "epoch": 0.337622803376228, "grad_norm": 0.5657030940055847, "learning_rate": 7.022351411174866e-06, "loss": 1.5107, "step": 1220 },
+    { "epoch": 0.3378995433789954, "grad_norm": 0.6420198678970337, "learning_rate": 6.938485268121542e-06, "loss": 1.436, "step": 1221 },
+    { "epoch": 0.3381762833817628, "grad_norm": 0.5612906217575073, "learning_rate": 6.855104932637546e-06, "loss": 1.6718, "step": 1222 },
+    { "epoch": 0.33845302338453026, "grad_norm": 0.4951707720756531, "learning_rate": 6.772210839991988e-06, "loss": 1.6497, "step": 1223 },
+    { "epoch": 0.33872976338729766, "grad_norm": 0.6976304650306702, "learning_rate": 6.689803422915564e-06, "loss": 1.582, "step": 1224 },
+    { "epoch": 0.33900650339006505, "grad_norm": 0.4699346125125885, "learning_rate": 6.607883111598445e-06, "loss": 1.5691, "step": 1225 },
+    { "epoch": 0.33928324339283245, "grad_norm": 0.5255648493766785, "learning_rate": 6.526450333687939e-06, "loss": 1.7337, "step": 1226 },
+    { "epoch": 0.33955998339559984, "grad_norm": 0.5494278073310852, "learning_rate": 6.445505514286321e-06, "loss": 1.3824, "step": 1227 },
+    { "epoch": 0.33983672339836724, "grad_norm": 0.7608486413955688, "learning_rate": 6.3650490759485374e-06, "loss": 1.7047, "step": 1228 },
+    { "epoch": 0.34011346340113463, "grad_norm": 0.599463939666748, "learning_rate": 6.285081438680107e-06, "loss": 1.2716, "step": 1229 },
+    { "epoch": 0.340390203403902, "grad_norm": 0.5069746375083923, "learning_rate": 6.205603019934791e-06, "loss": 1.5201, "step": 1230 },
+    { "epoch": 0.3406669434066694, "grad_norm": 0.4424389600753784, "learning_rate": 6.126614234612593e-06, "loss": 0.9656, "step": 1231 },
+    { "epoch": 0.3409436834094368,
     }
   ],
   "logging_steps": 1,
@@ -7340,12 +9748,12 @@
   "should_evaluate": false,
   "should_log": false,
   "should_save": true,
-  "should_training_stop":
   },
   "attributes": {}
   }
 },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
|
| 8662 |
+
"grad_norm": 0.536368191242218,
|
| 8663 |
+
"learning_rate": 6.048115495057394e-06,
|
| 8664 |
+
"loss": 1.4471,
|
| 8665 |
+
"step": 1232
|
| 8666 |
+
},
|
| 8667 |
+
{
|
| 8668 |
+
"epoch": 0.3412204234122042,
|
| 8669 |
+
"grad_norm": 0.5330540537834167,
|
| 8670 |
+
"learning_rate": 5.970107211054932e-06,
|
| 8671 |
+
"loss": 1.385,
|
| 8672 |
+
"step": 1233
|
| 8673 |
+
},
|
| 8674 |
+
{
|
| 8675 |
+
"epoch": 0.34149716341497166,
|
| 8676 |
+
"grad_norm": 0.5308576822280884,
|
| 8677 |
+
"learning_rate": 5.89258978983066e-06,
|
| 8678 |
+
"loss": 1.4702,
|
| 8679 |
+
"step": 1234
|
| 8680 |
+
},
|
| 8681 |
+
{
|
| 8682 |
+
"epoch": 0.34177390341773906,
|
| 8683 |
+
"grad_norm": 0.6227068305015564,
|
| 8684 |
+
"learning_rate": 5.8155636360475385e-06,
|
| 8685 |
+
"loss": 1.374,
|
| 8686 |
+
"step": 1235
|
| 8687 |
+
},
|
| 8688 |
+
{
|
| 8689 |
+
"epoch": 0.34205064342050645,
|
| 8690 |
+
"grad_norm": 0.5738956332206726,
|
| 8691 |
+
"learning_rate": 5.739029151803988e-06,
|
| 8692 |
+
"loss": 1.6628,
|
| 8693 |
+
"step": 1236
|
| 8694 |
+
},
|
| 8695 |
+
{
|
| 8696 |
+
"epoch": 0.34232738342327385,
|
| 8697 |
+
"grad_norm": 0.5956830978393555,
|
| 8698 |
+
"learning_rate": 5.6629867366318036e-06,
|
| 8699 |
+
"loss": 1.5207,
|
| 8700 |
+
"step": 1237
|
| 8701 |
+
},
|
| 8702 |
+
{
|
| 8703 |
+
"epoch": 0.34260412342604124,
|
| 8704 |
+
"grad_norm": 0.5596936941146851,
|
| 8705 |
+
"learning_rate": 5.587436787493994e-06,
|
| 8706 |
+
"loss": 1.0807,
|
| 8707 |
+
"step": 1238
|
| 8708 |
+
},
|
| 8709 |
+
{
|
| 8710 |
+
"epoch": 0.34288086342880864,
|
| 8711 |
+
"grad_norm": 0.9277949333190918,
|
| 8712 |
+
"learning_rate": 5.512379698782777e-06,
|
| 8713 |
+
"loss": 1.2116,
|
| 8714 |
+
"step": 1239
|
| 8715 |
+
},
|
| 8716 |
+
{
|
| 8717 |
+
"epoch": 0.34315760343157603,
|
| 8718 |
+
"grad_norm": 0.537907063961029,
|
| 8719 |
+
"learning_rate": 5.437815862317519e-06,
|
| 8720 |
+
"loss": 1.1092,
|
| 8721 |
+
"step": 1240
|
| 8722 |
+
},
|
| 8723 |
+
{
|
| 8724 |
+
"epoch": 0.3434343434343434,
|
| 8725 |
+
"grad_norm": 0.6063835024833679,
|
| 8726 |
+
"learning_rate": 5.363745667342624e-06,
|
| 8727 |
+
"loss": 1.304,
|
| 8728 |
+
"step": 1241
|
| 8729 |
+
},
|
| 8730 |
+
{
|
| 8731 |
+
"epoch": 0.3437110834371108,
|
| 8732 |
+
"grad_norm": 0.9204519391059875,
|
| 8733 |
+
"learning_rate": 5.290169500525577e-06,
|
| 8734 |
+
"loss": 1.3467,
|
| 8735 |
+
"step": 1242
|
| 8736 |
+
},
|
| 8737 |
+
{
|
| 8738 |
+
"epoch": 0.3439878234398782,
|
| 8739 |
+
"grad_norm": 0.7153889536857605,
|
| 8740 |
+
"learning_rate": 5.217087745954896e-06,
|
| 8741 |
+
"loss": 1.1957,
|
| 8742 |
+
"step": 1243
|
| 8743 |
+
},
|
| 8744 |
+
{
|
| 8745 |
+
"epoch": 0.3442645634426456,
|
| 8746 |
+
"grad_norm": 0.5362898111343384,
|
| 8747 |
+
"learning_rate": 5.14450078513814e-06,
|
| 8748 |
+
"loss": 1.0986,
|
| 8749 |
+
"step": 1244
|
| 8750 |
+
},
|
| 8751 |
+
{
|
| 8752 |
+
"epoch": 0.34454130344541306,
|
| 8753 |
+
"grad_norm": 0.5722021460533142,
|
| 8754 |
+
"learning_rate": 5.072408996999844e-06,
|
| 8755 |
+
"loss": 1.5624,
|
| 8756 |
+
"step": 1245
|
| 8757 |
+
},
|
| 8758 |
+
{
|
| 8759 |
+
"epoch": 0.34481804344818046,
|
| 8760 |
+
"grad_norm": 0.5441586971282959,
|
| 8761 |
+
"learning_rate": 5.00081275787968e-06,
|
| 8762 |
+
"loss": 1.216,
|
| 8763 |
+
"step": 1246
|
| 8764 |
+
},
|
| 8765 |
+
{
|
| 8766 |
+
"epoch": 0.34509478345094785,
|
| 8767 |
+
"grad_norm": 0.7203711271286011,
|
| 8768 |
+
"learning_rate": 4.929712441530343e-06,
|
| 8769 |
+
"loss": 1.2957,
|
| 8770 |
+
"step": 1247
|
| 8771 |
+
},
|
| 8772 |
+
{
|
| 8773 |
+
"epoch": 0.34537152345371525,
|
| 8774 |
+
"grad_norm": 0.7094559669494629,
|
| 8775 |
+
"learning_rate": 4.859108419115732e-06,
|
| 8776 |
+
"loss": 1.1409,
|
| 8777 |
+
"step": 1248
|
| 8778 |
+
},
|
| 8779 |
+
{
|
| 8780 |
+
"epoch": 0.34564826345648264,
|
| 8781 |
+
"grad_norm": 0.5927663445472717,
|
| 8782 |
+
"learning_rate": 4.789001059208909e-06,
|
| 8783 |
+
"loss": 1.0888,
|
| 8784 |
+
"step": 1249
|
| 8785 |
+
},
|
| 8786 |
+
{
|
| 8787 |
+
"epoch": 0.34592500345925004,
|
| 8788 |
+
"grad_norm": 0.6818047165870667,
|
| 8789 |
+
"learning_rate": 4.719390727790218e-06,
|
| 8790 |
+
"loss": 1.4806,
|
| 8791 |
+
"step": 1250
|
| 8792 |
+
},
|
| 8793 |
+
{
|
| 8794 |
+
"epoch": 0.34620174346201743,
|
| 8795 |
+
"grad_norm": 0.5493994355201721,
|
| 8796 |
+
"learning_rate": 4.650277788245394e-06,
|
| 8797 |
+
"loss": 1.1163,
|
| 8798 |
+
"step": 1251
|
| 8799 |
+
},
|
| 8800 |
+
{
|
| 8801 |
+
"epoch": 0.3464784834647848,
|
| 8802 |
+
"grad_norm": 0.7363675236701965,
|
| 8803 |
+
"learning_rate": 4.5816626013636345e-06,
|
| 8804 |
+
"loss": 1.1066,
|
| 8805 |
+
"step": 1252
|
| 8806 |
+
},
|
| 8807 |
+
{
|
| 8808 |
+
"epoch": 0.3467552234675522,
|
| 8809 |
+
"grad_norm": 0.6546116471290588,
|
| 8810 |
+
"learning_rate": 4.513545525335705e-06,
|
| 8811 |
+
"loss": 1.4653,
|
| 8812 |
+
"step": 1253
|
| 8813 |
+
},
|
| 8814 |
+
{
|
| 8815 |
+
"epoch": 0.3470319634703196,
|
| 8816 |
+
"grad_norm": 0.6130459308624268,
|
| 8817 |
+
"learning_rate": 4.445926915752141e-06,
|
| 8818 |
+
"loss": 1.0907,
|
| 8819 |
+
"step": 1254
|
| 8820 |
+
},
|
| 8821 |
+
{
|
| 8822 |
+
"epoch": 0.347308703473087,
|
| 8823 |
+
"grad_norm": 0.6596401333808899,
|
| 8824 |
+
"learning_rate": 4.378807125601303e-06,
|
| 8825 |
+
"loss": 1.0963,
|
| 8826 |
+
"step": 1255
|
| 8827 |
+
},
|
| 8828 |
+
{
|
| 8829 |
+
"epoch": 0.34758544347585446,
|
| 8830 |
+
"grad_norm": 0.5201833248138428,
|
| 8831 |
+
"learning_rate": 4.312186505267568e-06,
|
| 8832 |
+
"loss": 1.5553,
|
| 8833 |
+
"step": 1256
|
| 8834 |
+
},
|
| 8835 |
+
{
|
| 8836 |
+
"epoch": 0.34786218347862186,
|
| 8837 |
+
"grad_norm": 0.6547181010246277,
|
| 8838 |
+
"learning_rate": 4.2460654025295425e-06,
|
| 8839 |
+
"loss": 1.6685,
|
| 8840 |
+
"step": 1257
|
| 8841 |
+
},
|
| 8842 |
+
{
|
| 8843 |
+
"epoch": 0.34813892348138925,
|
| 8844 |
+
"grad_norm": 0.6214966773986816,
|
| 8845 |
+
"learning_rate": 4.18044416255815e-06,
|
| 8846 |
+
"loss": 1.3736,
|
| 8847 |
+
"step": 1258
|
| 8848 |
+
},
|
| 8849 |
+
{
|
| 8850 |
+
"epoch": 0.34841566348415665,
|
| 8851 |
+
"grad_norm": 0.6374169588088989,
|
| 8852 |
+
"learning_rate": 4.115323127914961e-06,
|
| 8853 |
+
"loss": 1.1412,
|
| 8854 |
+
"step": 1259
|
| 8855 |
+
},
|
| 8856 |
+
{
|
| 8857 |
+
"epoch": 0.34869240348692404,
|
| 8858 |
+
"grad_norm": 0.5468176007270813,
|
| 8859 |
+
"learning_rate": 4.050702638550275e-06,
|
| 8860 |
+
"loss": 1.0697,
|
| 8861 |
+
"step": 1260
|
| 8862 |
+
},
|
| 8863 |
+
{
|
| 8864 |
+
"epoch": 0.34896914348969144,
|
| 8865 |
+
"grad_norm": 0.6127582788467407,
|
| 8866 |
+
"learning_rate": 3.986583031801405e-06,
|
| 8867 |
+
"loss": 1.1092,
|
| 8868 |
+
"step": 1261
|
| 8869 |
+
},
|
| 8870 |
+
{
|
| 8871 |
+
"epoch": 0.34924588349245883,
|
| 8872 |
+
"grad_norm": 0.5807528495788574,
|
| 8873 |
+
"learning_rate": 3.922964642390969e-06,
|
| 8874 |
+
"loss": 1.3098,
|
| 8875 |
+
"step": 1262
|
| 8876 |
+
},
|
| 8877 |
+
{
|
| 8878 |
+
"epoch": 0.3495226234952262,
|
| 8879 |
+
"grad_norm": 0.5624070763587952,
|
| 8880 |
+
"learning_rate": 3.859847802425009e-06,
|
| 8881 |
+
"loss": 1.6435,
|
| 8882 |
+
"step": 1263
|
| 8883 |
+
},
|
| 8884 |
+
{
|
| 8885 |
+
"epoch": 0.3497993634979936,
|
| 8886 |
+
"grad_norm": 0.7072883248329163,
|
| 8887 |
+
"learning_rate": 3.797232841391407e-06,
|
| 8888 |
+
"loss": 1.246,
|
| 8889 |
+
"step": 1264
|
| 8890 |
+
},
|
| 8891 |
+
{
|
| 8892 |
+
"epoch": 0.350076103500761,
|
| 8893 |
+
"grad_norm": 0.7226585745811462,
|
| 8894 |
+
"learning_rate": 3.7351200861580617e-06,
|
| 8895 |
+
"loss": 1.652,
|
| 8896 |
+
"step": 1265
|
| 8897 |
+
},
|
| 8898 |
+
{
|
| 8899 |
+
"epoch": 0.3503528435035284,
|
| 8900 |
+
"grad_norm": 0.6208769083023071,
|
| 8901 |
+
"learning_rate": 3.6735098609712137e-06,
|
| 8902 |
+
"loss": 1.3876,
|
| 8903 |
+
"step": 1266
|
| 8904 |
+
},
|
| 8905 |
+
{
|
| 8906 |
+
"epoch": 0.35062958350629586,
|
| 8907 |
+
"grad_norm": 0.5284237861633301,
|
| 8908 |
+
"learning_rate": 3.6124024874537366e-06,
|
| 8909 |
+
"loss": 1.7114,
|
| 8910 |
+
"step": 1267
|
| 8911 |
+
},
|
| 8912 |
+
{
|
| 8913 |
+
"epoch": 0.35090632350906326,
|
| 8914 |
+
"grad_norm": 0.512886643409729,
|
| 8915 |
+
"learning_rate": 3.5517982846035357e-06,
|
| 8916 |
+
"loss": 1.1513,
|
| 8917 |
+
"step": 1268
|
| 8918 |
+
},
|
| 8919 |
+
{
|
| 8920 |
+
"epoch": 0.35118306351183065,
|
| 8921 |
+
"grad_norm": 0.4237596094608307,
|
| 8922 |
+
"learning_rate": 3.491697568791752e-06,
|
| 8923 |
+
"loss": 1.4596,
|
| 8924 |
+
"step": 1269
|
| 8925 |
+
},
|
| 8926 |
+
{
|
| 8927 |
+
"epoch": 0.35145980351459805,
|
| 8928 |
+
"grad_norm": 0.6409422755241394,
|
| 8929 |
+
"learning_rate": 3.4321006537612165e-06,
|
| 8930 |
+
"loss": 1.9398,
|
| 8931 |
+
"step": 1270
|
| 8932 |
+
},
|
| 8933 |
+
{
|
| 8934 |
+
"epoch": 0.35173654351736544,
|
| 8935 |
+
"grad_norm": 0.5703920722007751,
|
| 8936 |
+
"learning_rate": 3.3730078506247876e-06,
|
| 8937 |
+
"loss": 1.1196,
|
| 8938 |
+
"step": 1271
|
| 8939 |
+
},
|
| 8940 |
+
{
|
| 8941 |
+
"epoch": 0.35201328352013284,
|
| 8942 |
+
"grad_norm": 0.5316205620765686,
|
| 8943 |
+
"learning_rate": 3.314419467863672e-06,
|
| 8944 |
+
"loss": 1.9507,
|
| 8945 |
+
"step": 1272
|
| 8946 |
+
},
|
| 8947 |
+
{
|
| 8948 |
+
"epoch": 0.35229002352290023,
|
| 8949 |
+
"grad_norm": 0.6559683084487915,
|
| 8950 |
+
"learning_rate": 3.2563358113259277e-06,
|
| 8951 |
+
"loss": 1.0825,
|
| 8952 |
+
"step": 1273
|
| 8953 |
+
},
|
| 8954 |
+
{
|
| 8955 |
+
"epoch": 0.3525667635256676,
|
| 8956 |
+
"grad_norm": 0.6180990934371948,
|
| 8957 |
+
"learning_rate": 3.198757184224732e-06,
|
| 8958 |
+
"loss": 1.0632,
|
| 8959 |
+
"step": 1274
|
| 8960 |
+
},
|
| 8961 |
+
{
|
| 8962 |
+
"epoch": 0.352843503528435,
|
| 8963 |
+
"grad_norm": 0.606361448764801,
|
| 8964 |
+
"learning_rate": 3.1416838871368924e-06,
|
| 8965 |
+
"loss": 1.1952,
|
| 8966 |
+
"step": 1275
|
| 8967 |
+
},
|
| 8968 |
+
{
|
| 8969 |
+
"epoch": 0.3531202435312024,
|
| 8970 |
+
"grad_norm": 0.5028677582740784,
|
| 8971 |
+
"learning_rate": 3.0851162180012495e-06,
|
| 8972 |
+
"loss": 1.4651,
|
| 8973 |
+
"step": 1276
|
| 8974 |
+
},
|
| 8975 |
+
{
|
| 8976 |
+
"epoch": 0.3533969835339698,
|
| 8977 |
+
"grad_norm": 0.6076905131340027,
|
| 8978 |
+
"learning_rate": 3.029054472117132e-06,
|
| 8979 |
+
"loss": 1.1204,
|
| 8980 |
+
"step": 1277
|
| 8981 |
+
},
|
| 8982 |
+
{
|
| 8983 |
+
"epoch": 0.35367372353673726,
|
| 8984 |
+
"grad_norm": 0.6184888482093811,
|
| 8985 |
+
"learning_rate": 2.973498942142783e-06,
|
| 8986 |
+
"loss": 1.3755,
|
| 8987 |
+
"step": 1278
|
| 8988 |
+
},
|
| 8989 |
+
{
|
| 8990 |
+
"epoch": 0.35395046353950466,
|
| 8991 |
+
"grad_norm": 0.4860801100730896,
|
| 8992 |
+
"learning_rate": 2.9184499180938685e-06,
|
| 8993 |
+
"loss": 1.2322,
|
| 8994 |
+
"step": 1279
|
| 8995 |
+
},
|
| 8996 |
+
{
|
| 8997 |
+
"epoch": 0.35422720354227205,
|
| 8998 |
+
"grad_norm": 0.546761155128479,
|
| 8999 |
+
"learning_rate": 2.863907687341949e-06,
|
| 9000 |
+
"loss": 1.0441,
|
| 9001 |
+
"step": 1280
|
| 9002 |
+
},
|
| 9003 |
+
{
|
| 9004 |
+
"epoch": 0.35450394354503945,
|
| 9005 |
+
"grad_norm": 0.521815836429596,
|
| 9006 |
+
"learning_rate": 2.8098725346129894e-06,
|
| 9007 |
+
"loss": 1.5829,
|
| 9008 |
+
"step": 1281
|
| 9009 |
+
},
|
| 9010 |
+
{
|
| 9011 |
+
"epoch": 0.35478068354780684,
|
| 9012 |
+
"grad_norm": 0.5151076912879944,
|
| 9013 |
+
"learning_rate": 2.756344741985817e-06,
|
| 9014 |
+
"loss": 1.4878,
|
| 9015 |
+
"step": 1282
|
| 9016 |
+
},
|
| 9017 |
+
{
|
| 9018 |
+
"epoch": 0.35505742355057424,
|
| 9019 |
+
"grad_norm": 0.8829789161682129,
|
| 9020 |
+
"learning_rate": 2.7033245888907765e-06,
|
| 9021 |
+
"loss": 1.2228,
|
| 9022 |
+
"step": 1283
|
| 9023 |
+
},
|
| 9024 |
+
{
|
| 9025 |
+
"epoch": 0.35533416355334163,
|
| 9026 |
+
"grad_norm": 0.5310692191123962,
|
| 9027 |
+
"learning_rate": 2.650812352108112e-06,
|
| 9028 |
+
"loss": 1.7522,
|
| 9029 |
+
"step": 1284
|
| 9030 |
+
},
|
| 9031 |
+
{
|
| 9032 |
+
"epoch": 0.355610903556109,
|
| 9033 |
+
"grad_norm": 0.5059064626693726,
|
| 9034 |
+
"learning_rate": 2.5988083057666533e-06,
|
| 9035 |
+
"loss": 1.414,
|
| 9036 |
+
"step": 1285
|
| 9037 |
+
},
|
| 9038 |
+
{
|
| 9039 |
+
"epoch": 0.3558876435588764,
|
| 9040 |
+
"grad_norm": 0.5747556090354919,
|
| 9041 |
+
"learning_rate": 2.5473127213422763e-06,
|
| 9042 |
+
"loss": 1.3852,
|
| 9043 |
+
"step": 1286
|
| 9044 |
+
},
|
| 9045 |
+
{
|
| 9046 |
+
"epoch": 0.3561643835616438,
|
| 9047 |
+
"grad_norm": 0.5749861001968384,
|
| 9048 |
+
"learning_rate": 2.496325867656624e-06,
|
| 9049 |
+
"loss": 1.2376,
|
| 9050 |
+
"step": 1287
|
| 9051 |
+
},
|
| 9052 |
+
{
|
| 9053 |
+
"epoch": 0.3564411235644112,
|
| 9054 |
+
"grad_norm": 0.6638109683990479,
|
| 9055 |
+
"learning_rate": 2.4458480108755287e-06,
|
| 9056 |
+
"loss": 1.5775,
|
| 9057 |
+
"step": 1288
|
| 9058 |
+
},
|
| 9059 |
+
{
|
| 9060 |
+
"epoch": 0.35671786356717866,
|
| 9061 |
+
"grad_norm": 0.5741908550262451,
|
| 9062 |
+
"learning_rate": 2.3958794145077622e-06,
|
| 9063 |
+
"loss": 1.5209,
|
| 9064 |
+
"step": 1289
|
| 9065 |
+
},
|
| 9066 |
+
{
|
| 9067 |
+
"epoch": 0.35699460356994606,
|
| 9068 |
+
"grad_norm": 0.5471376180648804,
|
| 9069 |
+
"learning_rate": 2.3464203394036322e-06,
|
| 9070 |
+
"loss": 1.1939,
|
| 9071 |
+
"step": 1290
|
| 9072 |
+
},
|
| 9073 |
+
{
|
| 9074 |
+
"epoch": 0.35727134357271345,
|
| 9075 |
+
"grad_norm": 0.44931599497795105,
|
| 9076 |
+
"learning_rate": 2.297471043753552e-06,
|
| 9077 |
+
"loss": 1.582,
|
| 9078 |
+
"step": 1291
|
| 9079 |
+
},
|
| 9080 |
+
{
|
| 9081 |
+
"epoch": 0.35754808357548085,
|
| 9082 |
+
"grad_norm": 0.5139139294624329,
|
| 9083 |
+
"learning_rate": 2.2490317830867635e-06,
|
| 9084 |
+
"loss": 1.6963,
|
| 9085 |
+
"step": 1292
|
| 9086 |
+
},
|
| 9087 |
+
{
|
| 9088 |
+
"epoch": 0.35782482357824824,
|
| 9089 |
+
"grad_norm": 0.5607603788375854,
|
| 9090 |
+
"learning_rate": 2.201102810269995e-06,
|
| 9091 |
+
"loss": 1.4019,
|
| 9092 |
+
"step": 1293
|
| 9093 |
+
},
|
| 9094 |
+
{
|
| 9095 |
+
"epoch": 0.35810156358101564,
|
| 9096 |
+
"grad_norm": 0.6119531989097595,
|
| 9097 |
+
"learning_rate": 2.1536843755061043e-06,
|
| 9098 |
+
"loss": 1.1306,
|
| 9099 |
+
"step": 1294
|
| 9100 |
+
},
|
| 9101 |
+
{
|
| 9102 |
+
"epoch": 0.35837830358378303,
|
| 9103 |
+
"grad_norm": 0.5319027304649353,
|
| 9104 |
+
"learning_rate": 2.1067767263327933e-06,
|
| 9105 |
+
"loss": 0.9546,
|
| 9106 |
+
"step": 1295
|
| 9107 |
+
},
|
| 9108 |
+
{
|
| 9109 |
+
"epoch": 0.3586550435865504,
|
| 9110 |
+
"grad_norm": 0.6079663634300232,
|
| 9111 |
+
"learning_rate": 2.0603801076213623e-06,
|
| 9112 |
+
"loss": 1.3224,
|
| 9113 |
+
"step": 1296
|
| 9114 |
+
},
|
| 9115 |
+
{
|
| 9116 |
+
"epoch": 0.3589317835893178,
|
| 9117 |
+
"grad_norm": 0.5968743562698364,
|
| 9118 |
+
"learning_rate": 2.014494761575314e-06,
|
| 9119 |
+
"loss": 1.4332,
|
| 9120 |
+
"step": 1297
|
| 9121 |
+
},
|
| 9122 |
+
{
|
| 9123 |
+
"epoch": 0.3592085235920852,
|
| 9124 |
+
"grad_norm": 0.5699344873428345,
|
| 9125 |
+
"learning_rate": 1.969120927729229e-06,
|
| 9126 |
+
"loss": 1.2226,
|
| 9127 |
+
"step": 1298
|
| 9128 |
+
},
|
| 9129 |
+
{
|
| 9130 |
+
"epoch": 0.3594852635948526,
|
| 9131 |
+
"grad_norm": 0.5667832493782043,
|
| 9132 |
+
"learning_rate": 1.924258842947424e-06,
|
| 9133 |
+
"loss": 1.723,
|
| 9134 |
+
"step": 1299
|
| 9135 |
+
},
|
| 9136 |
+
{
|
| 9137 |
+
"epoch": 0.35976200359762006,
|
| 9138 |
+
"grad_norm": 0.7314775586128235,
|
| 9139 |
+
"learning_rate": 1.87990874142272e-06,
|
| 9140 |
+
"loss": 1.7885,
|
| 9141 |
+
"step": 1300
|
| 9142 |
+
},
|
| 9143 |
+
{
|
| 9144 |
+
"epoch": 0.36003874360038746,
|
| 9145 |
+
"grad_norm": 0.5974516868591309,
|
| 9146 |
+
"learning_rate": 1.8360708546752646e-06,
|
| 9147 |
+
"loss": 1.2607,
|
| 9148 |
+
"step": 1301
|
| 9149 |
+
},
|
| 9150 |
+
{
|
| 9151 |
+
"epoch": 0.36031548360315485,
|
| 9152 |
+
"grad_norm": 0.580694317817688,
|
| 9153 |
+
"learning_rate": 1.7927454115512997e-06,
|
| 9154 |
+
"loss": 1.1211,
|
| 9155 |
+
"step": 1302
|
| 9156 |
+
},
|
| 9157 |
+
{
|
| 9158 |
+
"epoch": 0.36059222360592225,
|
| 9159 |
+
"grad_norm": 0.6023333072662354,
|
| 9160 |
+
"learning_rate": 1.7499326382219516e-06,
|
| 9161 |
+
"loss": 1.0925,
|
| 9162 |
+
"step": 1303
|
| 9163 |
+
},
|
| 9164 |
+
{
|
| 9165 |
+
"epoch": 0.36086896360868964,
|
| 9166 |
+
"grad_norm": 0.8655004501342773,
|
| 9167 |
+
"learning_rate": 1.707632758182076e-06,
|
| 9168 |
+
"loss": 1.246,
|
| 9169 |
+
"step": 1304
|
| 9170 |
+
},
|
| 9171 |
+
{
|
| 9172 |
+
"epoch": 0.36114570361145704,
|
| 9173 |
+
"grad_norm": 0.6250109076499939,
|
| 9174 |
+
"learning_rate": 1.665845992249071e-06,
|
| 9175 |
+
"loss": 1.8331,
|
| 9176 |
+
"step": 1305
|
| 9177 |
+
},
|
| 9178 |
+
{
|
| 9179 |
+
"epoch": 0.36142244361422443,
|
| 9180 |
+
"grad_norm": 0.5616446137428284,
|
| 9181 |
+
"learning_rate": 1.6245725585617322e-06,
|
| 9182 |
+
"loss": 1.6006,
|
| 9183 |
+
"step": 1306
|
| 9184 |
+
},
|
| 9185 |
+
{
|
| 9186 |
+
"epoch": 0.3616991836169918,
|
| 9187 |
+
"grad_norm": 0.6929317116737366,
|
| 9188 |
+
"learning_rate": 1.5838126725791435e-06,
|
| 9189 |
+
"loss": 1.5583,
|
| 9190 |
+
"step": 1307
|
| 9191 |
+
},
|
| 9192 |
+
{
|
| 9193 |
+
"epoch": 0.3619759236197592,
|
| 9194 |
+
"grad_norm": 0.5980032682418823,
|
| 9195 |
+
"learning_rate": 1.543566547079467e-06,
|
| 9196 |
+
"loss": 1.336,
|
| 9197 |
+
"step": 1308
|
| 9198 |
+
},
|
| 9199 |
+
{
|
| 9200 |
+
"epoch": 0.3622526636225266,
|
| 9201 |
+
"grad_norm": 0.6713624000549316,
|
| 9202 |
+
"learning_rate": 1.5038343921589315e-06,
|
| 9203 |
+
"loss": 1.1475,
|
| 9204 |
+
"step": 1309
|
| 9205 |
+
},
|
| 9206 |
+
{
|
| 9207 |
+
"epoch": 0.362529403625294,
|
| 9208 |
+
"grad_norm": 0.6368204951286316,
|
| 9209 |
+
"learning_rate": 1.4646164152307018e-06,
|
| 9210 |
+
"loss": 1.2424,
|
| 9211 |
+
"step": 1310
|
| 9212 |
+
},
|
| 9213 |
+
{
|
| 9214 |
+
"epoch": 0.36280614362806146,
|
| 9215 |
+
"grad_norm": 0.46106165647506714,
|
| 9216 |
+
"learning_rate": 1.4259128210237449e-06,
|
| 9217 |
+
"loss": 1.6469,
|
| 9218 |
+
"step": 1311
|
| 9219 |
+
},
|
| 9220 |
+
{
|
| 9221 |
+
"epoch": 0.36308288363082886,
|
| 9222 |
+
"grad_norm": 0.5512017607688904,
|
| 9223 |
+
"learning_rate": 1.3877238115818536e-06,
|
| 9224 |
+
"loss": 1.7897,
|
| 9225 |
+
"step": 1312
|
| 9226 |
+
},
|
| 9227 |
+
{
|
| 9228 |
+
"epoch": 0.36335962363359625,
|
| 9229 |
+
"grad_norm": 0.6934320330619812,
|
| 9230 |
+
"learning_rate": 1.3500495862624918e-06,
|
| 9231 |
+
"loss": 1.2528,
|
| 9232 |
+
"step": 1313
|
| 9233 |
+
},
|
| 9234 |
+
{
|
| 9235 |
+
"epoch": 0.36363636363636365,
|
| 9236 |
+
"grad_norm": 0.6599170565605164,
|
| 9237 |
+
"learning_rate": 1.3128903417358395e-06,
|
| 9238 |
+
"loss": 1.1695,
|
| 9239 |
+
"step": 1314
|
| 9240 |
+
},
|
| 9241 |
+
{
|
| 9242 |
+
"epoch": 0.36391310363913104,
|
| 9243 |
+
"grad_norm": 0.5150359272956848,
|
| 9244 |
+
"learning_rate": 1.2762462719837275e-06,
|
| 9245 |
+
"loss": 1.2896,
|
| 9246 |
+
"step": 1315
|
| 9247 |
+
},
|
| 9248 |
+
{
|
| 9249 |
+
"epoch": 0.36418984364189844,
|
| 9250 |
+
"grad_norm": 0.5834681987762451,
|
| 9251 |
+
"learning_rate": 1.2401175682986043e-06,
|
| 9252 |
+
"loss": 1.1797,
|
| 9253 |
+
"step": 1316
|
| 9254 |
+
},
|
| 9255 |
+
{
|
| 9256 |
+
"epoch": 0.36446658364466583,
|
| 9257 |
+
"grad_norm": 0.7220518589019775,
|
| 9258 |
+
"learning_rate": 1.2045044192825815e-06,
|
| 9259 |
+
"loss": 1.1729,
|
| 9260 |
+
"step": 1317
|
| 9261 |
+
},
|
| 9262 |
+
{
|
| 9263 |
+
"epoch": 0.3647433236474332,
|
| 9264 |
+
"grad_norm": 0.6406015753746033,
|
| 9265 |
+
"learning_rate": 1.169407010846435e-06,
|
| 9266 |
+
"loss": 1.2223,
|
| 9267 |
+
"step": 1318
|
| 9268 |
+
},
|
| 9269 |
+
{
|
| 9270 |
+
"epoch": 0.3650200636502006,
|
| 9271 |
+
"grad_norm": 0.6064637899398804,
|
| 9272 |
+
"learning_rate": 1.134825526208605e-06,
|
| 9273 |
+
"loss": 1.449,
|
| 9274 |
+
"step": 1319
|
| 9275 |
+
},
|
| 9276 |
+
{
|
| 9277 |
+
"epoch": 0.365296803652968,
|
| 9278 |
+
"grad_norm": 0.592690646648407,
|
| 9279 |
+
"learning_rate": 1.1007601458942752e-06,
|
| 9280 |
+
"loss": 1.4574,
|
| 9281 |
+
"step": 1320
|
| 9282 |
+
},
|
| 9283 |
+
{
|
| 9284 |
+
"epoch": 0.3655735436557354,
|
| 9285 |
+
"grad_norm": 0.6356774568557739,
|
| 9286 |
+
"learning_rate": 1.0672110477344177e-06,
|
| 9287 |
+
"loss": 1.2986,
|
| 9288 |
+
"step": 1321
|
| 9289 |
+
},
|
| 9290 |
+
{
|
| 9291 |
+
"epoch": 0.36585028365850286,
|
| 9292 |
+
"grad_norm": 0.597727358341217,
|
| 9293 |
+
"learning_rate": 1.0341784068648719e-06,
|
| 9294 |
+
"loss": 1.0877,
|
| 9295 |
+
"step": 1322
|
| 9296 |
+
},
|
| 9297 |
+
{
|
| 9298 |
+
"epoch": 0.36612702366127026,
|
| 9299 |
+
"grad_norm": 0.45690664649009705,
|
| 9300 |
+
"learning_rate": 1.0016623957254e-06,
|
| 9301 |
+
"loss": 1.1992,
|
| 9302 |
+
"step": 1323
|
| 9303 |
+
},
|
| 9304 |
+
{
|
| 9305 |
+
"epoch": 0.36640376366403765,
|
| 9306 |
+
"grad_norm": 0.655220627784729,
|
| 9307 |
+
"learning_rate": 9.696631840588443e-07,
|
| 9308 |
+
"loss": 1.0497,
|
| 9309 |
+
"step": 1324
|
| 9310 |
+
},
|
| 9311 |
+
{
|
| 9312 |
+
"epoch": 0.36668050366680505,
|
| 9313 |
+
"grad_norm": 0.48601233959198,
|
| 9314 |
+
"learning_rate": 9.381809389101825e-07,
|
| 9315 |
+
"loss": 1.0375,
|
| 9316 |
+
"step": 1325
|
| 9317 |
+
},
|
| 9318 |
+
{
|
| 9319 |
+
"epoch": 0.36695724366957244,
|
| 9320 |
+
"grad_norm": 0.5588715076446533,
|
| 9321 |
+
"learning_rate": 9.072158246256957e-07,
|
| 9322 |
+
"loss": 1.1141,
|
| 9323 |
+
"step": 1326
|
| 9324 |
+
},
|
| 9325 |
+
{
|
| 9326 |
+
"epoch": 0.36723398367233984,
|
| 9327 |
+
"grad_norm": 0.5344005823135376,
|
| 9328 |
+
"learning_rate": 8.767680028520687e-07,
|
| 9329 |
+
"loss": 1.5042,
|
| 9330 |
+
"step": 1327
|
| 9331 |
+
},
|
| 9332 |
+
{
|
| 9333 |
+
"epoch": 0.36751072367510723,
|
| 9334 |
+
"grad_norm": 0.5645115971565247,
|
| 9335 |
+
"learning_rate": 8.468376325355909e-07,
|
| 9336 |
+
"loss": 1.5218,
|
| 9337 |
+
"step": 1328
|
| 9338 |
+
},
|
| 9339 |
+
{
|
| 9340 |
+
"epoch": 0.3677874636778746,
|
| 9341 |
+
"grad_norm": 0.6104118824005127,
|
| 9342 |
+
"learning_rate": 8.174248699213128e-07,
|
| 9343 |
+
"loss": 1.8384,
|
| 9344 |
+
"step": 1329
|
| 9345 |
+
},
|
| 9346 |
+
{
|
| 9347 |
+
"epoch": 0.368064203680642,
|
| 9348 |
+
"grad_norm": 0.7055839896202087,
|
| 9349 |
+
"learning_rate": 7.885298685522235e-07,
|
| 9350 |
+
"loss": 1.9098,
|
| 9351 |
+
"step": 1330
|
| 9352 |
+
},
|
| 9353 |
+
{
|
| 9354 |
+
"epoch": 0.3683409436834094,
|
| 9355 |
+
"grad_norm": 0.526634693145752,
|
| 9356 |
+
"learning_rate": 7.601527792684304e-07,
|
| 9357 |
+
"loss": 1.1034,
|
| 9358 |
+
"step": 1331
|
| 9359 |
+
},
|
| 9360 |
+
{
|
| 9361 |
+
"epoch": 0.3686176836861768,
|
| 9362 |
+
"grad_norm": 0.5913317799568176,
|
| 9363 |
+
"learning_rate": 7.322937502064254e-07,
|
| 9364 |
+
"loss": 1.1516,
|
| 9365 |
+
"step": 1332
|
| 9366 |
+
},
|
| 9367 |
+
{
|
| 9368 |
+
"epoch": 0.36889442368894426,
|
| 9369 |
+
"grad_norm": 0.7523794174194336,
|
| 9370 |
+
"learning_rate": 7.049529267982525e-07,
|
| 9371 |
+
"loss": 1.6273,
|
| 9372 |
+
"step": 1333
|
| 9373 |
+
},
|
| 9374 |
+
{
|
| 9375 |
+
"epoch": 0.36917116369171166,
|
| 9376 |
+
"grad_norm": 0.6173596382141113,
|
| 9377 |
+
"learning_rate": 6.781304517707865e-07,
|
| 9378 |
+
"loss": 1.2523,
|
| 9379 |
+
"step": 1334
|
| 9380 |
+
},
|
| 9381 |
+
{
|
| 9382 |
+
"epoch": 0.36944790369447905,
|
| 9383 |
+
"grad_norm": 0.6670652031898499,
|
| 9384 |
+
"learning_rate": 6.518264651449779e-07,
|
| 9385 |
+
"loss": 1.5437,
|
| 9386 |
+
"step": 1335
|
| 9387 |
+
},
|
| 9388 |
+
{
|
| 9389 |
+
"epoch": 0.36972464369724645,
|
| 9390 |
+
"grad_norm": 0.507413923740387,
|
| 9391 |
+
"learning_rate": 6.2604110423512e-07,
|
| 9392 |
+
"loss": 1.5067,
|
| 9393 |
+
"step": 1336
|
| 9394 |
+
},
|
| 9395 |
+
{
|
| 9396 |
+
"epoch": 0.37000138370001384,
|
| 9397 |
+
"grad_norm": 0.5285928249359131,
|
| 9398 |
+
"learning_rate": 6.00774503648116e-07,
|
| 9399 |
+
"loss": 1.7352,
|
| 9400 |
+
"step": 1337
|
| 9401 |
+
},
|
| 9402 |
+
{
|
| 9403 |
+
"epoch": 0.37027812370278124,
|
| 9404 |
+
"grad_norm": 0.6521700024604797,
|
| 9405 |
+
"learning_rate": 5.760267952828025e-07,
|
| 9406 |
+
"loss": 1.3561,
|
| 9407 |
+
"step": 1338
|
| 9408 |
+
},
|
| 9409 |
+
{
|
| 9410 |
+
"epoch": 0.37055486370554863,
|
| 9411 |
+
"grad_norm": 0.5810115933418274,
|
| 9412 |
+
"learning_rate": 5.517981083292822e-07,
|
| 9413 |
+
"loss": 1.0703,
|
| 9414 |
+
"step": 1339
|
| 9415 |
+
},
|
| 9416 |
+
{
|
| 9417 |
+
"epoch": 0.370831603708316,
|
| 9418 |
+
"grad_norm": 0.5935457348823547,
|
| 9419 |
+
"learning_rate": 5.280885692681592e-07,
|
| 9420 |
+
"loss": 1.0588,
|
| 9421 |
+
"step": 1340
|
| 9422 |
+
},
|
| 9423 |
+
{
|
| 9424 |
+
"epoch": 0.3711083437110834,
|
| 9425 |
+
"grad_norm": 0.5672071576118469,
|
| 9426 |
+
"learning_rate": 5.048983018699827e-07,
|
| 9427 |
+
"loss": 1.3544,
|
| 9428 |
+
"step": 1341
|
| 9429 |
+
},
|
| 9430 |
+
{
|
| 9431 |
+
"epoch": 0.3713850837138508,
|
| 9432 |
+
"grad_norm": 0.6569086909294128,
|
| 9433 |
+
"learning_rate": 4.822274271945482e-07,
|
| 9434 |
+
"loss": 1.7341,
|
| 9435 |
+
"step": 1342
|
| 9436 |
+
},
|
| 9437 |
+
{
|
| 9438 |
+
"epoch": 0.3716618237166182,
|
| 9439 |
+
"grad_norm": 0.5407905578613281,
|
| 9440 |
+
"learning_rate": 4.600760635902646e-07,
|
| 9441 |
+
"loss": 1.6426,
|
| 9442 |
+
"step": 1343
|
| 9443 |
+
},
|
| 9444 |
+
{
|
| 9445 |
+
"epoch": 0.37193856371938566,
|
| 9446 |
+
"grad_norm": 0.5416109561920166,
|
| 9447 |
+
"learning_rate": 4.384443266935434e-07,
|
| 9448 |
+
"loss": 1.1893,
|
| 9449 |
+
"step": 1344
|
| 9450 |
+
},
|
| 9451 |
+
{
|
| 9452 |
+
"epoch": 0.37221530372215306,
|
| 9453 |
+
"grad_norm": 0.5008551478385925,
|
| 9454 |
+
"learning_rate": 4.173323294281994e-07,
|
| 9455 |
+
"loss": 1.5606,
|
| 9456 |
+
"step": 1345
|
| 9457 |
+
},
|
| 9458 |
+
{
|
| 9459 |
+
"epoch": 0.37249204372492045,
|
| 9460 |
+
"grad_norm": 0.5783845782279968,
|
| 9461 |
+
"learning_rate": 3.9674018200487326e-07,
|
| 9462 |
+
"loss": 2.0229,
|
| 9463 |
+
"step": 1346
|
| 9464 |
+
},
|
| 9465 |
+
{
|
| 9466 |
+
"epoch": 0.37276878372768785,
|
| 9467 |
+
"grad_norm": 0.528840184211731,
|
| 9468 |
+
"learning_rate": 3.766679919204208e-07,
|
| 9469 |
+
"loss": 1.598,
|
| 9470 |
+
"step": 1347
|
| 9471 |
+
},
|
| 9472 |
+
{
|
| 9473 |
+
"epoch": 0.37304552373045524,
|
| 9474 |
+
"grad_norm": 0.5217202305793762,
|
| 9475 |
+
"learning_rate": 3.5711586395739127e-07,
|
| 9476 |
+
"loss": 1.4061,
|
| 9477 |
+
"step": 1348
|
| 9478 |
+
},
|
| 9479 |
+
{
|
| 9480 |
+
"epoch": 0.37332226373322264,
|
| 9481 |
+
"grad_norm": 0.6138861775398254,
|
| 9482 |
+
"learning_rate": 3.3808390018343905e-07,
|
| 9483 |
+
"loss": 1.1543,
|
| 9484 |
+
"step": 1349
|
| 9485 |
+
},
|
| 9486 |
+
{
|
| 9487 |
+
"epoch": 0.37359900373599003,
|
| 9488 |
+
"grad_norm": 0.5172597765922546,
|
| 9489 |
+
"learning_rate": 3.195721999508461e-07,
|
| 9490 |
+
"loss": 1.1897,
|
| 9491 |
+
"step": 1350
|
| 9492 |
+
},
|
| 9493 |
+
{
|
| 9494 |
+
"epoch": 0.3738757437387574,
|
| 9495 |
+
"grad_norm": 0.5924769639968872,
|
| 9496 |
+
"learning_rate": 3.0158085989592244e-07,
|
| 9497 |
+
"loss": 1.4052,
|
| 9498 |
+
"step": 1351
|
| 9499 |
+
},
|
| 9500 |
+
{
|
| 9501 |
+
"epoch": 0.3741524837415248,
|
| 9502 |
+
"grad_norm": 0.82069993019104,
|
| 9503 |
+
"learning_rate": 2.841099739386066e-07,
|
| 9504 |
+
"loss": 1.7219,
|
| 9505 |
+
"step": 1352
|
| 9506 |
+
},
|
| 9507 |
+
{
|
| 9508 |
+
"epoch": 0.3744292237442922,
|
| 9509 |
+
"grad_norm": 0.4444645345211029,
|
| 9510 |
+
"learning_rate": 2.671596332818771e-07,
|
| 9511 |
+
"loss": 1.7224,
|
| 9512 |
+
"step": 1353
|
| 9513 |
+
},
|
| 9514 |
+
{
|
| 9515 |
+
"epoch": 0.3747059637470596,
|
| 9516 |
+
"grad_norm": 0.7407390475273132,
|
| 9517 |
+
"learning_rate": 2.507299264113416e-07,
|
| 9518 |
+
"loss": 1.0777,
|
| 9519 |
+
"step": 1354
|
| 9520 |
+
},
|
| 9521 |
+
{
|
| 9522 |
+
"epoch": 0.37498270374982706,
|
| 9523 |
+
"grad_norm": 0.4751138687133789,
|
| 9524 |
+
"learning_rate": 2.3482093909473756e-07,
|
| 9525 |
+
"loss": 1.3535,
|
| 9526 |
+
"step": 1355
|
| 9527 |
+
},
|
| 9528 |
+
{
|
| 9529 |
+
"epoch": 0.37525944375259446,
|
| 9530 |
+
"grad_norm": 0.6518821120262146,
|
| 9531 |
+
"learning_rate": 2.194327543814989e-07,
|
| 9532 |
+
"loss": 1.2305,
|
| 9533 |
+
"step": 1356
|
| 9534 |
+
},
|
| 9535 |
+
{
|
| 9536 |
+
"epoch": 0.37553618375536185,
|
| 9537 |
+
"grad_norm": 0.5623984336853027,
|
| 9538 |
+
"learning_rate": 2.0456545260232328e-07,
|
| 9539 |
+
"loss": 1.4732,
|
| 9540 |
+
"step": 1357
|
| 9541 |
+
},
|
| 9542 |
+
{
|
| 9543 |
+
"epoch": 0.37581292375812925,
|
| 9544 |
+
"grad_norm": 0.5145320892333984,
|
| 9545 |
+
"learning_rate": 1.9021911136876124e-07,
|
| 9546 |
+
"loss": 2.0338,
|
| 9547 |
+
"step": 1358
|
| 9548 |
+
},
|
| 9549 |
+
{
|
| 9550 |
+
"epoch": 0.37608966376089664,
|
| 9551 |
+
"grad_norm": 0.6144307851791382,
|
| 9552 |
+
"learning_rate": 1.7639380557278318e-07,
|
| 9553 |
+
"loss": 1.3467,
|
| 9554 |
+
"step": 1359
|
| 9555 |
+
},
|
| 9556 |
+
{
|
| 9557 |
+
"epoch": 0.37636640376366404,
|
| 9558 |
+
"grad_norm": 0.5381845831871033,
|
| 9559 |
+
"learning_rate": 1.630896073864352e-07,
|
| 9560 |
+
"loss": 1.661,
|
| 9561 |
+
"step": 1360
|
| 9562 |
+
},
|
| 9563 |
+
{
|
| 9564 |
+
"epoch": 0.37664314376643143,
|
| 9565 |
+
"grad_norm": 0.534831702709198,
|
| 9566 |
+
"learning_rate": 1.5030658626139503e-07,
|
| 9567 |
+
"loss": 1.1022,
|
| 9568 |
+
"step": 1361
|
| 9569 |
+
},
|
| 9570 |
+
{
|
| 9571 |
+
"epoch": 0.3769198837691988,
|
| 9572 |
+
"grad_norm": 0.6569218039512634,
|
| 9573 |
+
"learning_rate": 1.3804480892868344e-07,
|
| 9574 |
+
"loss": 1.9119,
|
| 9575 |
+
"step": 1362
|
| 9576 |
+
},
|
| 9577 |
+
{
|
| 9578 |
+
"epoch": 0.3771966237719662,
|
| 9579 |
+
"grad_norm": 0.6490721702575684,
|
| 9580 |
+
"learning_rate": 1.2630433939825327e-07,
|
| 9581 |
+
"loss": 1.4021,
|
| 9582 |
+
"step": 1363
|
| 9583 |
+
},
|
| 9584 |
+
{
|
| 9585 |
+
"epoch": 0.3774733637747336,
|
| 9586 |
+
"grad_norm": 0.5443328022956848,
|
| 9587 |
+
"learning_rate": 1.1508523895870093e-07,
|
| 9588 |
+
"loss": 1.7026,
|
| 9589 |
+
"step": 1364
|
| 9590 |
+
},
|
| 9591 |
+
{
|
| 9592 |
+
"epoch": 0.377750103777501,
|
| 9593 |
+
"grad_norm": 0.6615293622016907,
|
| 9594 |
+
"learning_rate": 1.0438756617691115e-07,
|
| 9595 |
+
"loss": 1.2067,
|
| 9596 |
+
"step": 1365
|
| 9597 |
+
},
|
| 9598 |
+
{
|
| 9599 |
+
"epoch": 0.37802684378026846,
|
| 9600 |
+
"grad_norm": 0.6423234939575195,
|
| 9601 |
+
"learning_rate": 9.421137689779036e-08,
|
| 9602 |
+
"loss": 1.0615,
|
| 9603 |
+
"step": 1366
|
| 9604 |
+
},
|
| 9605 |
+
{
|
| 9606 |
+
"epoch": 0.37830358378303586,
|
| 9607 |
+
"grad_norm": 0.6185110211372375,
|
| 9608 |
+
"learning_rate": 8.455672424393379e-08,
|
| 9609 |
+
"loss": 1.6298,
|
| 9610 |
+
"step": 1367
|
| 9611 |
+
},
|
| 9612 |
+
{
|
| 9613 |
+
"epoch": 0.37858032378580325,
|
| 9614 |
+
"grad_norm": 0.5677750706672668,
|
| 9615 |
+
"learning_rate": 7.542365861538114e-08,
|
| 9616 |
+
"loss": 2.0517,
|
| 9617 |
+
"step": 1368
|
| 9618 |
+
},
|
| 9619 |
+
{
|
| 9620 |
+
"epoch": 0.37885706378857065,
|
| 9621 |
+
"grad_norm": 0.6512099504470825,
|
| 9622 |
+
"learning_rate": 6.681222768931683e-08,
|
| 9623 |
+
"loss": 1.4206,
|
| 9624 |
+
"step": 1369
|
| 9625 |
+
},
|
| 9626 |
+
{
|
| 9627 |
+
"epoch": 0.37913380379133804,
|
| 9628 |
+
"grad_norm": 0.7686126232147217,
|
| 9629 |
+
"learning_rate": 5.872247641987016e-08,
|
| 9630 |
+
"loss": 1.1338,
|
| 9631 |
+
"step": 1370
|
| 9632 |
+
},
|
| 9633 |
+
{
|
| 9634 |
+
"epoch": 0.37941054379410544,
|
| 9635 |
+
"grad_norm": 0.5900172591209412,
|
| 9636 |
+
"learning_rate": 5.1154447037837785e-08,
|
| 9637 |
+
"loss": 1.0969,
|
| 9638 |
+
"step": 1371
|
| 9639 |
+
},
|
| 9640 |
+
{
|
| 9641 |
+
"epoch": 0.37968728379687283,
|
| 9642 |
+
"grad_norm": 0.6765168905258179,
|
| 9643 |
+
"learning_rate": 4.410817905049491e-08,
|
| 9644 |
+
"loss": 1.6444,
|
| 9645 |
+
"step": 1372
|
| 9646 |
+
},
|
| 9647 |
+
{
|
| 9648 |
+
"epoch": 0.3799640237996402,
|
| 9649 |
+
"grad_norm": 0.5050364136695862,
|
| 9650 |
+
"learning_rate": 3.758370924135113e-08,
|
| 9651 |
+
"loss": 0.9743,
|
| 9652 |
+
"step": 1373
|
| 9653 |
+
},
|
| 9654 |
+
{
|
| 9655 |
+
"epoch": 0.3802407638024076,
|
| 9656 |
+
"grad_norm": 0.5538033843040466,
|
| 9657 |
+
"learning_rate": 3.1581071670006015e-08,
|
| 9658 |
+
"loss": 1.4688,
|
| 9659 |
+
"step": 1374
|
| 9660 |
+
},
|
| 9661 |
+
{
|
| 9662 |
+
"epoch": 0.380517503805175,
|
| 9663 |
+
"grad_norm": 0.5480916500091553,
|
| 9664 |
+
"learning_rate": 2.610029767191602e-08,
|
| 9665 |
+
"loss": 1.1705,
|
| 9666 |
+
"step": 1375
|
| 9667 |
+
},
|
| 9668 |
+
{
|
| 9669 |
+
"epoch": 0.3807942438079424,
|
| 9670 |
+
"grad_norm": 0.569830596446991,
|
| 9671 |
+
"learning_rate": 2.1141415858305646e-08,
|
| 9672 |
+
"loss": 1.2921,
|
| 9673 |
+
"step": 1376
|
| 9674 |
+
},
|
| 9675 |
+
{
|
| 9676 |
+
"epoch": 0.38107098381070986,
|
| 9677 |
+
"grad_norm": 0.6757997274398804,
|
| 9678 |
+
"learning_rate": 1.67044521159343e-08,
|
| 9679 |
+
"loss": 1.4999,
|
| 9680 |
+
"step": 1377
|
| 9681 |
+
},
|
| 9682 |
+
{
|
| 9683 |
+
"epoch": 0.38134772381347726,
|
| 9684 |
+
"grad_norm": 0.5387727618217468,
|
| 9685 |
+
"learning_rate": 1.2789429607007463e-08,
|
| 9686 |
+
"loss": 1.248,
|
| 9687 |
+
"step": 1378
|
| 9688 |
+
},
|
| 9689 |
+
{
|
| 9690 |
+
"epoch": 0.38162446381624465,
|
| 9691 |
+
"grad_norm": 0.7514543533325195,
|
| 9692 |
+
"learning_rate": 9.396368769065689e-09,
|
| 9693 |
+
"loss": 1.2614,
|
| 9694 |
+
"step": 1379
|
| 9695 |
+
},
|
| 9696 |
+
{
|
| 9697 |
+
"epoch": 0.38190120381901205,
|
| 9698 |
+
"grad_norm": 0.5628589987754822,
|
| 9699 |
+
"learning_rate": 6.525287314851358e-09,
|
| 9700 |
+
"loss": 1.0868,
|
| 9701 |
+
"step": 1380
|
| 9702 |
+
},
|
| 9703 |
+
{
|
| 9704 |
+
"epoch": 0.38217794382177944,
|
| 9705 |
+
"grad_norm": 0.6602361798286438,
|
| 9706 |
+
"learning_rate": 4.176200232219873e-09,
|
| 9707 |
+
"loss": 1.1708,
|
| 9708 |
+
"step": 1381
|
| 9709 |
+
},
|
| 9710 |
+
{
|
| 9711 |
+
"epoch": 0.38245468382454684,
|
| 9712 |
+
"grad_norm": 0.6289588212966919,
|
| 9713 |
+
"learning_rate": 2.3491197840841415e-09,
|
| 9714 |
+
"loss": 1.1217,
|
| 9715 |
+
"step": 1382
|
| 9716 |
+
},
|
| 9717 |
+
{
|
| 9718 |
+
"epoch": 0.38273142382731423,
|
| 9719 |
+
"grad_norm": 0.7560678720474243,
|
| 9720 |
+
"learning_rate": 1.0440555083146564e-09,
|
| 9721 |
+
"loss": 1.0909,
|
| 9722 |
+
"step": 1383
|
| 9723 |
+
},
|
| 9724 |
+
{
|
| 9725 |
+
"epoch": 0.3830081638300816,
|
| 9726 |
+
"grad_norm": 0.4898832142353058,
|
| 9727 |
+
"learning_rate": 2.610142177172925e-10,
|
| 9728 |
+
"loss": 1.2815,
|
| 9729 |
+
"step": 1384
|
| 9730 |
+
},
|
| 9731 |
+
{
|
| 9732 |
+
"epoch": 0.383284903832849,
|
| 9733 |
+
"grad_norm": 0.6070192456245422,
|
| 9734 |
+
"learning_rate": 0.0,
|
| 9735 |
+
"loss": 1.2216,
|
| 9736 |
+
"step": 1385
|
| 9737 |
}
|
| 9738 |
],
|
| 9739 |
"logging_steps": 1,
|
@@ … @@
"should_evaluate": false,
|
| 9749 |
"should_log": false,
|
| 9750 |
"should_save": true,
|
| 9751 |
+
"should_training_stop": true
|
| 9752 |
},
|
| 9753 |
"attributes": {}
|
| 9754 |
}
|
| 9755 |
},
|
| 9756 |
+
"total_flos": 2.2757473741543834e+17,
|
| 9757 |
"train_batch_size": 2,
|
| 9758 |
"trial_name": null,
|
| 9759 |
"trial_params": null
|