Training in progress, step 1380, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fbd75fe459b82d79107bef861ea6e39aa893ae9dbb113520ddb31260be3cfe69
 size 327040
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a649562f40da517bf56ca255a7b3c586a2fa0a7808fa4dd260f66edfc0e10dea
 size 739578
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6cbaccca50acf8ec732395a5d7a22cd40d65183d7cabf7f03b152ffb023c89bd
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:772e9a59c8b29fc27ba69d57d27ebe27af87973bb2649f95b4032bd0ee0e5555
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
   "eval_steps": 345,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7284,6 +7284,2429 @@
       "eval_samples_per_second": 71.039,
       "eval_steps_per_second": 35.524,
       "step": 1035
     }
   ],
   "logging_steps": 1,
@@ -7298,12 +9721,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null

 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 0.06870884626395649,
   "eval_steps": 345,
+  "global_step": 1380,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
       "eval_samples_per_second": 71.039,
       "eval_steps_per_second": 35.524,
       "step": 1035
|
| 7287 |
+
},
|
| 7288 |
+
{
|
| 7289 |
+
"epoch": 0.05158142371699922,
|
| 7290 |
+
"grad_norm": 0.36965492367744446,
|
| 7291 |
+
"learning_rate": 2.953296322779401e-05,
|
| 7292 |
+
"loss": 11.7632,
|
| 7293 |
+
"step": 1036
|
| 7294 |
+
},
|
| 7295 |
+
{
|
| 7296 |
+
"epoch": 0.051631212736031065,
|
| 7297 |
+
"grad_norm": 0.16989007592201233,
|
| 7298 |
+
"learning_rate": 2.937044284841026e-05,
|
| 7299 |
+
"loss": 11.8155,
|
| 7300 |
+
"step": 1037
|
| 7301 |
+
},
|
| 7302 |
+
{
|
| 7303 |
+
"epoch": 0.05168100175506292,
|
| 7304 |
+
"grad_norm": 0.14676935970783234,
|
| 7305 |
+
"learning_rate": 2.920829387159596e-05,
|
| 7306 |
+
"loss": 11.8174,
|
| 7307 |
+
"step": 1038
|
| 7308 |
+
},
|
| 7309 |
+
{
|
| 7310 |
+
"epoch": 0.05173079077409477,
|
| 7311 |
+
"grad_norm": 0.13628514111042023,
|
| 7312 |
+
"learning_rate": 2.904651715000477e-05,
|
| 7313 |
+
"loss": 11.7931,
|
| 7314 |
+
"step": 1039
|
| 7315 |
+
},
|
| 7316 |
+
{
|
| 7317 |
+
"epoch": 0.05178057979312663,
|
| 7318 |
+
"grad_norm": 0.13561038672924042,
|
| 7319 |
+
"learning_rate": 2.888511353433274e-05,
|
| 7320 |
+
"loss": 11.8035,
|
| 7321 |
+
"step": 1040
|
| 7322 |
+
},
|
| 7323 |
+
{
|
| 7324 |
+
"epoch": 0.05183036881215848,
|
| 7325 |
+
"grad_norm": 0.12213072180747986,
|
| 7326 |
+
"learning_rate": 2.8724083873314078e-05,
|
| 7327 |
+
"loss": 11.8326,
|
| 7328 |
+
"step": 1041
|
| 7329 |
+
},
|
| 7330 |
+
{
|
| 7331 |
+
"epoch": 0.05188015783119033,
|
| 7332 |
+
"grad_norm": 0.1607050895690918,
|
| 7333 |
+
"learning_rate": 2.8563429013716514e-05,
|
| 7334 |
+
"loss": 11.8336,
|
| 7335 |
+
"step": 1042
|
| 7336 |
+
},
|
| 7337 |
+
{
|
| 7338 |
+
"epoch": 0.05192994685022218,
|
| 7339 |
+
"grad_norm": 0.13919731974601746,
|
| 7340 |
+
"learning_rate": 2.840314980033687e-05,
|
| 7341 |
+
"loss": 11.8007,
|
| 7342 |
+
"step": 1043
|
| 7343 |
+
},
|
| 7344 |
+
{
|
| 7345 |
+
"epoch": 0.051979735869254036,
|
| 7346 |
+
"grad_norm": 0.1409829705953598,
|
| 7347 |
+
"learning_rate": 2.8243247075996693e-05,
|
| 7348 |
+
"loss": 11.8133,
|
| 7349 |
+
"step": 1044
|
| 7350 |
+
},
|
| 7351 |
+
{
|
| 7352 |
+
"epoch": 0.05202952488828589,
|
| 7353 |
+
"grad_norm": 0.24743421375751495,
|
| 7354 |
+
"learning_rate": 2.80837216815378e-05,
|
| 7355 |
+
"loss": 11.7957,
|
| 7356 |
+
"step": 1045
|
| 7357 |
+
},
|
| 7358 |
+
{
|
| 7359 |
+
"epoch": 0.052079313907317744,
|
| 7360 |
+
"grad_norm": 0.1670936793088913,
|
| 7361 |
+
"learning_rate": 2.7924574455817732e-05,
|
| 7362 |
+
"loss": 11.7859,
|
| 7363 |
+
"step": 1046
|
| 7364 |
+
},
|
| 7365 |
+
{
|
| 7366 |
+
"epoch": 0.05212910292634959,
|
| 7367 |
+
"grad_norm": 0.18178440630435944,
|
| 7368 |
+
"learning_rate": 2.7765806235705594e-05,
|
| 7369 |
+
"loss": 11.7433,
|
| 7370 |
+
"step": 1047
|
| 7371 |
+
},
|
| 7372 |
+
{
|
| 7373 |
+
"epoch": 0.052178891945381445,
|
| 7374 |
+
"grad_norm": 0.22285285592079163,
|
| 7375 |
+
"learning_rate": 2.7607417856077334e-05,
|
| 7376 |
+
"loss": 11.7747,
|
| 7377 |
+
"step": 1048
|
| 7378 |
+
},
|
| 7379 |
+
{
|
| 7380 |
+
"epoch": 0.0522286809644133,
|
| 7381 |
+
"grad_norm": 0.15030477941036224,
|
| 7382 |
+
"learning_rate": 2.744941014981167e-05,
|
| 7383 |
+
"loss": 11.7941,
|
| 7384 |
+
"step": 1049
|
| 7385 |
+
},
|
| 7386 |
+
{
|
| 7387 |
+
"epoch": 0.05227846998344515,
|
| 7388 |
+
"grad_norm": 0.24723336100578308,
|
| 7389 |
+
"learning_rate": 2.7291783947785543e-05,
|
| 7390 |
+
"loss": 11.7684,
|
| 7391 |
+
"step": 1050
|
| 7392 |
+
},
|
| 7393 |
+
{
|
| 7394 |
+
"epoch": 0.05232825900247701,
|
| 7395 |
+
"grad_norm": 0.19226258993148804,
|
| 7396 |
+
"learning_rate": 2.7134540078869662e-05,
|
| 7397 |
+
"loss": 11.8215,
|
| 7398 |
+
"step": 1051
|
| 7399 |
+
},
|
| 7400 |
+
{
|
| 7401 |
+
"epoch": 0.052378048021508854,
|
| 7402 |
+
"grad_norm": 0.1992882490158081,
|
| 7403 |
+
"learning_rate": 2.6977679369924357e-05,
|
| 7404 |
+
"loss": 11.8158,
|
| 7405 |
+
"step": 1052
|
| 7406 |
+
},
|
| 7407 |
+
{
|
| 7408 |
+
"epoch": 0.05242783704054071,
|
| 7409 |
+
"grad_norm": 0.119527667760849,
|
| 7410 |
+
"learning_rate": 2.682120264579514e-05,
|
| 7411 |
+
"loss": 11.8189,
|
| 7412 |
+
"step": 1053
|
| 7413 |
+
},
|
| 7414 |
+
{
|
| 7415 |
+
"epoch": 0.05247762605957256,
|
| 7416 |
+
"grad_norm": 0.4070155620574951,
|
| 7417 |
+
"learning_rate": 2.6665110729308263e-05,
|
| 7418 |
+
"loss": 11.7579,
|
| 7419 |
+
"step": 1054
|
| 7420 |
+
},
|
| 7421 |
+
{
|
| 7422 |
+
"epoch": 0.052527415078604416,
|
| 7423 |
+
"grad_norm": 0.11894488334655762,
|
| 7424 |
+
"learning_rate": 2.650940444126654e-05,
|
| 7425 |
+
"loss": 11.8111,
|
| 7426 |
+
"step": 1055
|
| 7427 |
+
},
|
| 7428 |
+
{
|
| 7429 |
+
"epoch": 0.05257720409763627,
|
| 7430 |
+
"grad_norm": 0.17501161992549896,
|
| 7431 |
+
"learning_rate": 2.6354084600445016e-05,
|
| 7432 |
+
"loss": 11.7862,
|
| 7433 |
+
"step": 1056
|
| 7434 |
+
},
|
| 7435 |
+
{
|
| 7436 |
+
"epoch": 0.052626993116668117,
|
| 7437 |
+
"grad_norm": 0.14933635294437408,
|
| 7438 |
+
"learning_rate": 2.6199152023586503e-05,
|
| 7439 |
+
"loss": 11.8161,
|
| 7440 |
+
"step": 1057
|
| 7441 |
+
},
|
| 7442 |
+
{
|
| 7443 |
+
"epoch": 0.05267678213569997,
|
| 7444 |
+
"grad_norm": 0.1464557647705078,
|
| 7445 |
+
"learning_rate": 2.6044607525397502e-05,
|
| 7446 |
+
"loss": 11.8351,
|
| 7447 |
+
"step": 1058
|
| 7448 |
+
},
|
| 7449 |
+
{
|
| 7450 |
+
"epoch": 0.052726571154731824,
|
| 7451 |
+
"grad_norm": 0.12073122709989548,
|
| 7452 |
+
"learning_rate": 2.5890451918543824e-05,
|
| 7453 |
+
"loss": 11.8176,
|
| 7454 |
+
"step": 1059
|
| 7455 |
+
},
|
| 7456 |
+
{
|
| 7457 |
+
"epoch": 0.05277636017376368,
|
| 7458 |
+
"grad_norm": 0.26039791107177734,
|
| 7459 |
+
"learning_rate": 2.5736686013646228e-05,
|
| 7460 |
+
"loss": 11.745,
|
| 7461 |
+
"step": 1060
|
| 7462 |
+
},
|
| 7463 |
+
{
|
| 7464 |
+
"epoch": 0.05282614919279553,
|
| 7465 |
+
"grad_norm": 0.3511323630809784,
|
| 7466 |
+
"learning_rate": 2.5583310619276247e-05,
|
| 7467 |
+
"loss": 11.7189,
|
| 7468 |
+
"step": 1061
|
| 7469 |
+
},
|
| 7470 |
+
{
|
| 7471 |
+
"epoch": 0.05287593821182738,
|
| 7472 |
+
"grad_norm": 0.1853386014699936,
|
| 7473 |
+
"learning_rate": 2.5430326541952087e-05,
|
| 7474 |
+
"loss": 11.7723,
|
| 7475 |
+
"step": 1062
|
| 7476 |
+
},
|
| 7477 |
+
{
|
| 7478 |
+
"epoch": 0.05292572723085923,
|
| 7479 |
+
"grad_norm": 0.28304725885391235,
|
| 7480 |
+
"learning_rate": 2.527773458613404e-05,
|
| 7481 |
+
"loss": 11.8249,
|
| 7482 |
+
"step": 1063
|
| 7483 |
+
},
|
| 7484 |
+
{
|
| 7485 |
+
"epoch": 0.05297551624989109,
|
| 7486 |
+
"grad_norm": 0.14613457024097443,
|
| 7487 |
+
"learning_rate": 2.5125535554220482e-05,
|
| 7488 |
+
"loss": 11.7718,
|
| 7489 |
+
"step": 1064
|
| 7490 |
+
},
|
| 7491 |
+
{
|
| 7492 |
+
"epoch": 0.05302530526892294,
|
| 7493 |
+
"grad_norm": 0.16421784460544586,
|
| 7494 |
+
"learning_rate": 2.4973730246543735e-05,
|
| 7495 |
+
"loss": 11.7844,
|
| 7496 |
+
"step": 1065
|
| 7497 |
+
},
|
| 7498 |
+
{
|
| 7499 |
+
"epoch": 0.05307509428795479,
|
| 7500 |
+
"grad_norm": 0.1314469873905182,
|
| 7501 |
+
"learning_rate": 2.482231946136563e-05,
|
| 7502 |
+
"loss": 11.7913,
|
| 7503 |
+
"step": 1066
|
| 7504 |
+
},
|
| 7505 |
+
{
|
| 7506 |
+
"epoch": 0.05312488330698664,
|
| 7507 |
+
"grad_norm": 0.18584749102592468,
|
| 7508 |
+
"learning_rate": 2.4671303994873373e-05,
|
| 7509 |
+
"loss": 11.8189,
|
| 7510 |
+
"step": 1067
|
| 7511 |
+
},
|
| 7512 |
+
{
|
| 7513 |
+
"epoch": 0.053174672326018496,
|
| 7514 |
+
"grad_norm": 0.15987344086170197,
|
| 7515 |
+
"learning_rate": 2.4520684641175572e-05,
|
| 7516 |
+
"loss": 11.8061,
|
| 7517 |
+
"step": 1068
|
| 7518 |
+
},
|
| 7519 |
+
{
|
| 7520 |
+
"epoch": 0.05322446134505035,
|
| 7521 |
+
"grad_norm": 0.12855324149131775,
|
| 7522 |
+
"learning_rate": 2.437046219229776e-05,
|
| 7523 |
+
"loss": 11.8157,
|
| 7524 |
+
"step": 1069
|
| 7525 |
+
},
|
| 7526 |
+
{
|
| 7527 |
+
"epoch": 0.053274250364082204,
|
| 7528 |
+
"grad_norm": 0.20354953408241272,
|
| 7529 |
+
"learning_rate": 2.4220637438178317e-05,
|
| 7530 |
+
"loss": 11.7676,
|
| 7531 |
+
"step": 1070
|
| 7532 |
+
},
|
| 7533 |
+
{
|
| 7534 |
+
"epoch": 0.05332403938311405,
|
| 7535 |
+
"grad_norm": 0.2609831988811493,
|
| 7536 |
+
"learning_rate": 2.4071211166664575e-05,
|
| 7537 |
+
"loss": 11.8065,
|
| 7538 |
+
"step": 1071
|
| 7539 |
+
},
|
| 7540 |
+
{
|
| 7541 |
+
"epoch": 0.053373828402145905,
|
| 7542 |
+
"grad_norm": 0.16968388855457306,
|
| 7543 |
+
"learning_rate": 2.3922184163508254e-05,
|
| 7544 |
+
"loss": 11.7532,
|
| 7545 |
+
"step": 1072
|
| 7546 |
+
},
|
| 7547 |
+
{
|
| 7548 |
+
"epoch": 0.05342361742117776,
|
| 7549 |
+
"grad_norm": 0.18621961772441864,
|
| 7550 |
+
"learning_rate": 2.3773557212361586e-05,
|
| 7551 |
+
"loss": 11.811,
|
| 7552 |
+
"step": 1073
|
| 7553 |
+
},
|
| 7554 |
+
{
|
| 7555 |
+
"epoch": 0.05347340644020961,
|
| 7556 |
+
"grad_norm": 0.10710092633962631,
|
| 7557 |
+
"learning_rate": 2.3625331094773206e-05,
|
| 7558 |
+
"loss": 11.7985,
|
| 7559 |
+
"step": 1074
|
| 7560 |
+
},
|
| 7561 |
+
{
|
| 7562 |
+
"epoch": 0.05352319545924147,
|
| 7563 |
+
"grad_norm": 0.11561262607574463,
|
| 7564 |
+
"learning_rate": 2.3477506590183972e-05,
|
| 7565 |
+
"loss": 11.8114,
|
| 7566 |
+
"step": 1075
|
| 7567 |
+
},
|
| 7568 |
+
{
|
| 7569 |
+
"epoch": 0.053572984478273314,
|
| 7570 |
+
"grad_norm": 0.20786364376544952,
|
| 7571 |
+
"learning_rate": 2.3330084475922797e-05,
|
| 7572 |
+
"loss": 11.8402,
|
| 7573 |
+
"step": 1076
|
| 7574 |
+
},
|
| 7575 |
+
{
|
| 7576 |
+
"epoch": 0.05362277349730517,
|
| 7577 |
+
"grad_norm": 0.16998012363910675,
|
| 7578 |
+
"learning_rate": 2.3183065527202718e-05,
|
| 7579 |
+
"loss": 11.8128,
|
| 7580 |
+
"step": 1077
|
| 7581 |
+
},
|
| 7582 |
+
{
|
| 7583 |
+
"epoch": 0.05367256251633702,
|
| 7584 |
+
"grad_norm": 0.1251063197851181,
|
| 7585 |
+
"learning_rate": 2.303645051711677e-05,
|
| 7586 |
+
"loss": 11.833,
|
| 7587 |
+
"step": 1078
|
| 7588 |
+
},
|
| 7589 |
+
{
|
| 7590 |
+
"epoch": 0.053722351535368876,
|
| 7591 |
+
"grad_norm": 0.2824801504611969,
|
| 7592 |
+
"learning_rate": 2.289024021663376e-05,
|
| 7593 |
+
"loss": 11.7915,
|
| 7594 |
+
"step": 1079
|
| 7595 |
+
},
|
| 7596 |
+
{
|
| 7597 |
+
"epoch": 0.05377214055440073,
|
| 7598 |
+
"grad_norm": 0.28118106722831726,
|
| 7599 |
+
"learning_rate": 2.2744435394594497e-05,
|
| 7600 |
+
"loss": 11.7796,
|
| 7601 |
+
"step": 1080
|
| 7602 |
+
},
|
| 7603 |
+
{
|
| 7604 |
+
"epoch": 0.05382192957343258,
|
| 7605 |
+
"grad_norm": 0.16945096850395203,
|
| 7606 |
+
"learning_rate": 2.2599036817707532e-05,
|
| 7607 |
+
"loss": 11.8112,
|
| 7608 |
+
"step": 1081
|
| 7609 |
+
},
|
| 7610 |
+
{
|
| 7611 |
+
"epoch": 0.05387171859246443,
|
| 7612 |
+
"grad_norm": 0.15975421667099,
|
| 7613 |
+
"learning_rate": 2.245404525054515e-05,
|
| 7614 |
+
"loss": 11.7715,
|
| 7615 |
+
"step": 1082
|
| 7616 |
+
},
|
| 7617 |
+
{
|
| 7618 |
+
"epoch": 0.053921507611496285,
|
| 7619 |
+
"grad_norm": 0.27923616766929626,
|
| 7620 |
+
"learning_rate": 2.230946145553947e-05,
|
| 7621 |
+
"loss": 11.7882,
|
| 7622 |
+
"step": 1083
|
| 7623 |
+
},
|
| 7624 |
+
{
|
| 7625 |
+
"epoch": 0.05397129663052814,
|
| 7626 |
+
"grad_norm": 0.3360084295272827,
|
| 7627 |
+
"learning_rate": 2.2165286192978342e-05,
|
| 7628 |
+
"loss": 11.7996,
|
| 7629 |
+
"step": 1084
|
| 7630 |
+
},
|
| 7631 |
+
{
|
| 7632 |
+
"epoch": 0.05402108564955999,
|
| 7633 |
+
"grad_norm": 0.17016026377677917,
|
| 7634 |
+
"learning_rate": 2.20215202210013e-05,
|
| 7635 |
+
"loss": 11.8092,
|
| 7636 |
+
"step": 1085
|
| 7637 |
+
},
|
| 7638 |
+
{
|
| 7639 |
+
"epoch": 0.05407087466859184,
|
| 7640 |
+
"grad_norm": 0.12959083914756775,
|
| 7641 |
+
"learning_rate": 2.1878164295595737e-05,
|
| 7642 |
+
"loss": 11.8043,
|
| 7643 |
+
"step": 1086
|
| 7644 |
+
},
|
| 7645 |
+
{
|
| 7646 |
+
"epoch": 0.054120663687623694,
|
| 7647 |
+
"grad_norm": 0.18477118015289307,
|
| 7648 |
+
"learning_rate": 2.1735219170592734e-05,
|
| 7649 |
+
"loss": 11.7787,
|
| 7650 |
+
"step": 1087
|
| 7651 |
+
},
|
| 7652 |
+
{
|
| 7653 |
+
"epoch": 0.05417045270665555,
|
| 7654 |
+
"grad_norm": 0.16290272772312164,
|
| 7655 |
+
"learning_rate": 2.1592685597663286e-05,
|
| 7656 |
+
"loss": 11.8271,
|
| 7657 |
+
"step": 1088
|
| 7658 |
+
},
|
| 7659 |
+
{
|
| 7660 |
+
"epoch": 0.0542202417256874,
|
| 7661 |
+
"grad_norm": 0.21891824901103973,
|
| 7662 |
+
"learning_rate": 2.1450564326314228e-05,
|
| 7663 |
+
"loss": 11.8239,
|
| 7664 |
+
"step": 1089
|
| 7665 |
+
},
|
| 7666 |
+
{
|
| 7667 |
+
"epoch": 0.054270030744719255,
|
| 7668 |
+
"grad_norm": 0.17136146128177643,
|
| 7669 |
+
"learning_rate": 2.130885610388428e-05,
|
| 7670 |
+
"loss": 11.7743,
|
| 7671 |
+
"step": 1090
|
| 7672 |
+
},
|
| 7673 |
+
{
|
| 7674 |
+
"epoch": 0.0543198197637511,
|
| 7675 |
+
"grad_norm": 0.22959406673908234,
|
| 7676 |
+
"learning_rate": 2.1167561675540228e-05,
|
| 7677 |
+
"loss": 11.7627,
|
| 7678 |
+
"step": 1091
|
| 7679 |
+
},
|
| 7680 |
+
{
|
| 7681 |
+
"epoch": 0.054369608782782956,
|
| 7682 |
+
"grad_norm": 0.15619830787181854,
|
| 7683 |
+
"learning_rate": 2.1026681784272872e-05,
|
| 7684 |
+
"loss": 11.8268,
|
| 7685 |
+
"step": 1092
|
| 7686 |
+
},
|
| 7687 |
+
{
|
| 7688 |
+
"epoch": 0.05441939780181481,
|
| 7689 |
+
"grad_norm": 0.22131508588790894,
|
| 7690 |
+
"learning_rate": 2.088621717089325e-05,
|
| 7691 |
+
"loss": 11.7739,
|
| 7692 |
+
"step": 1093
|
| 7693 |
+
},
|
| 7694 |
+
{
|
| 7695 |
+
"epoch": 0.054469186820846664,
|
| 7696 |
+
"grad_norm": 0.22760333120822906,
|
| 7697 |
+
"learning_rate": 2.074616857402867e-05,
|
| 7698 |
+
"loss": 11.7798,
|
| 7699 |
+
"step": 1094
|
| 7700 |
+
},
|
| 7701 |
+
{
|
| 7702 |
+
"epoch": 0.05451897583987852,
|
| 7703 |
+
"grad_norm": 0.13869380950927734,
|
| 7704 |
+
"learning_rate": 2.0606536730118763e-05,
|
| 7705 |
+
"loss": 11.802,
|
| 7706 |
+
"step": 1095
|
| 7707 |
+
},
|
| 7708 |
+
{
|
| 7709 |
+
"epoch": 0.054568764858910365,
|
| 7710 |
+
"grad_norm": 0.21458180248737335,
|
| 7711 |
+
"learning_rate": 2.0467322373411757e-05,
|
| 7712 |
+
"loss": 11.7855,
|
| 7713 |
+
"step": 1096
|
| 7714 |
+
},
|
| 7715 |
+
{
|
| 7716 |
+
"epoch": 0.05461855387794222,
|
| 7717 |
+
"grad_norm": 0.20472285151481628,
|
| 7718 |
+
"learning_rate": 2.0328526235960565e-05,
|
| 7719 |
+
"loss": 11.7979,
|
| 7720 |
+
"step": 1097
|
| 7721 |
+
},
|
| 7722 |
+
{
|
| 7723 |
+
"epoch": 0.05466834289697407,
|
| 7724 |
+
"grad_norm": 0.2615392506122589,
|
| 7725 |
+
"learning_rate": 2.019014904761879e-05,
|
| 7726 |
+
"loss": 11.7856,
|
| 7727 |
+
"step": 1098
|
| 7728 |
+
},
|
| 7729 |
+
{
|
| 7730 |
+
"epoch": 0.05471813191600593,
|
| 7731 |
+
"grad_norm": 0.17862752079963684,
|
| 7732 |
+
"learning_rate": 2.0052191536037146e-05,
|
| 7733 |
+
"loss": 11.7582,
|
| 7734 |
+
"step": 1099
|
| 7735 |
+
},
|
| 7736 |
+
{
|
| 7737 |
+
"epoch": 0.054767920935037774,
|
| 7738 |
+
"grad_norm": 0.1673935204744339,
|
| 7739 |
+
"learning_rate": 1.9914654426659374e-05,
|
| 7740 |
+
"loss": 11.8152,
|
| 7741 |
+
"step": 1100
|
| 7742 |
+
},
|
| 7743 |
+
{
|
| 7744 |
+
"epoch": 0.05481770995406963,
|
| 7745 |
+
"grad_norm": 0.16946442425251007,
|
| 7746 |
+
"learning_rate": 1.9777538442718645e-05,
|
| 7747 |
+
"loss": 11.7649,
|
| 7748 |
+
"step": 1101
|
| 7749 |
+
},
|
| 7750 |
+
{
|
| 7751 |
+
"epoch": 0.05486749897310148,
|
| 7752 |
+
"grad_norm": 0.12239822745323181,
|
| 7753 |
+
"learning_rate": 1.9640844305233642e-05,
|
| 7754 |
+
"loss": 11.8203,
|
| 7755 |
+
"step": 1102
|
| 7756 |
+
},
|
| 7757 |
+
{
|
| 7758 |
+
"epoch": 0.054917287992133336,
|
| 7759 |
+
"grad_norm": 0.1471148282289505,
|
| 7760 |
+
"learning_rate": 1.95045727330047e-05,
|
| 7761 |
+
"loss": 11.8167,
|
| 7762 |
+
"step": 1103
|
| 7763 |
+
},
|
| 7764 |
+
{
|
| 7765 |
+
"epoch": 0.05496707701116519,
|
| 7766 |
+
"grad_norm": 0.2793964147567749,
|
| 7767 |
+
"learning_rate": 1.936872444261022e-05,
|
| 7768 |
+
"loss": 11.7803,
|
| 7769 |
+
"step": 1104
|
| 7770 |
+
},
|
| 7771 |
+
{
|
| 7772 |
+
"epoch": 0.05501686603019704,
|
| 7773 |
+
"grad_norm": 0.1413748562335968,
|
| 7774 |
+
"learning_rate": 1.9233300148402765e-05,
|
| 7775 |
+
"loss": 11.821,
|
| 7776 |
+
"step": 1105
|
| 7777 |
+
},
|
| 7778 |
+
{
|
| 7779 |
+
"epoch": 0.05506665504922889,
|
| 7780 |
+
"grad_norm": 0.14121146500110626,
|
| 7781 |
+
"learning_rate": 1.9098300562505266e-05,
|
| 7782 |
+
"loss": 11.8088,
|
| 7783 |
+
"step": 1106
|
| 7784 |
+
},
|
| 7785 |
+
{
|
| 7786 |
+
"epoch": 0.055116444068260745,
|
| 7787 |
+
"grad_norm": 0.13371479511260986,
|
| 7788 |
+
"learning_rate": 1.8963726394807424e-05,
|
| 7789 |
+
"loss": 11.8042,
|
| 7790 |
+
"step": 1107
|
| 7791 |
+
},
|
| 7792 |
+
{
|
| 7793 |
+
"epoch": 0.0551662330872926,
|
| 7794 |
+
"grad_norm": 0.15749718248844147,
|
| 7795 |
+
"learning_rate": 1.8829578352961885e-05,
|
| 7796 |
+
"loss": 11.8275,
|
| 7797 |
+
"step": 1108
|
| 7798 |
+
},
|
| 7799 |
+
{
|
| 7800 |
+
"epoch": 0.05521602210632445,
|
| 7801 |
+
"grad_norm": 0.171096533536911,
|
| 7802 |
+
"learning_rate": 1.869585714238047e-05,
|
| 7803 |
+
"loss": 11.7662,
|
| 7804 |
+
"step": 1109
|
| 7805 |
+
},
|
| 7806 |
+
{
|
| 7807 |
+
"epoch": 0.0552658111253563,
|
| 7808 |
+
"grad_norm": 0.25892457365989685,
|
| 7809 |
+
"learning_rate": 1.8562563466230576e-05,
|
| 7810 |
+
"loss": 11.7974,
|
| 7811 |
+
"step": 1110
|
| 7812 |
+
},
|
| 7813 |
+
{
|
| 7814 |
+
"epoch": 0.055315600144388154,
|
| 7815 |
+
"grad_norm": 0.34793660044670105,
|
| 7816 |
+
"learning_rate": 1.8429698025431465e-05,
|
| 7817 |
+
"loss": 11.7446,
|
| 7818 |
+
"step": 1111
|
| 7819 |
+
},
|
| 7820 |
+
{
|
| 7821 |
+
"epoch": 0.05536538916342001,
|
| 7822 |
+
"grad_norm": 0.16733016073703766,
|
| 7823 |
+
"learning_rate": 1.8297261518650456e-05,
|
| 7824 |
+
"loss": 11.786,
|
| 7825 |
+
"step": 1112
|
| 7826 |
+
},
|
| 7827 |
+
{
|
| 7828 |
+
"epoch": 0.05541517818245186,
|
| 7829 |
+
"grad_norm": 0.22605200111865997,
|
| 7830 |
+
"learning_rate": 1.8165254642299322e-05,
|
| 7831 |
+
"loss": 11.7191,
|
| 7832 |
+
"step": 1113
|
| 7833 |
+
},
|
| 7834 |
+
{
|
| 7835 |
+
"epoch": 0.055464967201483716,
|
| 7836 |
+
"grad_norm": 0.4807453751564026,
|
| 7837 |
+
"learning_rate": 1.8033678090530813e-05,
|
| 7838 |
+
"loss": 11.7977,
|
| 7839 |
+
"step": 1114
|
| 7840 |
+
},
|
| 7841 |
+
{
|
| 7842 |
+
"epoch": 0.05551475622051556,
|
| 7843 |
+
"grad_norm": 0.16972611844539642,
|
| 7844 |
+
"learning_rate": 1.790253255523465e-05,
|
| 7845 |
+
"loss": 11.8041,
|
| 7846 |
+
"step": 1115
|
| 7847 |
+
},
|
| 7848 |
+
{
|
| 7849 |
+
"epoch": 0.05556454523954742,
|
| 7850 |
+
"grad_norm": 0.22246819734573364,
|
| 7851 |
+
"learning_rate": 1.7771818726034105e-05,
|
| 7852 |
+
"loss": 11.8034,
|
| 7853 |
+
"step": 1116
|
| 7854 |
+
},
|
| 7855 |
+
{
|
| 7856 |
+
"epoch": 0.05561433425857927,
|
| 7857 |
+
"grad_norm": 0.14122234284877777,
|
| 7858 |
+
"learning_rate": 1.7641537290282472e-05,
|
| 7859 |
+
"loss": 11.8083,
|
| 7860 |
+
"step": 1117
|
| 7861 |
+
},
|
| 7862 |
+
{
|
| 7863 |
+
"epoch": 0.055664123277611124,
|
| 7864 |
+
"grad_norm": 0.16438360512256622,
|
| 7865 |
+
"learning_rate": 1.751168893305918e-05,
|
| 7866 |
+
"loss": 11.7967,
|
| 7867 |
+
"step": 1118
|
| 7868 |
+
},
|
| 7869 |
+
{
|
| 7870 |
+
"epoch": 0.05571391229664298,
|
| 7871 |
+
"grad_norm": 0.2635161578655243,
|
| 7872 |
+
"learning_rate": 1.7382274337166317e-05,
|
| 7873 |
+
"loss": 11.7686,
|
| 7874 |
+
"step": 1119
|
| 7875 |
+
},
|
| 7876 |
+
{
|
| 7877 |
+
"epoch": 0.055763701315674825,
|
| 7878 |
+
"grad_norm": 0.17216451466083527,
|
| 7879 |
+
"learning_rate": 1.7253294183125223e-05,
|
| 7880 |
+
"loss": 11.7726,
|
| 7881 |
+
"step": 1120
|
| 7882 |
+
},
|
| 7883 |
+
{
|
| 7884 |
+
"epoch": 0.05581349033470668,
|
| 7885 |
+
"grad_norm": 0.13563023507595062,
|
| 7886 |
+
"learning_rate": 1.712474914917259e-05,
|
| 7887 |
+
"loss": 11.8051,
|
| 7888 |
+
"step": 1121
|
| 7889 |
+
},
|
| 7890 |
+
{
|
| 7891 |
+
"epoch": 0.05586327935373853,
|
| 7892 |
+
"grad_norm": 0.17967721819877625,
|
| 7893 |
+
"learning_rate": 1.699663991125705e-05,
|
| 7894 |
+
"loss": 11.8047,
|
| 7895 |
+
"step": 1122
|
| 7896 |
+
},
|
| 7897 |
+
{
|
| 7898 |
+
"epoch": 0.05591306837277039,
|
| 7899 |
+
"grad_norm": 0.18859446048736572,
|
| 7900 |
+
"learning_rate": 1.6868967143035764e-05,
|
| 7901 |
+
"loss": 11.771,
|
| 7902 |
+
"step": 1123
|
| 7903 |
+
},
|
| 7904 |
+
{
|
| 7905 |
+
"epoch": 0.05596285739180224,
|
| 7906 |
+
"grad_norm": 0.12961724400520325,
|
| 7907 |
+
"learning_rate": 1.6741731515870594e-05,
|
| 7908 |
+
"loss": 11.8262,
|
| 7909 |
+
"step": 1124
|
| 7910 |
+
},
|
| 7911 |
+
{
|
| 7912 |
+
"epoch": 0.05601264641083409,
|
| 7913 |
+
"grad_norm": 0.20054270327091217,
|
| 7914 |
+
"learning_rate": 1.6614933698824732e-05,
|
| 7915 |
+
"loss": 11.8442,
|
| 7916 |
+
"step": 1125
|
| 7917 |
+
},
|
| 7918 |
+
{
|
| 7919 |
+
"epoch": 0.05606243542986594,
|
| 7920 |
+
"grad_norm": 0.3376655876636505,
|
| 7921 |
+
"learning_rate": 1.6488574358659214e-05,
|
| 7922 |
+
"loss": 11.7779,
|
| 7923 |
+
"step": 1126
|
| 7924 |
+
},
|
| 7925 |
+
{
|
| 7926 |
+
"epoch": 0.056112224448897796,
|
| 7927 |
+
"grad_norm": 0.149736687541008,
|
| 7928 |
+
"learning_rate": 1.636265415982936e-05,
|
| 7929 |
+
"loss": 11.8287,
|
| 7930 |
+
"step": 1127
|
| 7931 |
+
},
|
| 7932 |
+
{
|
| 7933 |
+
"epoch": 0.05616201346792965,
|
| 7934 |
+
"grad_norm": 0.23617173731327057,
|
| 7935 |
+
"learning_rate": 1.623717376448123e-05,
|
| 7936 |
+
"loss": 11.7246,
|
| 7937 |
+
"step": 1128
|
| 7938 |
+
},
|
| 7939 |
+
{
|
| 7940 |
+
"epoch": 0.056211802486961504,
|
| 7941 |
+
"grad_norm": 0.11661408841609955,
|
| 7942 |
+
"learning_rate": 1.611213383244824e-05,
|
| 7943 |
+
"loss": 11.8183,
|
| 7944 |
+
"step": 1129
|
| 7945 |
+
},
|
| 7946 |
+
{
|
| 7947 |
+
"epoch": 0.05626159150599335,
|
| 7948 |
+
"grad_norm": 0.1655798852443695,
|
| 7949 |
+
"learning_rate": 1.5987535021247667e-05,
|
| 7950 |
+
"loss": 11.8049,
|
| 7951 |
+
"step": 1130
|
| 7952 |
+
},
|
| 7953 |
+
{
|
| 7954 |
+
"epoch": 0.056311380525025205,
|
| 7955 |
+
"grad_norm": 0.2158203274011612,
|
| 7956 |
+
"learning_rate": 1.5863377986077087e-05,
|
| 7957 |
+
"loss": 11.7783,
|
| 7958 |
+
"step": 1131
|
| 7959 |
+
},
|
| 7960 |
+
{
|
| 7961 |
+
"epoch": 0.05636116954405706,
|
| 7962 |
+
"grad_norm": 0.14611691236495972,
|
| 7963 |
+
"learning_rate": 1.5739663379811122e-05,
|
| 7964 |
+
"loss": 11.7707,
|
| 7965 |
+
"step": 1132
|
| 7966 |
+
},
|
| 7967 |
+
{
|
| 7968 |
+
"epoch": 0.05641095856308891,
|
| 7969 |
+
"grad_norm": 0.14657485485076904,
|
| 7970 |
+
"learning_rate": 1.5616391852997835e-05,
|
| 7971 |
+
"loss": 11.8083,
|
| 7972 |
+
"step": 1133
|
| 7973 |
+
},
|
| 7974 |
+
{
|
| 7975 |
+
"epoch": 0.05646074758212076,
|
| 7976 |
+
"grad_norm": 0.2954987585544586,
|
| 7977 |
+
"learning_rate": 1.549356405385538e-05,
|
| 7978 |
+
"loss": 11.7684,
|
| 7979 |
+
"step": 1134
|
| 7980 |
+
},
|
| 7981 |
+
{
|
| 7982 |
+
"epoch": 0.056510536601152614,
|
| 7983 |
+
"grad_norm": 0.15783213078975677,
|
| 7984 |
+
"learning_rate": 1.5371180628268587e-05,
|
| 7985 |
+
"loss": 11.7951,
|
| 7986 |
+
"step": 1135
|
| 7987 |
+
},
|
| 7988 |
+
{
|
| 7989 |
+
"epoch": 0.05656032562018447,
|
| 7990 |
+
"grad_norm": 0.24370360374450684,
|
| 7991 |
+
"learning_rate": 1.524924221978563e-05,
|
| 7992 |
+
"loss": 11.7967,
|
| 7993 |
+
"step": 1136
|
| 7994 |
+
},
|
| 7995 |
+
{
|
| 7996 |
+
"epoch": 0.05661011463921632,
|
| 7997 |
+
"grad_norm": 0.20257197320461273,
|
| 7998 |
+
"learning_rate": 1.512774946961445e-05,
|
| 7999 |
+
"loss": 11.7978,
|
| 8000 |
+
"step": 1137
|
| 8001 |
+
},
|
| 8002 |
+
{
|
| 8003 |
+
"epoch": 0.056659903658248176,
|
| 8004 |
+
"grad_norm": 0.1553078144788742,
|
| 8005 |
+
"learning_rate": 1.500670301661966e-05,
|
| 8006 |
+
"loss": 11.8468,
|
| 8007 |
+
"step": 1138
|
| 8008 |
+
},
|
| 8009 |
+
{
|
| 8010 |
+
"epoch": 0.05670969267728002,
|
| 8011 |
+
"grad_norm": 0.16919642686843872,
|
| 8012 |
+
"learning_rate": 1.4886103497318904e-05,
|
| 8013 |
+
"loss": 11.8252,
|
| 8014 |
+
"step": 1139
|
| 8015 |
+
},
|
| 8016 |
+
{
|
| 8017 |
+
"epoch": 0.05675948169631188,
|
| 8018 |
+
"grad_norm": 0.1431400328874588,
|
| 8019 |
+
"learning_rate": 1.476595154587973e-05,
|
| 8020 |
+
"loss": 11.8014,
|
| 8021 |
+
"step": 1140
|
| 8022 |
+
},
|
| 8023 |
+
{
|
| 8024 |
+
"epoch": 0.05680927071534373,
|
| 8025 |
+
"grad_norm": 0.1831301897764206,
|
| 8026 |
+
"learning_rate": 1.4646247794116164e-05,
|
| 8027 |
+
"loss": 11.8,
|
| 8028 |
+
"step": 1141
|
| 8029 |
+
},
|
| 8030 |
+
{
|
| 8031 |
+
"epoch": 0.056859059734375585,
|
| 8032 |
+
"grad_norm": 0.12235870957374573,
|
| 8033 |
+
"learning_rate": 1.4526992871485345e-05,
|
| 8034 |
+
"loss": 11.8353,
|
| 8035 |
+
"step": 1142
|
| 8036 |
+
},
|
| 8037 |
+
{
|
| 8038 |
+
"epoch": 0.05690884875340744,
|
| 8039 |
+
"grad_norm": 0.1734725534915924,
|
| 8040 |
+
"learning_rate": 1.4408187405084317e-05,
|
| 8041 |
+
"loss": 11.7143,
|
| 8042 |
+
"step": 1143
|
| 8043 |
+
},
|
| 8044 |
+
{
|
| 8045 |
+
"epoch": 0.056958637772439286,
|
| 8046 |
+
"grad_norm": 0.16125091910362244,
|
| 8047 |
+
"learning_rate": 1.428983201964662e-05,
|
| 8048 |
+
"loss": 11.7846,
|
| 8049 |
+
"step": 1144
|
| 8050 |
+
},
|
| 8051 |
+
{
|
| 8052 |
+
"epoch": 0.05700842679147114,
|
| 8053 |
+
"grad_norm": 0.14326797425746918,
|
| 8054 |
+
"learning_rate": 1.4171927337539104e-05,
|
| 8055 |
+
"loss": 11.8175,
|
| 8056 |
+
"step": 1145
|
| 8057 |
+
},
|
| 8058 |
+
{
|
| 8059 |
+
"epoch": 0.057058215810502994,
|
| 8060 |
+
"grad_norm": 0.13907255232334137,
|
| 8061 |
+
"learning_rate": 1.405447397875863e-05,
|
| 8062 |
+
"loss": 11.8178,
|
| 8063 |
+
"step": 1146
|
| 8064 |
+
},
|
| 8065 |
+
{
|
| 8066 |
+
"epoch": 0.05710800482953485,
|
| 8067 |
+
"grad_norm": 0.19086763262748718,
|
| 8068 |
+
"learning_rate": 1.3937472560928733e-05,
|
| 8069 |
+
"loss": 11.818,
|
| 8070 |
+
"step": 1147
|
| 8071 |
+
},
|
| 8072 |
+
{
|
| 8073 |
+
"epoch": 0.0571577938485667,
|
| 8074 |
+
"grad_norm": 0.20695127546787262,
|
| 8075 |
+
"learning_rate": 1.3820923699296484e-05,
|
| 8076 |
+
"loss": 11.8039,
|
| 8077 |
+
"step": 1148
|
| 8078 |
+
},
|
| 8079 |
+
{
|
| 8080 |
+
"epoch": 0.05720758286759855,
|
| 8081 |
+
"grad_norm": 0.13429413735866547,
|
| 8082 |
+
"learning_rate": 1.370482800672922e-05,
|
| 8083 |
+
"loss": 11.8068,
|
| 8084 |
+
"step": 1149
|
| 8085 |
+
},
|
| 8086 |
+
{
|
| 8087 |
+
"epoch": 0.0572573718866304,
|
| 8088 |
+
"grad_norm": 0.23454822599887848,
|
| 8089 |
+
"learning_rate": 1.3589186093711226e-05,
|
| 8090 |
+
"loss": 11.7373,
|
| 8091 |
+
"step": 1150
|
| 8092 |
+
},
|
| 8093 |
+
{
|
| 8094 |
+
"epoch": 0.057307160905662256,
|
| 8095 |
+
"grad_norm": 0.16149325668811798,
|
| 8096 |
+
"learning_rate": 1.3473998568340718e-05,
|
| 8097 |
+
"loss": 11.8087,
|
| 8098 |
+
"step": 1151
|
| 8099 |
+
},
|
| 8100 |
+
{
|
| 8101 |
+
"epoch": 0.05735694992469411,
|
| 8102 |
+
"grad_norm": 0.17746388912200928,
|
| 8103 |
+
"learning_rate": 1.3359266036326412e-05,
|
| 8104 |
+
"loss": 11.8072,
|
| 8105 |
+
"step": 1152
|
| 8106 |
+
},
|
| 8107 |
+
{
|
| 8108 |
+
"epoch": 0.057406738943725964,
|
| 8109 |
+
"grad_norm": 0.14605437219142914,
|
| 8110 |
+
"learning_rate": 1.3244989100984573e-05,
|
| 8111 |
+
"loss": 11.8014,
|
| 8112 |
+
"step": 1153
|
| 8113 |
+
},
|
| 8114 |
+
{
|
| 8115 |
+
"epoch": 0.05745652796275781,
|
| 8116 |
+
"grad_norm": 0.13113057613372803,
|
| 8117 |
+
"learning_rate": 1.313116836323568e-05,
|
| 8118 |
+
"loss": 11.8027,
|
| 8119 |
+
"step": 1154
|
| 8120 |
+
},
|
| 8121 |
+
{
|
| 8122 |
+
"epoch": 0.057506316981789665,
|
| 8123 |
+
"grad_norm": 0.24898847937583923,
|
| 8124 |
+
"learning_rate": 1.3017804421601298e-05,
|
| 8125 |
+
"loss": 11.7922,
|
| 8126 |
+
"step": 1155
|
| 8127 |
+
},
|
| 8128 |
+
{
|
| 8129 |
+
"epoch": 0.05755610600082152,
|
| 8130 |
+
"grad_norm": 0.16602714359760284,
|
| 8131 |
+
"learning_rate": 1.290489787220096e-05,
|
| 8132 |
+
"loss": 11.7458,
|
| 8133 |
+
"step": 1156
|
| 8134 |
+
},
|
| 8135 |
+
{
|
| 8136 |
+
"epoch": 0.05760589501985337,
|
| 8137 |
+
"grad_norm": 0.1733303964138031,
|
| 8138 |
+
"learning_rate": 1.2792449308749076e-05,
|
| 8139 |
+
"loss": 11.8062,
|
| 8140 |
+
"step": 1157
|
| 8141 |
+
},
|
| 8142 |
+
{
|
| 8143 |
+
"epoch": 0.05765568403888523,
|
| 8144 |
+
"grad_norm": 0.24628570675849915,
|
| 8145 |
+
"learning_rate": 1.2680459322551652e-05,
|
| 8146 |
+
"loss": 11.7735,
|
| 8147 |
+
"step": 1158
|
| 8148 |
+
},
|
| 8149 |
+
{
|
| 8150 |
+
"epoch": 0.057705473057917074,
|
| 8151 |
+
"grad_norm": 0.27397260069847107,
|
| 8152 |
+
"learning_rate": 1.2568928502503374e-05,
|
| 8153 |
+
"loss": 11.7583,
|
| 8154 |
+
"step": 1159
|
| 8155 |
+
},
|
| 8156 |
+
{
|
| 8157 |
+
"epoch": 0.05775526207694893,
|
| 8158 |
+
"grad_norm": 0.2194259911775589,
|
| 8159 |
+
"learning_rate": 1.2457857435084408e-05,
|
| 8160 |
+
"loss": 11.7857,
|
| 8161 |
+
"step": 1160
|
| 8162 |
+
},
|
| 8163 |
+
{
|
| 8164 |
+
"epoch": 0.05780505109598078,
|
| 8165 |
+
"grad_norm": 0.21891357004642487,
|
| 8166 |
+
"learning_rate": 1.2347246704357263e-05,
|
| 8167 |
+
"loss": 11.8021,
|
| 8168 |
+
"step": 1161
|
| 8169 |
+
},
|
| 8170 |
+
{
|
| 8171 |
+
"epoch": 0.057854840115012636,
|
| 8172 |
+
"grad_norm": 0.10935965180397034,
|
| 8173 |
+
"learning_rate": 1.2237096891963862e-05,
|
| 8174 |
+
"loss": 11.8231,
|
| 8175 |
+
"step": 1162
|
| 8176 |
+
},
|
| 8177 |
+
{
|
| 8178 |
+
"epoch": 0.05790462913404449,
|
| 8179 |
+
"grad_norm": 0.16666249930858612,
|
| 8180 |
+
"learning_rate": 1.21274085771224e-05,
|
| 8181 |
+
"loss": 11.8314,
|
| 8182 |
+
"step": 1163
|
| 8183 |
+
},
|
| 8184 |
+
{
|
| 8185 |
+
"epoch": 0.05795441815307634,
|
| 8186 |
+
"grad_norm": 0.509964644908905,
|
| 8187 |
+
"learning_rate": 1.2018182336624273e-05,
|
| 8188 |
+
"loss": 11.757,
|
| 8189 |
+
"step": 1164
|
| 8190 |
+
},
|
| 8191 |
+
{
|
| 8192 |
+
"epoch": 0.05800420717210819,
|
| 8193 |
+
"grad_norm": 0.2945554852485657,
|
| 8194 |
+
"learning_rate": 1.1909418744831047e-05,
|
| 8195 |
+
"loss": 11.7879,
|
| 8196 |
+
"step": 1165
|
| 8197 |
+
},
|
| 8198 |
+
{
|
| 8199 |
+
"epoch": 0.058053996191140045,
|
| 8200 |
+
"grad_norm": 0.12194978445768356,
|
| 8201 |
+
"learning_rate": 1.1801118373671616e-05,
|
| 8202 |
+
"loss": 11.8283,
|
| 8203 |
+
"step": 1166
|
| 8204 |
+
},
|
| 8205 |
+
{
|
| 8206 |
+
"epoch": 0.0581037852101719,
|
| 8207 |
+
"grad_norm": 0.162436380982399,
|
| 8208 |
+
"learning_rate": 1.1693281792638877e-05,
|
| 8209 |
+
"loss": 11.7826,
|
| 8210 |
+
"step": 1167
|
| 8211 |
+
},
|
| 8212 |
+
{
|
| 8213 |
+
"epoch": 0.058153574229203746,
|
| 8214 |
+
"grad_norm": 0.17865686118602753,
|
| 8215 |
+
"learning_rate": 1.1585909568786957e-05,
|
| 8216 |
+
"loss": 11.8227,
|
| 8217 |
+
"step": 1168
|
| 8218 |
+
},
|
| 8219 |
+
{
|
| 8220 |
+
"epoch": 0.0582033632482356,
|
| 8221 |
+
"grad_norm": 0.17990146577358246,
|
| 8222 |
+
"learning_rate": 1.147900226672829e-05,
|
| 8223 |
+
"loss": 11.7879,
|
| 8224 |
+
"step": 1169
|
| 8225 |
+
},
|
| 8226 |
+
{
|
| 8227 |
+
"epoch": 0.058253152267267454,
|
| 8228 |
+
"grad_norm": 0.11139846593141556,
|
| 8229 |
+
"learning_rate": 1.1372560448630376e-05,
|
| 8230 |
+
"loss": 11.837,
|
| 8231 |
+
"step": 1170
|
| 8232 |
+
},
|
| 8233 |
+
{
|
| 8234 |
+
"epoch": 0.05830294128629931,
|
| 8235 |
+
"grad_norm": 0.2019157111644745,
|
| 8236 |
+
"learning_rate": 1.1266584674213043e-05,
|
| 8237 |
+
"loss": 11.807,
|
| 8238 |
+
"step": 1171
|
| 8239 |
+
},
|
| 8240 |
+
{
|
| 8241 |
+
"epoch": 0.05835273030533116,
|
| 8242 |
+
"grad_norm": 0.1705912947654724,
|
| 8243 |
+
"learning_rate": 1.1161075500745543e-05,
|
| 8244 |
+
"loss": 11.7962,
|
| 8245 |
+
"step": 1172
|
| 8246 |
+
},
|
| 8247 |
+
{
|
| 8248 |
+
"epoch": 0.05840251932436301,
|
| 8249 |
+
"grad_norm": 0.1365358978509903,
|
| 8250 |
+
"learning_rate": 1.1056033483043404e-05,
|
| 8251 |
+
"loss": 11.827,
|
| 8252 |
+
"step": 1173
|
| 8253 |
+
},
|
| 8254 |
+
{
|
| 8255 |
+
"epoch": 0.05845230834339486,
|
| 8256 |
+
"grad_norm": 0.30524614453315735,
|
| 8257 |
+
"learning_rate": 1.0951459173465629e-05,
|
| 8258 |
+
"loss": 11.782,
|
| 8259 |
+
"step": 1174
|
| 8260 |
+
},
|
| 8261 |
+
{
|
| 8262 |
+
"epoch": 0.05850209736242672,
|
| 8263 |
+
"grad_norm": 0.1527833789587021,
|
| 8264 |
+
"learning_rate": 1.0847353121911951e-05,
|
| 8265 |
+
"loss": 11.7987,
|
| 8266 |
+
"step": 1175
|
| 8267 |
+
},
|
| 8268 |
+
{
|
| 8269 |
+
"epoch": 0.05855188638145857,
|
| 8270 |
+
"grad_norm": 0.15209515392780304,
|
| 8271 |
+
"learning_rate": 1.0743715875819616e-05,
|
| 8272 |
+
"loss": 11.8091,
|
| 8273 |
+
"step": 1176
|
| 8274 |
+
},
|
| 8275 |
+
{
|
| 8276 |
+
"epoch": 0.058601675400490424,
|
| 8277 |
+
"grad_norm": 0.2607868015766144,
|
| 8278 |
+
"learning_rate": 1.0640547980160742e-05,
|
| 8279 |
+
"loss": 11.7696,
|
| 8280 |
+
"step": 1177
|
| 8281 |
+
},
|
| 8282 |
+
{
|
| 8283 |
+
"epoch": 0.05865146441952227,
|
| 8284 |
+
"grad_norm": 0.3347407281398773,
|
| 8285 |
+
"learning_rate": 1.0537849977439407e-05,
|
| 8286 |
+
"loss": 11.8151,
|
| 8287 |
+
"step": 1178
|
| 8288 |
+
},
|
| 8289 |
+
{
|
| 8290 |
+
"epoch": 0.058701253438554125,
|
| 8291 |
+
"grad_norm": 0.1564679741859436,
|
| 8292 |
+
"learning_rate": 1.0435622407688773e-05,
|
| 8293 |
+
"loss": 11.787,
|
| 8294 |
+
"step": 1179
|
| 8295 |
+
},
|
| 8296 |
+
{
|
| 8297 |
+
"epoch": 0.05875104245758598,
|
| 8298 |
+
"grad_norm": 0.11521822214126587,
|
| 8299 |
+
"learning_rate": 1.0333865808468202e-05,
|
| 8300 |
+
"loss": 11.8119,
|
| 8301 |
+
"step": 1180
|
| 8302 |
+
},
|
| 8303 |
+
{
|
| 8304 |
+
"epoch": 0.05880083147661783,
|
| 8305 |
+
"grad_norm": 0.1461268663406372,
|
| 8306 |
+
"learning_rate": 1.023258071486053e-05,
|
| 8307 |
+
"loss": 11.7931,
|
| 8308 |
+
"step": 1181
|
| 8309 |
+
},
|
| 8310 |
+
{
|
| 8311 |
+
"epoch": 0.05885062049564969,
|
| 8312 |
+
"grad_norm": 0.10272305458784103,
|
| 8313 |
+
"learning_rate": 1.0131767659469205e-05,
|
| 8314 |
+
"loss": 11.8314,
|
| 8315 |
+
"step": 1182
|
| 8316 |
+
},
|
| 8317 |
+
{
|
| 8318 |
+
"epoch": 0.058900409514681534,
|
| 8319 |
+
"grad_norm": 0.1354694664478302,
|
| 8320 |
+
"learning_rate": 1.0031427172415397e-05,
|
| 8321 |
+
"loss": 11.8289,
|
| 8322 |
+
"step": 1183
|
| 8323 |
+
},
|
| 8324 |
+
{
|
| 8325 |
+
"epoch": 0.05895019853371339,
|
| 8326 |
+
"grad_norm": 0.12800168991088867,
|
| 8327 |
+
"learning_rate": 9.93155978133541e-06,
|
| 8328 |
+
"loss": 11.7967,
|
| 8329 |
+
"step": 1184
|
| 8330 |
+
},
|
| 8331 |
+
{
|
| 8332 |
+
"epoch": 0.05899998755274524,
|
| 8333 |
+
"grad_norm": 0.48002907633781433,
|
| 8334 |
+
"learning_rate": 9.83216601137773e-06,
|
| 8335 |
+
"loss": 11.6783,
|
| 8336 |
+
"step": 1185
|
| 8337 |
+
},
|
| 8338 |
+
{
|
| 8339 |
+
"epoch": 0.059049776571777096,
|
| 8340 |
+
"grad_norm": 0.14327523112297058,
|
| 8341 |
+
"learning_rate": 9.733246385200313e-06,
|
| 8342 |
+
"loss": 11.8003,
|
| 8343 |
+
"step": 1186
|
| 8344 |
+
},
|
| 8345 |
+
{
|
| 8346 |
+
"epoch": 0.05909956559080895,
|
| 8347 |
+
"grad_norm": 0.16317032277584076,
|
| 8348 |
+
"learning_rate": 9.634801422967887e-06,
|
| 8349 |
+
"loss": 11.807,
|
| 8350 |
+
"step": 1187
|
| 8351 |
+
},
|
| 8352 |
+
{
|
| 8353 |
+
"epoch": 0.0591493546098408,
|
| 8354 |
+
"grad_norm": 0.15527381002902985,
|
| 8355 |
+
"learning_rate": 9.536831642349187e-06,
|
| 8356 |
+
"loss": 11.7985,
|
| 8357 |
+
"step": 1188
|
| 8358 |
+
},
|
| 8359 |
+
{
|
| 8360 |
+
"epoch": 0.05919914362887265,
|
| 8361 |
+
"grad_norm": 0.1702418476343155,
|
| 8362 |
+
"learning_rate": 9.439337558514161e-06,
|
| 8363 |
+
"loss": 11.8187,
|
| 8364 |
+
"step": 1189
|
| 8365 |
+
},
|
| 8366 |
+
{
|
| 8367 |
+
"epoch": 0.059248932647904505,
|
| 8368 |
+
"grad_norm": 0.14685283601284027,
|
| 8369 |
+
"learning_rate": 9.342319684131395e-06,
|
| 8370 |
+
"loss": 11.8248,
|
| 8371 |
+
"step": 1190
|
| 8372 |
+
},
|
| 8373 |
+
{
|
| 8374 |
+
"epoch": 0.05929872166693636,
|
| 8375 |
+
"grad_norm": 0.17611870169639587,
|
| 8376 |
+
"learning_rate": 9.2457785293653e-06,
|
| 8377 |
+
"loss": 11.7965,
|
| 8378 |
+
"step": 1191
|
| 8379 |
+
},
|
| 8380 |
+
{
|
| 8381 |
+
"epoch": 0.05934851068596821,
|
| 8382 |
+
"grad_norm": 0.2513555586338043,
|
| 8383 |
+
"learning_rate": 9.149714601873516e-06,
|
| 8384 |
+
"loss": 11.7584,
|
| 8385 |
+
"step": 1192
|
| 8386 |
+
+    },
+    {
+      "epoch": 0.05939829970500006,
+      "grad_norm": 0.1695035994052887,
+      "learning_rate": 9.054128406804185e-06,
+      "loss": 11.7994,
+      "step": 1193
+    },
+    {
+      "epoch": 0.059448088724031914,
+      "grad_norm": 0.14906394481658936,
+      "learning_rate": 8.959020446793288e-06,
+      "loss": 11.8164,
+      "step": 1194
+    },
+    {
+      "epoch": 0.05949787774306377,
+      "grad_norm": 0.1861778050661087,
+      "learning_rate": 8.864391221962065e-06,
+      "loss": 11.7763,
+      "step": 1195
+    },
+    {
+      "epoch": 0.05954766676209562,
+      "grad_norm": 0.20370015501976013,
+      "learning_rate": 8.770241229914356e-06,
+      "loss": 11.7964,
+      "step": 1196
+    },
+    {
+      "epoch": 0.059597455781127476,
+      "grad_norm": 0.2808484733104706,
+      "learning_rate": 8.67657096573391e-06,
+      "loss": 11.7856,
+      "step": 1197
+    },
+    {
+      "epoch": 0.05964724480015932,
+      "grad_norm": 0.38494062423706055,
+      "learning_rate": 8.583380921981931e-06,
+      "loss": 11.7022,
+      "step": 1198
+    },
+    {
+      "epoch": 0.05969703381919118,
+      "grad_norm": 0.2538752853870392,
+      "learning_rate": 8.490671588694333e-06,
+      "loss": 11.8367,
+      "step": 1199
+    },
+    {
+      "epoch": 0.05974682283822303,
+      "grad_norm": 0.20636320114135742,
+      "learning_rate": 8.398443453379267e-06,
+      "loss": 11.7952,
+      "step": 1200
+    },
+    {
+      "epoch": 0.059796611857254885,
+      "grad_norm": 0.20967461168766022,
+      "learning_rate": 8.306697001014552e-06,
+      "loss": 11.7329,
+      "step": 1201
+    },
+    {
+      "epoch": 0.05984640087628673,
+      "grad_norm": 0.21113517880439758,
+      "learning_rate": 8.215432714045024e-06,
+      "loss": 11.8077,
+      "step": 1202
+    },
+    {
+      "epoch": 0.059896189895318586,
+      "grad_norm": 0.18404564261436462,
+      "learning_rate": 8.124651072380152e-06,
+      "loss": 11.8315,
+      "step": 1203
+    },
+    {
+      "epoch": 0.05994597891435044,
+      "grad_norm": 0.1226145401597023,
+      "learning_rate": 8.034352553391367e-06,
+      "loss": 11.8285,
+      "step": 1204
+    },
+    {
+      "epoch": 0.059995767933382294,
+      "grad_norm": 0.1711537092924118,
+      "learning_rate": 7.944537631909665e-06,
+      "loss": 11.8056,
+      "step": 1205
+    },
+    {
+      "epoch": 0.06004555695241415,
+      "grad_norm": 0.13307708501815796,
+      "learning_rate": 7.855206780223056e-06,
+      "loss": 11.8217,
+      "step": 1206
+    },
+    {
+      "epoch": 0.060095345971445994,
+      "grad_norm": 0.14825309813022614,
+      "learning_rate": 7.766360468074074e-06,
+      "loss": 11.8133,
+      "step": 1207
+    },
+    {
+      "epoch": 0.06014513499047785,
+      "grad_norm": 0.13985204696655273,
+      "learning_rate": 7.677999162657335e-06,
+      "loss": 11.7968,
+      "step": 1208
+    },
+    {
+      "epoch": 0.0601949240095097,
+      "grad_norm": 0.15342532098293304,
+      "learning_rate": 7.5901233286170825e-06,
+      "loss": 11.8002,
+      "step": 1209
+    },
+    {
+      "epoch": 0.060244713028541556,
+      "grad_norm": 0.28368210792541504,
+      "learning_rate": 7.502733428044683e-06,
+      "loss": 11.7454,
+      "step": 1210
+    },
+    {
+      "epoch": 0.06029450204757341,
+      "grad_norm": 0.13783149421215057,
+      "learning_rate": 7.41582992047628e-06,
+      "loss": 11.741,
+      "step": 1211
+    },
+    {
+      "epoch": 0.06034429106660526,
+      "grad_norm": 0.18922853469848633,
+      "learning_rate": 7.32941326289035e-06,
+      "loss": 11.8135,
+      "step": 1212
+    },
+    {
+      "epoch": 0.06039408008563711,
+      "grad_norm": 0.12326914072036743,
+      "learning_rate": 7.243483909705229e-06,
+      "loss": 11.8214,
+      "step": 1213
+    },
+    {
+      "epoch": 0.060443869104668965,
+      "grad_norm": 0.21751324832439423,
+      "learning_rate": 7.158042312776847e-06,
+      "loss": 11.797,
+      "step": 1214
+    },
+    {
+      "epoch": 0.06049365812370082,
+      "grad_norm": 0.23689574003219604,
+      "learning_rate": 7.073088921396287e-06,
+      "loss": 11.7536,
+      "step": 1215
+    },
+    {
+      "epoch": 0.06054344714273267,
+      "grad_norm": 0.19089733064174652,
+      "learning_rate": 6.988624182287362e-06,
+      "loss": 11.7794,
+      "step": 1216
+    },
+    {
+      "epoch": 0.06059323616176452,
+      "grad_norm": 0.2251942753791809,
+      "learning_rate": 6.904648539604364e-06,
+      "loss": 11.7984,
+      "step": 1217
+    },
+    {
+      "epoch": 0.060643025180796374,
+      "grad_norm": 0.14975538849830627,
+      "learning_rate": 6.82116243492974e-06,
+      "loss": 11.7945,
+      "step": 1218
+    },
+    {
+      "epoch": 0.06069281419982823,
+      "grad_norm": 0.1450999677181244,
+      "learning_rate": 6.738166307271643e-06,
+      "loss": 11.8085,
+      "step": 1219
+    },
+    {
+      "epoch": 0.06074260321886008,
+      "grad_norm": 0.1778717190027237,
+      "learning_rate": 6.655660593061719e-06,
+      "loss": 11.7916,
+      "step": 1220
+    },
+    {
+      "epoch": 0.060792392237891936,
+      "grad_norm": 0.21637047827243805,
+      "learning_rate": 6.573645726152866e-06,
+      "loss": 11.8062,
+      "step": 1221
+    },
+    {
+      "epoch": 0.06084218125692378,
+      "grad_norm": 0.2620675265789032,
+      "learning_rate": 6.4921221378167915e-06,
+      "loss": 11.7433,
+      "step": 1222
+    },
+    {
+      "epoch": 0.06089197027595564,
+      "grad_norm": 0.14118359982967377,
+      "learning_rate": 6.411090256741847e-06,
+      "loss": 11.7837,
+      "step": 1223
+    },
+    {
+      "epoch": 0.06094175929498749,
+      "grad_norm": 0.13946916162967682,
+      "learning_rate": 6.330550509030852e-06,
+      "loss": 11.8063,
+      "step": 1224
+    },
+    {
+      "epoch": 0.060991548314019345,
+      "grad_norm": 0.22096039354801178,
+      "learning_rate": 6.250503318198664e-06,
+      "loss": 11.7695,
+      "step": 1225
+    },
+    {
+      "epoch": 0.0610413373330512,
+      "grad_norm": 0.17125830054283142,
+      "learning_rate": 6.170949105170043e-06,
+      "loss": 11.7887,
+      "step": 1226
+    },
+    {
+      "epoch": 0.061091126352083046,
+      "grad_norm": 0.12528559565544128,
+      "learning_rate": 6.091888288277569e-06,
+      "loss": 11.8334,
+      "step": 1227
+    },
+    {
+      "epoch": 0.0611409153711149,
+      "grad_norm": 0.1502029001712799,
+      "learning_rate": 6.013321283259199e-06,
+      "loss": 11.8316,
+      "step": 1228
+    },
+    {
+      "epoch": 0.061190704390146754,
+      "grad_norm": 0.1485135555267334,
+      "learning_rate": 5.935248503256219e-06,
+      "loss": 11.806,
+      "step": 1229
+    },
+    {
+      "epoch": 0.06124049340917861,
+      "grad_norm": 0.23023825883865356,
+      "learning_rate": 5.857670358811096e-06,
+      "loss": 11.742,
+      "step": 1230
+    },
+    {
+      "epoch": 0.06129028242821046,
+      "grad_norm": 0.21136432886123657,
+      "learning_rate": 5.780587257865266e-06,
+      "loss": 11.8252,
+      "step": 1231
+    },
+    {
+      "epoch": 0.06134007144724231,
+      "grad_norm": 0.18180683255195618,
+      "learning_rate": 5.70399960575696e-06,
+      "loss": 11.8149,
+      "step": 1232
+    },
+    {
+      "epoch": 0.06138986046627416,
+      "grad_norm": 0.12026200443506241,
+      "learning_rate": 5.627907805219168e-06,
+      "loss": 11.7947,
+      "step": 1233
+    },
+    {
+      "epoch": 0.06143964948530602,
+      "grad_norm": 0.3022720515727997,
+      "learning_rate": 5.552312256377423e-06,
+      "loss": 11.7993,
+      "step": 1234
+    },
+    {
+      "epoch": 0.06148943850433787,
+      "grad_norm": 0.1529982089996338,
+      "learning_rate": 5.477213356747746e-06,
+      "loss": 11.7681,
+      "step": 1235
+    },
+    {
+      "epoch": 0.06153922752336972,
+      "grad_norm": 0.2752259075641632,
+      "learning_rate": 5.402611501234578e-06,
+      "loss": 11.7955,
+      "step": 1236
+    },
+    {
+      "epoch": 0.06158901654240157,
+      "grad_norm": 0.14768163859844208,
+      "learning_rate": 5.328507082128642e-06,
+      "loss": 11.8368,
+      "step": 1237
+    },
+    {
+      "epoch": 0.061638805561433425,
+      "grad_norm": 0.1358931064605713,
+      "learning_rate": 5.254900489104919e-06,
+      "loss": 11.8239,
+      "step": 1238
+    },
+    {
+      "epoch": 0.06168859458046528,
+      "grad_norm": 0.157319113612175,
+      "learning_rate": 5.1817921092205955e-06,
+      "loss": 11.8053,
+      "step": 1239
+    },
+    {
+      "epoch": 0.06173838359949713,
+      "grad_norm": 0.1696845442056656,
+      "learning_rate": 5.109182326913054e-06,
+      "loss": 11.8271,
+      "step": 1240
+    },
+    {
+      "epoch": 0.06178817261852898,
+      "grad_norm": 0.12255169451236725,
+      "learning_rate": 5.0370715239977495e-06,
+      "loss": 11.8071,
+      "step": 1241
+    },
+    {
+      "epoch": 0.061837961637560834,
+      "grad_norm": 0.2727164924144745,
+      "learning_rate": 4.965460079666362e-06,
+      "loss": 11.7886,
+      "step": 1242
+    },
+    {
+      "epoch": 0.06188775065659269,
+      "grad_norm": 0.1132870465517044,
+      "learning_rate": 4.8943483704846475e-06,
+      "loss": 11.8142,
+      "step": 1243
+    },
+    {
+      "epoch": 0.06193753967562454,
+      "grad_norm": 0.14206351339817047,
+      "learning_rate": 4.823736770390552e-06,
+      "loss": 11.8085,
+      "step": 1244
+    },
+    {
+      "epoch": 0.061987328694656396,
+      "grad_norm": 0.28197649121284485,
+      "learning_rate": 4.7536256506922506e-06,
+      "loss": 11.806,
+      "step": 1245
+    },
+    {
+      "epoch": 0.06203711771368824,
+      "grad_norm": 0.17642013728618622,
+      "learning_rate": 4.684015380066087e-06,
+      "loss": 11.8373,
+      "step": 1246
+    },
+    {
+      "epoch": 0.0620869067327201,
+      "grad_norm": 0.18471746146678925,
+      "learning_rate": 4.61490632455478e-06,
+      "loss": 11.7942,
+      "step": 1247
+    },
+    {
+      "epoch": 0.06213669575175195,
+      "grad_norm": 0.21035756170749664,
+      "learning_rate": 4.546298847565411e-06,
+      "loss": 11.7347,
+      "step": 1248
+    },
+    {
+      "epoch": 0.062186484770783805,
+      "grad_norm": 0.2133786678314209,
+      "learning_rate": 4.4781933098674955e-06,
+      "loss": 11.8266,
+      "step": 1249
+    },
+    {
+      "epoch": 0.06223627378981566,
+      "grad_norm": 0.23482516407966614,
+      "learning_rate": 4.410590069591192e-06,
+      "loss": 11.7782,
+      "step": 1250
+    },
+    {
+      "epoch": 0.062286062808847506,
+      "grad_norm": 0.1743435114622116,
+      "learning_rate": 4.3434894822252826e-06,
+      "loss": 11.7312,
+      "step": 1251
+    },
+    {
+      "epoch": 0.06233585182787936,
+      "grad_norm": 0.15730774402618408,
+      "learning_rate": 4.2768919006153876e-06,
+      "loss": 11.8152,
+      "step": 1252
+    },
+    {
+      "epoch": 0.062385640846911214,
+      "grad_norm": 0.14673897624015808,
+      "learning_rate": 4.2107976749621416e-06,
+      "loss": 11.8289,
+      "step": 1253
+    },
+    {
+      "epoch": 0.06243542986594307,
+      "grad_norm": 0.12018892168998718,
+      "learning_rate": 4.14520715281923e-06,
+      "loss": 11.8018,
+      "step": 1254
+    },
+    {
+      "epoch": 0.06248521888497492,
+      "grad_norm": 0.16781073808670044,
+      "learning_rate": 4.0801206790916814e-06,
+      "loss": 11.7999,
+      "step": 1255
+    },
+    {
+      "epoch": 0.06253500790400678,
+      "grad_norm": 0.32293230295181274,
+      "learning_rate": 4.015538596033974e-06,
+      "loss": 11.7809,
+      "step": 1256
+    },
+    {
+      "epoch": 0.06258479692303863,
+      "grad_norm": 0.2565004825592041,
+      "learning_rate": 3.951461243248311e-06,
+      "loss": 11.7569,
+      "step": 1257
+    },
+    {
+      "epoch": 0.06263458594207047,
+      "grad_norm": 0.14128616452217102,
+      "learning_rate": 3.887888957682772e-06,
+      "loss": 11.8277,
+      "step": 1258
+    },
+    {
+      "epoch": 0.06268437496110232,
+      "grad_norm": 0.15104006230831146,
+      "learning_rate": 3.82482207362953e-06,
+      "loss": 11.804,
+      "step": 1259
+    },
+    {
+      "epoch": 0.06273416398013418,
+      "grad_norm": 0.15629109740257263,
+      "learning_rate": 3.7622609227231818e-06,
+      "loss": 11.7545,
+      "step": 1260
+    },
+    {
+      "epoch": 0.06278395299916603,
+      "grad_norm": 0.1471661925315857,
+      "learning_rate": 3.700205833938919e-06,
+      "loss": 11.8132,
+      "step": 1261
+    },
+    {
+      "epoch": 0.06283374201819789,
+      "grad_norm": 0.14156818389892578,
+      "learning_rate": 3.638657133590817e-06,
+      "loss": 11.8142,
+      "step": 1262
+    },
+    {
+      "epoch": 0.06288353103722974,
+      "grad_norm": 0.15864253044128418,
+      "learning_rate": 3.5776151453301265e-06,
+      "loss": 11.7972,
+      "step": 1263
+    },
+    {
+      "epoch": 0.0629333200562616,
+      "grad_norm": 0.3222000300884247,
+      "learning_rate": 3.517080190143629e-06,
+      "loss": 11.8109,
+      "step": 1264
+    },
+    {
+      "epoch": 0.06298310907529345,
+      "grad_norm": 0.17842133343219757,
+      "learning_rate": 3.4570525863518165e-06,
+      "loss": 11.7952,
+      "step": 1265
+    },
+    {
+      "epoch": 0.0630328980943253,
+      "grad_norm": 0.166460782289505,
+      "learning_rate": 3.3975326496073377e-06,
+      "loss": 11.7911,
+      "step": 1266
+    },
+    {
+      "epoch": 0.06308268711335716,
+      "grad_norm": 0.1349351853132248,
+      "learning_rate": 3.3385206928933097e-06,
+      "loss": 11.8201,
+      "step": 1267
+    },
+    {
+      "epoch": 0.063132476132389,
+      "grad_norm": 0.25642573833465576,
+      "learning_rate": 3.280017026521598e-06,
+      "loss": 11.766,
+      "step": 1268
+    },
+    {
+      "epoch": 0.06318226515142085,
+      "grad_norm": 0.19068282842636108,
+      "learning_rate": 3.222021958131316e-06,
+      "loss": 11.7466,
+      "step": 1269
+    },
+    {
+      "epoch": 0.0632320541704527,
+      "grad_norm": 0.1922597438097,
+      "learning_rate": 3.1645357926870955e-06,
+      "loss": 11.8131,
+      "step": 1270
+    },
+    {
+      "epoch": 0.06328184318948456,
+      "grad_norm": 0.15186841785907745,
+      "learning_rate": 3.1075588324775198e-06,
+      "loss": 11.8211,
+      "step": 1271
+    },
+    {
+      "epoch": 0.06333163220851641,
+      "grad_norm": 0.18055717647075653,
+      "learning_rate": 3.0510913771135463e-06,
+      "loss": 11.7712,
+      "step": 1272
+    },
+    {
+      "epoch": 0.06338142122754827,
+      "grad_norm": 0.2007443755865097,
+      "learning_rate": 2.995133723526944e-06,
+      "loss": 11.8153,
+      "step": 1273
+    },
+    {
+      "epoch": 0.06343121024658012,
+      "grad_norm": 0.24786053597927094,
+      "learning_rate": 2.9396861659686915e-06,
+      "loss": 11.7832,
+      "step": 1274
+    },
+    {
+      "epoch": 0.06348099926561197,
+      "grad_norm": 0.22735446691513062,
+      "learning_rate": 2.8847489960074136e-06,
+      "loss": 11.7468,
+      "step": 1275
+    },
+    {
+      "epoch": 0.06353078828464383,
+      "grad_norm": 0.21603840589523315,
+      "learning_rate": 2.8303225025279712e-06,
+      "loss": 11.7641,
+      "step": 1276
+    },
+    {
+      "epoch": 0.06358057730367568,
+      "grad_norm": 0.12813854217529297,
+      "learning_rate": 2.7764069717297724e-06,
+      "loss": 11.8233,
+      "step": 1277
+    },
+    {
+      "epoch": 0.06363036632270752,
+      "grad_norm": 0.21261170506477356,
+      "learning_rate": 2.7230026871253534e-06,
+      "loss": 11.6896,
+      "step": 1278
+    },
+    {
+      "epoch": 0.06368015534173938,
+      "grad_norm": 0.14187957346439362,
+      "learning_rate": 2.6701099295389666e-06,
+      "loss": 11.8089,
+      "step": 1279
+    },
+    {
+      "epoch": 0.06372994436077123,
+      "grad_norm": 0.14367730915546417,
+      "learning_rate": 2.6177289771049274e-06,
+      "loss": 11.8024,
+      "step": 1280
+    },
+    {
+      "epoch": 0.06377973337980308,
+      "grad_norm": 0.18137389421463013,
+      "learning_rate": 2.5658601052662825e-06,
+      "loss": 11.8067,
+      "step": 1281
+    },
+    {
+      "epoch": 0.06382952239883494,
+      "grad_norm": 0.17115594446659088,
+      "learning_rate": 2.5145035867733312e-06,
+      "loss": 11.7795,
+      "step": 1282
+    },
+    {
+      "epoch": 0.06387931141786679,
+      "grad_norm": 0.1856398731470108,
+      "learning_rate": 2.4636596916821853e-06,
+      "loss": 11.7799,
+      "step": 1283
+    },
+    {
+      "epoch": 0.06392910043689864,
+      "grad_norm": 0.22978371381759644,
+      "learning_rate": 2.4133286873533112e-06,
+      "loss": 11.7418,
+      "step": 1284
+    },
+    {
+      "epoch": 0.0639788894559305,
+      "grad_norm": 0.22722041606903076,
+      "learning_rate": 2.3635108384502e-06,
+      "loss": 11.7716,
+      "step": 1285
+    },
+    {
+      "epoch": 0.06402867847496235,
+      "grad_norm": 0.19082805514335632,
+      "learning_rate": 2.3142064069379466e-06,
+      "loss": 11.7825,
+      "step": 1286
+    },
+    {
+      "epoch": 0.06407846749399419,
+      "grad_norm": 0.18441222608089447,
+      "learning_rate": 2.265415652081804e-06,
+      "loss": 11.8302,
+      "step": 1287
+    },
+    {
+      "epoch": 0.06412825651302605,
+      "grad_norm": 0.16945794224739075,
+      "learning_rate": 2.2171388304459416e-06,
+      "loss": 11.7562,
+      "step": 1288
+    },
+    {
+      "epoch": 0.0641780455320579,
+      "grad_norm": 0.18082420527935028,
+      "learning_rate": 2.1693761958920033e-06,
+      "loss": 11.8047,
+      "step": 1289
+    },
+    {
+      "epoch": 0.06422783455108975,
+      "grad_norm": 0.13348349928855896,
+      "learning_rate": 2.122127999577783e-06,
+      "loss": 11.8187,
+      "step": 1290
+    },
+    {
+      "epoch": 0.06427762357012161,
+      "grad_norm": 0.21678856015205383,
+      "learning_rate": 2.0753944899559507e-06,
+      "loss": 11.7401,
+      "step": 1291
+    },
+    {
+      "epoch": 0.06432741258915346,
+      "grad_norm": 0.16920457780361176,
+      "learning_rate": 2.0291759127727294e-06,
+      "loss": 11.8164,
+      "step": 1292
+    },
+    {
+      "epoch": 0.06437720160818532,
+      "grad_norm": 0.2036391645669937,
+      "learning_rate": 1.983472511066542e-06,
+      "loss": 11.7787,
+      "step": 1293
+    },
+    {
+      "epoch": 0.06442699062721717,
+      "grad_norm": 0.16807925701141357,
+      "learning_rate": 1.9382845251668335e-06,
+      "loss": 11.7749,
+      "step": 1294
+    },
+    {
+      "epoch": 0.06447677964624902,
+      "grad_norm": 0.3364085257053375,
+      "learning_rate": 1.893612192692751e-06,
+      "loss": 11.8033,
+      "step": 1295
+    },
+    {
+      "epoch": 0.06452656866528088,
+      "grad_norm": 0.14825333654880524,
+      "learning_rate": 1.8494557485518426e-06,
+      "loss": 11.8268,
+      "step": 1296
+    },
+    {
+      "epoch": 0.06457635768431272,
+      "grad_norm": 0.2889108657836914,
+      "learning_rate": 1.8058154249389502e-06,
+      "loss": 11.8164,
+      "step": 1297
+    },
+    {
+      "epoch": 0.06462614670334457,
+      "grad_norm": 0.19865798950195312,
+      "learning_rate": 1.7626914513348858e-06,
+      "loss": 11.7564,
+      "step": 1298
+    },
+    {
+      "epoch": 0.06467593572237643,
+      "grad_norm": 0.12114917486906052,
+      "learning_rate": 1.7200840545052444e-06,
+      "loss": 11.8239,
+      "step": 1299
+    },
+    {
+      "epoch": 0.06472572474140828,
+      "grad_norm": 0.4188714921474457,
+      "learning_rate": 1.6779934584992718e-06,
+      "loss": 11.7266,
+      "step": 1300
+    },
+    {
+      "epoch": 0.06477551376044013,
+      "grad_norm": 0.1227809488773346,
+      "learning_rate": 1.6364198846485658e-06,
+      "loss": 11.8261,
+      "step": 1301
+    },
+    {
+      "epoch": 0.06482530277947199,
+      "grad_norm": 0.09743479639291763,
+      "learning_rate": 1.5953635515660425e-06,
+      "loss": 11.8416,
+      "step": 1302
+    },
+    {
+      "epoch": 0.06487509179850384,
+      "grad_norm": 0.1907789558172226,
+      "learning_rate": 1.554824675144706e-06,
+      "loss": 11.7865,
+      "step": 1303
+    },
+    {
+      "epoch": 0.0649248808175357,
+      "grad_norm": 0.14809323847293854,
+      "learning_rate": 1.514803468556547e-06,
+      "loss": 11.8262,
+      "step": 1304
+    },
+    {
+      "epoch": 0.06497466983656755,
+      "grad_norm": 0.156307190656662,
+      "learning_rate": 1.4753001422514123e-06,
+      "loss": 11.7988,
+      "step": 1305
+    },
+    {
+      "epoch": 0.0650244588555994,
+      "grad_norm": 0.16924236714839935,
+      "learning_rate": 1.436314903955871e-06,
+      "loss": 11.7809,
+      "step": 1306
+    },
+    {
+      "epoch": 0.06507424787463124,
+      "grad_norm": 0.1588449329137802,
+      "learning_rate": 1.3978479586721716e-06,
+      "loss": 11.8143,
+      "step": 1307
+    },
+    {
+      "epoch": 0.0651240368936631,
+      "grad_norm": 0.17416319251060486,
+      "learning_rate": 1.3598995086771537e-06,
+      "loss": 11.8016,
+      "step": 1308
+    },
+    {
+      "epoch": 0.06517382591269495,
+      "grad_norm": 0.20937886834144592,
+      "learning_rate": 1.3224697535211162e-06,
+      "loss": 11.7865,
+      "step": 1309
+    },
+    {
+      "epoch": 0.0652236149317268,
+      "grad_norm": 0.27726686000823975,
+      "learning_rate": 1.2855588900269056e-06,
+      "loss": 11.7845,
+      "step": 1310
+    },
+    {
+      "epoch": 0.06527340395075866,
+      "grad_norm": 0.23690152168273926,
+      "learning_rate": 1.2491671122887071e-06,
+      "loss": 11.7492,
+      "step": 1311
+    },
+    {
+      "epoch": 0.06532319296979051,
+      "grad_norm": 0.2111586034297943,
+      "learning_rate": 1.2132946116711897e-06,
+      "loss": 11.8017,
+      "step": 1312
+    },
+    {
+      "epoch": 0.06537298198882237,
+      "grad_norm": 0.2630767524242401,
+      "learning_rate": 1.1779415768083945e-06,
+      "loss": 11.7456,
+      "step": 1313
+    },
+    {
+      "epoch": 0.06542277100785422,
+      "grad_norm": 0.11112811416387558,
+      "learning_rate": 1.14310819360276e-06,
+      "loss": 11.8243,
+      "step": 1314
+    },
+    {
+      "epoch": 0.06547256002688608,
+      "grad_norm": 0.12407265603542328,
+      "learning_rate": 1.108794645224187e-06,
+      "loss": 11.8119,
+      "step": 1315
+    },
+    {
+      "epoch": 0.06552234904591792,
+      "grad_norm": 0.13975748419761658,
+      "learning_rate": 1.0750011121090308e-06,
+      "loss": 11.8385,
+      "step": 1316
+    },
+    {
+      "epoch": 0.06557213806494977,
+      "grad_norm": 0.17207452654838562,
+      "learning_rate": 1.0417277719591667e-06,
+      "loss": 11.8182,
+      "step": 1317
+    },
+    {
+      "epoch": 0.06562192708398162,
+      "grad_norm": 0.18585903942584991,
+      "learning_rate": 1.008974799741058e-06,
+      "loss": 11.7639,
+      "step": 1318
+    },
+    {
+      "epoch": 0.06567171610301348,
+      "grad_norm": 0.22873733937740326,
+      "learning_rate": 9.767423676848464e-07,
+      "loss": 11.8054,
+      "step": 1319
+    },
+    {
+      "epoch": 0.06572150512204533,
+      "grad_norm": 0.157411590218544,
+      "learning_rate": 9.450306452834179e-07,
+      "loss": 11.806,
+      "step": 1320
+    },
+    {
+      "epoch": 0.06577129414107719,
+      "grad_norm": 0.22588112950325012,
+      "learning_rate": 9.138397992915493e-07,
+      "loss": 11.7516,
+      "step": 1321
+    },
+    {
+      "epoch": 0.06582108316010904,
+      "grad_norm": 0.2017526626586914,
+      "learning_rate": 8.831699937249859e-07,
+      "loss": 11.8182,
+      "step": 1322
+    },
+    {
+      "epoch": 0.0658708721791409,
+      "grad_norm": 0.14813141524791718,
+      "learning_rate": 8.530213898596317e-07,
+      "loss": 11.8117,
+      "step": 1323
+    },
+    {
+      "epoch": 0.06592066119817275,
+      "grad_norm": 0.14788495004177094,
+      "learning_rate": 8.233941462306271e-07,
+      "loss": 11.8275,
+      "step": 1324
+    },
+    {
+      "epoch": 0.0659704502172046,
+      "grad_norm": 0.16586799919605255,
+      "learning_rate": 7.94288418631639e-07,
+      "loss": 11.8012,
+      "step": 1325
+    },
+    {
+      "epoch": 0.06602023923623644,
+      "grad_norm": 0.26286354660987854,
+      "learning_rate": 7.657043601138835e-07,
+      "loss": 11.7957,
+      "step": 1326
+    },
+    {
+      "epoch": 0.0660700282552683,
+      "grad_norm": 0.30086231231689453,
+      "learning_rate": 7.376421209854267e-07,
+      "loss": 11.7664,
+      "step": 1327
+    },
+    {
+      "epoch": 0.06611981727430015,
+      "grad_norm": 0.20751886069774628,
+      "learning_rate": 7.101018488104072e-07,
+      "loss": 11.7928,
+      "step": 1328
+    },
+    {
+      "epoch": 0.066169606293332,
+      "grad_norm": 0.21468576788902283,
+      "learning_rate": 6.830836884081926e-07,
+      "loss": 11.7963,
+      "step": 1329
+    },
+    {
+      "epoch": 0.06621939531236386,
+      "grad_norm": 0.3652535378932953,
+      "learning_rate": 6.565877818526245e-07,
+      "loss": 11.7536,
+      "step": 1330
+    },
+    {
+      "epoch": 0.06626918433139571,
+      "grad_norm": 0.1696128398180008,
+      "learning_rate": 6.306142684713301e-07,
+      "loss": 11.81,
+      "step": 1331
+    },
+    {
+      "epoch": 0.06631897335042757,
+      "grad_norm": 0.12671110033988953,
+      "learning_rate": 6.051632848449562e-07,
+      "loss": 11.7881,
+      "step": 1332
+    },
+    {
+      "epoch": 0.06636876236945942,
+      "grad_norm": 0.21733596920967102,
+      "learning_rate": 5.802349648064032e-07,
+      "loss": 11.8035,
+      "step": 1333
+    },
+    {
+      "epoch": 0.06641855138849127,
+      "grad_norm": 0.11043595522642136,
+      "learning_rate": 5.558294394402253e-07,
+      "loss": 11.8245,
+      "step": 1334
+    },
+    {
+      "epoch": 0.06646834040752313,
+      "grad_norm": 0.27618443965911865,
+      "learning_rate": 5.319468370818537e-07,
+      "loss": 11.7955,
+      "step": 1335
+    },
+    {
+      "epoch": 0.06651812942655497,
+      "grad_norm": 0.1568264663219452,
+      "learning_rate": 5.085872833169414e-07,
+      "loss": 11.8012,
+      "step": 1336
+    },
+    {
+      "epoch": 0.06656791844558682,
+      "grad_norm": 0.2868306636810303,
+      "learning_rate": 4.857509009807304e-07,
+      "loss": 11.7369,
+      "step": 1337
+    },
+    {
+      "epoch": 0.06661770746461868,
+      "grad_norm": 0.1636037677526474,
+      "learning_rate": 4.634378101573855e-07,
+      "loss": 11.8026,
+      "step": 1338
+    },
+    {
+      "epoch": 0.06666749648365053,
+      "grad_norm": 0.1233348697423935,
+      "learning_rate": 4.416481281793394e-07,
+      "loss": 11.8092,
+      "step": 1339
+    },
+    {
+      "epoch": 0.06671728550268238,
+      "grad_norm": 0.16717824339866638,
+      "learning_rate": 4.203819696267486e-07,
+      "loss": 11.8014,
+      "step": 1340
+    },
+    {
+      "epoch": 0.06676707452171424,
+      "grad_norm": 0.2801608741283417,
+      "learning_rate": 3.9963944632681607e-07,
+      "loss": 11.7616,
+      "step": 1341
+    },
+    {
+      "epoch": 0.06681686354074609,
+      "grad_norm": 0.15805502235889435,
+      "learning_rate": 3.7942066735321414e-07,
+      "loss": 11.7871,
+      "step": 1342
+    },
+    {
+      "epoch": 0.06686665255977794,
+      "grad_norm": 0.23729799687862396,
+      "learning_rate": 3.597257390255515e-07,
+      "loss": 11.7025,
+      "step": 1343
+    },
+    {
+      "epoch": 0.0669164415788098,
+      "grad_norm": 0.22626511752605438,
+      "learning_rate": 3.405547649087959e-07,
+      "loss": 11.756,
+      "step": 1344
+    },
+    {
+      "epoch": 0.06696623059784165,
+      "grad_norm": 0.22312688827514648,
+      "learning_rate": 3.2190784581270786e-07,
+      "loss": 11.7834,
+      "step": 1345
+    },
+    {
+      "epoch": 0.06701601961687349,
+      "grad_norm": 0.14723101258277893,
+      "learning_rate": 3.0378507979131886e-07,
+      "loss": 11.8125,
+      "step": 1346
+    },
+    {
+      "epoch": 0.06706580863590535,
+      "grad_norm": 0.20397256314754486,
+      "learning_rate": 2.861865621424431e-07,
+      "loss": 11.7416,
+      "step": 1347
+    },
+    {
+      "epoch": 0.0671155976549372,
+      "grad_norm": 0.17452247440814972,
+      "learning_rate": 2.691123854071553e-07,
+      "loss": 11.8153,
+      "step": 1348
+    },
+    {
+      "epoch": 0.06716538667396905,
+      "grad_norm": 0.18444041907787323,
+      "learning_rate": 2.5256263936929146e-07,
+      "loss": 11.7963,
+      "step": 1349
+    },
+    {
+      "epoch": 0.06721517569300091,
+      "grad_norm": 0.16513411700725555,
+      "learning_rate": 2.3653741105499338e-07,
+      "loss": 11.8282,
+      "step": 1350
+    },
+    {
+      "epoch": 0.06726496471203276,
+      "grad_norm": 0.16867300868034363,
+      "learning_rate": 2.2103678473226475e-07,
+      "loss": 11.7918,
+      "step": 1351
+    },
+    {
+      "epoch": 0.06731475373106462,
+      "grad_norm": 0.204188272356987,
+      "learning_rate": 2.060608419105048e-07,
+      "loss": 11.8033,
+      "step": 1352
+    },
+    {
+      "epoch": 0.06736454275009647,
+      "grad_norm": 0.2502548396587372,
+      "learning_rate": 1.916096613400642e-07,
+      "loss": 11.7956,
+      "step": 1353
+    },
+    {
+      "epoch": 0.06741433176912832,
+      "grad_norm": 0.40772634744644165,
+      "learning_rate": 1.7768331901187875e-07,
+      "loss": 11.7529,
+      "step": 1354
+    },
+    {
+      "epoch": 0.06746412078816016,
+      "grad_norm": 0.47658059000968933,
+      "learning_rate": 1.6428188815703626e-07,
+      "loss": 11.7601,
+      "step": 1355
+    },
+    {
+      "epoch": 0.06751390980719202,
+      "grad_norm": 0.16059927642345428,
+      "learning_rate": 1.5140543924639927e-07,
+      "loss": 11.816,
+      "step": 1356
+    },
+    {
+      "epoch": 0.06756369882622387,
+      "grad_norm": 0.22693295776844025,
+      "learning_rate": 1.3905403999024957e-07,
+      "loss": 11.8014,
+      "step": 1357
+    },
+    {
+      "epoch": 0.06761348784525573,
+      "grad_norm": 0.19551688432693481,
+      "learning_rate": 1.2722775533787757e-07,
+      "loss": 11.7913,
+      "step": 1358
+    },
+    {
+      "epoch": 0.06766327686428758,
+      "grad_norm": 0.17800608277320862,
+      "learning_rate": 1.1592664747731574e-07,
+      "loss": 11.7962,
+      "step": 1359
+    },
+    {
+      "epoch": 0.06771306588331943,
+      "grad_norm": 0.16557064652442932,
+      "learning_rate": 1.0515077583498344e-07,
+      "loss": 11.8158,
+      "step": 1360
+    },
+    {
+      "epoch": 0.06776285490235129,
+      "grad_norm": 0.12334538251161575,
+      "learning_rate": 9.49001970753316e-08,
+      "loss": 11.8347,
+      "step": 1361
+    },
+    {
+      "epoch": 0.06781264392138314,
+      "grad_norm": 0.12242874503135681,
+      "learning_rate": 8.517496510059841e-08,
+      "loss": 11.8004,
+      "step": 1362
+    },
+    {
+      "epoch": 0.067862432940415,
+      "grad_norm": 0.13858723640441895,
+      "learning_rate": 7.597513105052079e-08,
+      "loss": 11.8181,
+      "step": 1363
+    },
+    {
+      "epoch": 0.06791222195944685,
+      "grad_norm": 0.14237730205059052,
+      "learning_rate": 6.730074330203451e-08,
+      "loss": 11.8146,
+      "step": 1364
+    },
+    {
+      "epoch": 0.06796201097847869,
+      "grad_norm": 0.13338276743888855,
+      "learning_rate": 5.915184746904112e-08,
+      "loss": 11.8142,
+      "step": 1365
+    },
+    {
+      "epoch": 0.06801179999751054,
+      "grad_norm": 0.19225336611270905,
+      "learning_rate": 5.152848640218588e-08,
+      "loss": 11.7898,
+      "step": 1366
+    },
+    {
+      "epoch": 0.0680615890165424,
+      "grad_norm": 0.2177787572145462,
+      "learning_rate": 4.4430700188569095e-08,
+      "loss": 11.7806,
+      "step": 1367
+    },
+    {
+      "epoch": 0.06811137803557425,
+      "grad_norm": 0.156807541847229,
+      "learning_rate": 3.785852615161289e-08,
+      "loss": 11.8281,
+      "step": 1368
+    },
+    {
+      "epoch": 0.0681611670546061,
+      "grad_norm": 0.15858221054077148,
+      "learning_rate": 3.1811998850828085e-08,
+      "loss": 11.7957,
+      "step": 1369
+    },
+    {
+      "epoch": 0.06821095607363796,
+      "grad_norm": 0.18428069353103638,
+      "learning_rate": 2.6291150081603212e-08,
+      "loss": 11.7666,
+      "step": 1370
+    },
+    {
+      "epoch": 0.06826074509266981,
+      "grad_norm": 0.10750476270914078,
+      "learning_rate": 2.1296008875115736e-08,
+      "loss": 11.8193,
+      "step": 1371
+    },
+    {
+      "epoch": 0.06831053411170167,
+      "grad_norm": 0.12931814789772034,
+      "learning_rate": 1.6826601498098894e-08,
+      "loss": 11.8436,
+      "step": 1372
+    },
+    {
+      "epoch": 0.06836032313073352,
+      "grad_norm": 0.15376867353916168,
+      "learning_rate": 1.2882951452730663e-08,
+      "loss": 11.8359,
+      "step": 1373
+    },
+    {
+      "epoch": 0.06841011214976538,
+      "grad_norm": 0.21058931946754456,
+      "learning_rate": 9.46507947655606e-09,
+      "loss": 11.7516,
+      "step": 1374
+    },
+    {
+      "epoch": 0.06845990116879722,
+      "grad_norm": 0.21048632264137268,
+      "learning_rate": 6.57300354227619e-09,
+      "loss": 11.824,
+      "step": 1375
+    },
+    {
+      "epoch": 0.06850969018782907,
+      "grad_norm": 0.1717846691608429,
+      "learning_rate": 4.2067388577815556e-09,
+      "loss": 11.8108,
+      "step": 1376
+    },
+    {
+      "epoch": 0.06855947920686092,
+      "grad_norm": 0.22130613029003143,
+      "learning_rate": 2.3662978659633183e-09,
+      "loss": 11.7488,
+      "step": 1377
+    },
+    {
+      "epoch": 0.06860926822589278,
+      "grad_norm": 0.14033807814121246,
+      "learning_rate": 1.0516902447132993e-09,
+      "loss": 11.8016,
+      "step": 1378
+    },
+    {
+      "epoch": 0.06865905724492463,
+      "grad_norm": 0.14432686567306519,
+      "learning_rate": 2.6292290682405906e-10,
+      "loss": 11.8209,
+      "step": 1379
+    },
+    {
+      "epoch": 0.06870884626395649,
+      "grad_norm": 0.1552795022726059,
+      "learning_rate": 0.0,
+      "loss": 11.7903,
+      "step": 1380
+    },
+    {
+      "epoch": 0.06870884626395649,
+      "eval_loss": 11.77357292175293,
+      "eval_runtime": 114.9924,
+      "eval_samples_per_second": 73.544,
+      "eval_steps_per_second": 36.776,
+      "step": 1380
     }
   ],
   "logging_steps": 1,
…
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
+  "total_flos": 188203236065280.0,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null