Training in progress, step 696, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9014321f8bcf9f44983d9975832a7418df2015bd30752ea0042a7dcf497b8ea6
 size 17640136
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1d8857e3aaa9169a3c0e4683a41635f3737de799279d38939c3bfa0a3dcc8ca5
 size 9569204
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:773acc43cd5f12df5c8f50b405d38c0441a8e728b81cc3424dd55c96c189a86e
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fb1e581c12cfae174281c6a038ddd6d82dac2f093e3546871b673709d6c0695e
 size 1064
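Each of the four files above is stored via Git LFS; the committed pointer records only the object's `oid sha256:` (the SHA-256 digest of the file contents, per the `git-lfs.github.com/spec/v1` header) and its byte `size`. A minimal sketch, not part of the commit, of recomputing that oid for a downloaded file and comparing it against the pointer (the path is just an example):

```python
import hashlib

def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
    """Return the sha256 hex digest Git LFS records as the pointer oid."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in chunks so large checkpoint files never load fully into memory.
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Example check against the adapter pointer above (hypothetical local path):
# lfs_oid("last-checkpoint/adapter_model.safetensors") should equal
# "9014321f8bcf9f44983d9975832a7418df2015bd30752ea0042a7dcf497b8ea6"
```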
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
+"epoch": 0.3960170697012802,
 "eval_steps": 348,
-"global_step":
+"global_step": 696,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
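As a quick sanity check on the updated header (a sketch, not part of the commit): the epoch counter advances by a fixed fraction per optimizer step, so the implied steps-per-epoch can be recovered from the two changed fields, and it matches the per-step epoch increments in the log entries added below (0.19914651 − 0.19857752 ≈ 0.000569).

```python
# Sanity check: recover steps-per-epoch from the updated header fields.
epoch, global_step = 0.3960170697012802, 696
print(global_step / epoch)  # ~1757.5 steps per epoch, i.e. ~0.000569 epoch/step
```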
@@ -2478,7 +4922,7 @@
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
@@ -2459,6 +2459,2450 @@
 "eval_samples_per_second": 45.023,
 "eval_steps_per_second": 22.511,
 "step": 348
 }
 ],
 "logging_steps": 1,
+},
+{
+"epoch": 0.19857752489331437,
+"grad_norm": 0.5306143164634705,
+"learning_rate": 0.00017174773806287496,
+"loss": 1.5776,
+"step": 349
+},
+{
+"epoch": 0.19914651493598862,
+"grad_norm": 0.5569584369659424,
+"learning_rate": 0.00017158920431940117,
+"loss": 1.5926,
+"step": 350
+},
+{
+"epoch": 0.19971550497866286,
+"grad_norm": 0.5538038611412048,
+"learning_rate": 0.0001714303006361697,
+"loss": 1.6146,
+"step": 351
+},
+{
+"epoch": 0.20028449502133713,
+"grad_norm": 0.5369197130203247,
+"learning_rate": 0.00017127102783432097,
+"loss": 1.514,
+"step": 352
+},
+{
+"epoch": 0.20085348506401138,
+"grad_norm": 0.6111621856689453,
+"learning_rate": 0.00017111138673690283,
+"loss": 1.3508,
+"step": 353
+},
+{
+"epoch": 0.20142247510668562,
+"grad_norm": 0.5350061655044556,
+"learning_rate": 0.0001709513781688664,
+"loss": 1.5506,
+"step": 354
+},
+{
+"epoch": 0.2019914651493599,
+"grad_norm": 0.5226223468780518,
+"learning_rate": 0.00017079100295706154,
+"loss": 1.55,
+"step": 355
+},
+{
+"epoch": 0.20256045519203414,
+"grad_norm": 0.5834634304046631,
+"learning_rate": 0.0001706302619302329,
+"loss": 1.6025,
+"step": 356
+},
+{
+"epoch": 0.20312944523470838,
+"grad_norm": 0.564756453037262,
+"learning_rate": 0.0001704691559190155,
+"loss": 1.5174,
+"step": 357
+},
+{
+"epoch": 0.20369843527738266,
+"grad_norm": 0.5217262506484985,
+"learning_rate": 0.00017030768575593025,
+"loss": 1.4321,
+"step": 358
+},
+{
+"epoch": 0.2042674253200569,
+"grad_norm": 0.5270060896873474,
+"learning_rate": 0.0001701458522753801,
+"loss": 1.6006,
+"step": 359
+},
+{
+"epoch": 0.20483641536273114,
+"grad_norm": 0.5722881555557251,
+"learning_rate": 0.00016998365631364527,
+"loss": 1.7025,
+"step": 360
+},
+{
+"epoch": 0.20540540540540542,
+"grad_norm": 0.5267907977104187,
+"learning_rate": 0.00016982109870887908,
+"loss": 1.5108,
+"step": 361
+},
+{
+"epoch": 0.20597439544807966,
+"grad_norm": 0.5428017973899841,
+"learning_rate": 0.00016965818030110382,
+"loss": 1.6343,
+"step": 362
+},
+{
+"epoch": 0.2065433854907539,
+"grad_norm": 0.5151480436325073,
+"learning_rate": 0.0001694949019322061,
+"loss": 1.5242,
+"step": 363
+},
+{
+"epoch": 0.20711237553342818,
+"grad_norm": 0.5217251181602478,
+"learning_rate": 0.00016933126444593273,
+"loss": 1.54,
+"step": 364
+},
+{
+"epoch": 0.20768136557610242,
+"grad_norm": 0.5215661525726318,
+"learning_rate": 0.00016916726868788622,
+"loss": 1.5131,
+"step": 365
+},
+{
+"epoch": 0.20825035561877667,
+"grad_norm": 0.5087475776672363,
+"learning_rate": 0.00016900291550552048,
+"loss": 1.6782,
+"step": 366
+},
+{
+"epoch": 0.2088193456614509,
+"grad_norm": 0.5366347432136536,
+"learning_rate": 0.0001688382057481364,
+"loss": 1.5821,
+"step": 367
+},
+{
+"epoch": 0.20938833570412518,
+"grad_norm": 0.5469174385070801,
+"learning_rate": 0.00016867314026687753,
+"loss": 1.8795,
+"step": 368
+},
+{
+"epoch": 0.20995732574679943,
+"grad_norm": 0.5702829957008362,
+"learning_rate": 0.00016850771991472563,
+"loss": 1.4382,
+"step": 369
+},
+{
+"epoch": 0.21052631578947367,
+"grad_norm": 0.5792803764343262,
+"learning_rate": 0.0001683419455464962,
+"loss": 1.6934,
+"step": 370
+},
+{
+"epoch": 0.21109530583214794,
+"grad_norm": 0.5423445701599121,
+"learning_rate": 0.0001681758180188342,
+"loss": 1.5408,
+"step": 371
+},
+{
+"epoch": 0.2116642958748222,
+"grad_norm": 0.5211445093154907,
+"learning_rate": 0.00016800933819020956,
+"loss": 1.5354,
+"step": 372
+},
+{
+"epoch": 0.21223328591749643,
+"grad_norm": 0.5631567239761353,
+"learning_rate": 0.0001678425069209127,
+"loss": 1.6356,
+"step": 373
+},
+{
+"epoch": 0.2128022759601707,
+"grad_norm": 0.5736171007156372,
+"learning_rate": 0.0001676753250730501,
+"loss": 1.6202,
+"step": 374
+},
+{
+"epoch": 0.21337126600284495,
+"grad_norm": 0.5194095373153687,
+"learning_rate": 0.00016750779351053994,
+"loss": 1.4419,
+"step": 375
+},
+{
+"epoch": 0.2139402560455192,
+"grad_norm": 0.5220928192138672,
+"learning_rate": 0.0001673399130991075,
+"loss": 1.4182,
+"step": 376
+},
+{
+"epoch": 0.21450924608819347,
+"grad_norm": 0.5223848819732666,
+"learning_rate": 0.00016717168470628077,
+"loss": 1.5831,
+"step": 377
+},
+{
+"epoch": 0.2150782361308677,
+"grad_norm": 0.5400263071060181,
+"learning_rate": 0.00016700310920138596,
+"loss": 1.579,
+"step": 378
+},
+{
+"epoch": 0.21564722617354196,
+"grad_norm": 0.5276429653167725,
+"learning_rate": 0.00016683418745554299,
+"loss": 1.4674,
+"step": 379
+},
+{
+"epoch": 0.21621621621621623,
+"grad_norm": 0.5498270392417908,
+"learning_rate": 0.000166664920341661,
+"loss": 1.8171,
+"step": 380
+},
+{
+"epoch": 0.21678520625889047,
+"grad_norm": 0.5207138657569885,
+"learning_rate": 0.00016649530873443375,
+"loss": 1.3337,
+"step": 381
+},
+{
+"epoch": 0.21735419630156472,
+"grad_norm": 0.5555972456932068,
+"learning_rate": 0.00016632535351033533,
+"loss": 1.5634,
+"step": 382
+},
+{
+"epoch": 0.217923186344239,
+"grad_norm": 0.5569733381271362,
+"learning_rate": 0.00016615505554761533,
+"loss": 1.6649,
+"step": 383
+},
+{
+"epoch": 0.21849217638691323,
+"grad_norm": 0.5526515245437622,
+"learning_rate": 0.00016598441572629458,
+"loss": 1.2708,
+"step": 384
+},
+{
+"epoch": 0.21906116642958748,
+"grad_norm": 0.5405237674713135,
+"learning_rate": 0.0001658134349281604,
+"loss": 1.5085,
+"step": 385
+},
+{
+"epoch": 0.21963015647226172,
+"grad_norm": 0.5164327621459961,
+"learning_rate": 0.00016564211403676213,
+"loss": 1.4096,
+"step": 386
+},
+{
+"epoch": 0.220199146514936,
+"grad_norm": 0.535915195941925,
+"learning_rate": 0.0001654704539374066,
+"loss": 1.5407,
+"step": 387
+},
+{
+"epoch": 0.22076813655761024,
+"grad_norm": 0.5589139461517334,
+"learning_rate": 0.0001652984555171534,
+"loss": 1.5837,
+"step": 388
+},
+{
+"epoch": 0.22133712660028448,
+"grad_norm": 0.5141209959983826,
+"learning_rate": 0.00016512611966481056,
+"loss": 1.377,
+"step": 389
+},
+{
+"epoch": 0.22190611664295876,
+"grad_norm": 0.514789879322052,
+"learning_rate": 0.00016495344727092973,
+"loss": 1.5191,
+"step": 390
+},
+{
+"epoch": 0.222475106685633,
+"grad_norm": 0.5353395342826843,
+"learning_rate": 0.00016478043922780157,
+"loss": 1.5026,
+"step": 391
+},
+{
+"epoch": 0.22304409672830725,
+"grad_norm": 0.5318089127540588,
+"learning_rate": 0.00016460709642945133,
+"loss": 1.5277,
+"step": 392
+},
+{
+"epoch": 0.22361308677098152,
+"grad_norm": 0.5722904205322266,
+"learning_rate": 0.00016443341977163408,
+"loss": 1.3433,
+"step": 393
+},
+{
+"epoch": 0.22418207681365576,
+"grad_norm": 0.542008101940155,
+"learning_rate": 0.0001642594101518301,
+"loss": 1.5241,
+"step": 394
+},
+{
+"epoch": 0.22475106685633,
+"grad_norm": 0.5351589918136597,
+"learning_rate": 0.00016408506846924035,
+"loss": 1.6335,
+"step": 395
+},
+{
+"epoch": 0.22532005689900428,
+"grad_norm": 0.5150931477546692,
+"learning_rate": 0.00016391039562478157,
+"loss": 1.5412,
+"step": 396
+},
+{
+"epoch": 0.22588904694167852,
+"grad_norm": 0.5498356819152832,
+"learning_rate": 0.00016373539252108202,
+"loss": 1.5062,
+"step": 397
+},
+{
+"epoch": 0.22645803698435277,
+"grad_norm": 0.5373052358627319,
+"learning_rate": 0.0001635600600624763,
+"loss": 1.6658,
+"step": 398
+},
+{
+"epoch": 0.22702702702702704,
+"grad_norm": 0.5198200941085815,
+"learning_rate": 0.00016338439915500127,
+"loss": 1.3554,
+"step": 399
+},
+{
+"epoch": 0.22759601706970128,
+"grad_norm": 0.5517953038215637,
+"learning_rate": 0.00016320841070639083,
+"loss": 1.5403,
+"step": 400
+},
+{
+"epoch": 0.22816500711237553,
+"grad_norm": 0.5407613515853882,
+"learning_rate": 0.00016303209562607154,
+"loss": 1.5033,
+"step": 401
+},
+{
+"epoch": 0.22873399715504977,
+"grad_norm": 0.5271732211112976,
+"learning_rate": 0.00016285545482515792,
+"loss": 1.4554,
+"step": 402
+},
+{
+"epoch": 0.22930298719772405,
+"grad_norm": 0.5387139916419983,
+"learning_rate": 0.0001626784892164475,
+"loss": 1.7347,
+"step": 403
+},
+{
+"epoch": 0.2298719772403983,
+"grad_norm": 0.5222678780555725,
+"learning_rate": 0.00016250119971441637,
+"loss": 1.4489,
+"step": 404
+},
+{
+"epoch": 0.23044096728307253,
+"grad_norm": 0.5498174428939819,
+"learning_rate": 0.00016232358723521436,
+"loss": 1.6047,
+"step": 405
+},
+{
+"epoch": 0.2310099573257468,
+"grad_norm": 0.5119244456291199,
+"learning_rate": 0.0001621456526966603,
+"loss": 1.5818,
+"step": 406
+},
+{
+"epoch": 0.23157894736842105,
+"grad_norm": 0.5584565997123718,
+"learning_rate": 0.00016196739701823716,
+"loss": 1.6863,
+"step": 407
+},
+{
+"epoch": 0.2321479374110953,
+"grad_norm": 0.5125292539596558,
+"learning_rate": 0.00016178882112108752,
+"loss": 1.4137,
+"step": 408
+},
+{
+"epoch": 0.23271692745376957,
+"grad_norm": 0.518551230430603,
+"learning_rate": 0.00016160992592800872,
+"loss": 1.304,
+"step": 409
+},
+{
+"epoch": 0.2332859174964438,
+"grad_norm": 0.5396437048912048,
+"learning_rate": 0.00016143071236344797,
+"loss": 1.6118,
+"step": 410
+},
+{
+"epoch": 0.23385490753911806,
+"grad_norm": 0.6036053895950317,
+"learning_rate": 0.0001612511813534978,
+"loss": 1.5618,
+"step": 411
+},
+{
+"epoch": 0.23442389758179233,
+"grad_norm": 0.5274645686149597,
+"learning_rate": 0.00016107133382589105,
+"loss": 1.5238,
+"step": 412
+},
+{
+"epoch": 0.23499288762446657,
+"grad_norm": 0.5649259090423584,
+"learning_rate": 0.00016089117070999616,
+"loss": 1.4841,
+"step": 413
+},
+{
+"epoch": 0.23556187766714082,
+"grad_norm": 0.5350419282913208,
+"learning_rate": 0.0001607106929368125,
+"loss": 1.4252,
+"step": 414
+},
+{
+"epoch": 0.2361308677098151,
+"grad_norm": 0.5421844124794006,
+"learning_rate": 0.00016052990143896535,
+"loss": 1.3899,
+"step": 415
+},
+{
+"epoch": 0.23669985775248933,
+"grad_norm": 0.5462636947631836,
+"learning_rate": 0.0001603487971507012,
+"loss": 1.6417,
+"step": 416
+},
+{
+"epoch": 0.23726884779516358,
+"grad_norm": 0.564430832862854,
+"learning_rate": 0.00016016738100788297,
+"loss": 1.6418,
+"step": 417
+},
+{
+"epoch": 0.23783783783783785,
+"grad_norm": 0.5399342179298401,
+"learning_rate": 0.00015998565394798492,
+"loss": 1.3624,
+"step": 418
+},
+{
+"epoch": 0.2384068278805121,
+"grad_norm": 0.5136001706123352,
+"learning_rate": 0.00015980361691008815,
+"loss": 1.3956,
+"step": 419
+},
+{
+"epoch": 0.23897581792318634,
+"grad_norm": 0.5325256586074829,
+"learning_rate": 0.00015962127083487548,
+"loss": 1.2396,
+"step": 420
+},
+{
+"epoch": 0.23954480796586058,
+"grad_norm": 0.5132279396057129,
+"learning_rate": 0.00015943861666462675,
+"loss": 1.4461,
+"step": 421
+},
+{
+"epoch": 0.24011379800853486,
+"grad_norm": 0.5597640872001648,
+"learning_rate": 0.0001592556553432139,
+"loss": 1.5031,
+"step": 422
+},
+{
+"epoch": 0.2406827880512091,
+"grad_norm": 0.5563086271286011,
+"learning_rate": 0.00015907238781609606,
+"loss": 1.4839,
+"step": 423
+},
+{
+"epoch": 0.24125177809388335,
+"grad_norm": 0.557904839515686,
+"learning_rate": 0.00015888881503031468,
+"loss": 1.6277,
+"step": 424
+},
+{
+"epoch": 0.24182076813655762,
+"grad_norm": 0.5795301198959351,
+"learning_rate": 0.00015870493793448864,
+"loss": 1.4073,
+"step": 425
+},
+{
+"epoch": 0.24238975817923186,
+"grad_norm": 0.5133345127105713,
+"learning_rate": 0.00015852075747880938,
+"loss": 1.3689,
+"step": 426
+},
+{
+"epoch": 0.2429587482219061,
+"grad_norm": 0.5455712676048279,
+"learning_rate": 0.00015833627461503595,
+"loss": 1.6118,
+"step": 427
+},
+{
+"epoch": 0.24352773826458038,
+"grad_norm": 0.5585681796073914,
+"learning_rate": 0.00015815149029649013,
+"loss": 1.5628,
+"step": 428
+},
+{
+"epoch": 0.24409672830725462,
+"grad_norm": 0.5475082397460938,
+"learning_rate": 0.0001579664054780514,
+"loss": 1.5907,
+"step": 429
+},
+{
+"epoch": 0.24466571834992887,
+"grad_norm": 0.530405580997467,
+"learning_rate": 0.0001577810211161522,
+"loss": 1.5324,
+"step": 430
+},
+{
+"epoch": 0.24523470839260314,
+"grad_norm": 0.5662998557090759,
+"learning_rate": 0.00015759533816877275,
+"loss": 1.2456,
+"step": 431
+},
+{
+"epoch": 0.24580369843527738,
+"grad_norm": 0.6249381303787231,
+"learning_rate": 0.0001574093575954363,
+"loss": 1.4694,
+"step": 432
+},
+{
+"epoch": 0.24637268847795163,
+"grad_norm": 0.5382659435272217,
+"learning_rate": 0.00015722308035720408,
+"loss": 1.6025,
+"step": 433
+},
+{
+"epoch": 0.2469416785206259,
+"grad_norm": 0.5415714383125305,
+"learning_rate": 0.00015703650741667036,
+"loss": 1.3643,
+"step": 434
+},
+{
+"epoch": 0.24751066856330015,
+"grad_norm": 0.540256917476654,
+"learning_rate": 0.0001568496397379574,
+"loss": 1.4577,
+"step": 435
+},
+{
+"epoch": 0.2480796586059744,
+"grad_norm": 0.5126465559005737,
+"learning_rate": 0.0001566624782867106,
+"loss": 1.5512,
+"step": 436
+},
+{
+"epoch": 0.24864864864864866,
+"grad_norm": 0.5520801544189453,
+"learning_rate": 0.0001564750240300934,
+"loss": 1.6545,
+"step": 437
+},
+{
+"epoch": 0.2492176386913229,
+"grad_norm": 0.5290027260780334,
+"learning_rate": 0.00015628727793678233,
+"loss": 1.5391,
+"step": 438
+},
+{
+"epoch": 0.24978662873399715,
+"grad_norm": 0.5835967659950256,
+"learning_rate": 0.00015609924097696203,
+"loss": 1.4657,
+"step": 439
+},
+{
+"epoch": 0.2503556187766714,
+"grad_norm": 0.5586689710617065,
+"learning_rate": 0.00015591091412232012,
+"loss": 1.5222,
+"step": 440
+},
+{
+"epoch": 0.25092460881934564,
+"grad_norm": 0.5292929410934448,
+"learning_rate": 0.00015572229834604235,
+"loss": 1.4726,
+"step": 441
+},
+{
+"epoch": 0.25149359886201994,
+"grad_norm": 0.5165523290634155,
+"learning_rate": 0.00015553339462280748,
+"loss": 1.4154,
+"step": 442
+},
+{
+"epoch": 0.2520625889046942,
+"grad_norm": 0.5475851893424988,
+"learning_rate": 0.00015534420392878211,
+"loss": 1.5885,
+"step": 443
+},
+{
+"epoch": 0.25263157894736843,
+"grad_norm": 0.5540974736213684,
+"learning_rate": 0.00015515472724161598,
+"loss": 1.4529,
+"step": 444
+},
+{
+"epoch": 0.2532005689900427,
+"grad_norm": 0.5251240730285645,
+"learning_rate": 0.00015496496554043653,
+"loss": 1.3794,
+"step": 445
+},
+{
+"epoch": 0.2537695590327169,
+"grad_norm": 0.5751416683197021,
+"learning_rate": 0.00015477491980584417,
+"loss": 1.5417,
+"step": 446
+},
+{
+"epoch": 0.25433854907539116,
+"grad_norm": 0.5411546230316162,
+"learning_rate": 0.00015458459101990693,
+"loss": 1.6787,
+"step": 447
+},
+{
+"epoch": 0.2549075391180654,
+"grad_norm": 0.5817191004753113,
+"learning_rate": 0.00015439398016615558,
+"loss": 1.5382,
+"step": 448
+},
+{
+"epoch": 0.2554765291607397,
+"grad_norm": 0.505901038646698,
+"learning_rate": 0.00015420308822957848,
+"loss": 1.3885,
+"step": 449
+},
+{
+"epoch": 0.25604551920341395,
+"grad_norm": 0.5091856718063354,
+"learning_rate": 0.00015401191619661658,
+"loss": 1.4067,
+"step": 450
+},
+{
+"epoch": 0.2566145092460882,
+"grad_norm": 0.5677408576011658,
+"learning_rate": 0.00015382046505515803,
+"loss": 1.5578,
+"step": 451
+},
+{
+"epoch": 0.25718349928876244,
+"grad_norm": 0.5270281434059143,
+"learning_rate": 0.00015362873579453348,
+"loss": 1.3921,
+"step": 452
+},
+{
+"epoch": 0.2577524893314367,
+"grad_norm": 0.5784454345703125,
+"learning_rate": 0.00015343672940551067,
+"loss": 1.5433,
+"step": 453
+},
+{
+"epoch": 0.25832147937411093,
+"grad_norm": 0.5490661859512329,
+"learning_rate": 0.00015324444688028947,
+"loss": 1.4543,
+"step": 454
+},
+{
+"epoch": 0.25889046941678523,
+"grad_norm": 0.5555963516235352,
+"learning_rate": 0.00015305188921249665,
+"loss": 1.3882,
+"step": 455
+},
+{
+"epoch": 0.2594594594594595,
+"grad_norm": 0.5918729305267334,
+"learning_rate": 0.0001528590573971808,
+"loss": 1.6544,
+"step": 456
+},
+{
+"epoch": 0.2600284495021337,
+"grad_norm": 0.5301398038864136,
+"learning_rate": 0.00015266595243080714,
+"loss": 1.6201,
+"step": 457
+},
+{
+"epoch": 0.26059743954480796,
+"grad_norm": 0.5327576994895935,
+"learning_rate": 0.0001524725753112525,
+"loss": 1.6861,
+"step": 458
+},
+{
+"epoch": 0.2611664295874822,
+"grad_norm": 0.5090361833572388,
+"learning_rate": 0.00015227892703780003,
+"loss": 1.2298,
+"step": 459
+},
+{
+"epoch": 0.26173541963015645,
+"grad_norm": 0.5667193531990051,
+"learning_rate": 0.00015208500861113401,
+"loss": 1.4061,
+"step": 460
+},
+{
+"epoch": 0.26230440967283075,
+"grad_norm": 0.5170226097106934,
+"learning_rate": 0.00015189082103333484,
+"loss": 1.3402,
+"step": 461
+},
+{
+"epoch": 0.262873399715505,
+"grad_norm": 0.5260865688323975,
+"learning_rate": 0.0001516963653078737,
+"loss": 1.4571,
+"step": 462
+},
+{
+"epoch": 0.26344238975817924,
+"grad_norm": 0.5484414100646973,
+"learning_rate": 0.00015150164243960752,
+"loss": 1.4822,
+"step": 463
+},
+{
+"epoch": 0.2640113798008535,
+"grad_norm": 0.5555655360221863,
+"learning_rate": 0.00015130665343477358,
+"loss": 1.4383,
+"step": 464
+},
+{
+"epoch": 0.26458036984352773,
+"grad_norm": 0.5628737211227417,
+"learning_rate": 0.0001511113993009845,
+"loss": 1.6092,
+"step": 465
+},
+{
+"epoch": 0.265149359886202,
+"grad_norm": 0.5401899814605713,
+"learning_rate": 0.00015091588104722297,
+"loss": 1.4347,
+"step": 466
+},
+{
+"epoch": 0.2657183499288762,
+"grad_norm": 0.5575911998748779,
+"learning_rate": 0.00015072009968383656,
+"loss": 1.6627,
+"step": 467
+},
+{
+"epoch": 0.2662873399715505,
+"grad_norm": 0.539851725101471,
+"learning_rate": 0.00015052405622253235,
+"loss": 1.5648,
+"step": 468
+},
+{
+"epoch": 0.26685633001422476,
+"grad_norm": 0.5497231483459473,
+"learning_rate": 0.00015032775167637193,
+"loss": 1.5671,
+"step": 469
+},
+{
+"epoch": 0.267425320056899,
+"grad_norm": 0.5294174551963806,
+"learning_rate": 0.00015013118705976602,
+"loss": 1.4519,
+"step": 470
+},
+{
+"epoch": 0.26799431009957325,
+"grad_norm": 0.5508366227149963,
+"learning_rate": 0.00014993436338846925,
+"loss": 1.2089,
+"step": 471
+},
+{
+"epoch": 0.2685633001422475,
+"grad_norm": 0.530941903591156,
+"learning_rate": 0.00014973728167957498,
+"loss": 1.2298,
+"step": 472
+},
+{
+"epoch": 0.26913229018492174,
+"grad_norm": 0.572995126247406,
+"learning_rate": 0.00014953994295150986,
+"loss": 1.5102,
+"step": 473
+},
+{
+"epoch": 0.26970128022759604,
+"grad_norm": 0.5313156843185425,
+"learning_rate": 0.00014934234822402883,
+"loss": 1.3345,
+"step": 474
+},
+{
+"epoch": 0.2702702702702703,
+"grad_norm": 0.5710895657539368,
+"learning_rate": 0.0001491444985182097,
+"loss": 1.4461,
+"step": 475
+},
+{
+"epoch": 0.27083926031294453,
+"grad_norm": 0.5655211210250854,
+"learning_rate": 0.00014894639485644784,
+"loss": 1.6591,
+"step": 476
+},
+{
+"epoch": 0.2714082503556188,
+"grad_norm": 0.5507573485374451,
+"learning_rate": 0.00014874803826245089,
+"loss": 1.3442,
+"step": 477
+},
+{
+"epoch": 0.271977240398293,
+"grad_norm": 0.5628292560577393,
+"learning_rate": 0.00014854942976123367,
+"loss": 1.6926,
+"step": 478
+},
+{
+"epoch": 0.27254623044096726,
+"grad_norm": 0.5278828740119934,
+"learning_rate": 0.00014835057037911268,
+"loss": 1.3193,
+"step": 479
+},
+{
+"epoch": 0.27311522048364156,
+"grad_norm": 0.550122857093811,
+"learning_rate": 0.0001481514611437008,
+"loss": 1.4085,
+"step": 480
+},
+{
+"epoch": 0.2736842105263158,
+"grad_norm": 0.5174803733825684,
+"learning_rate": 0.00014795210308390211,
+"loss": 1.2066,
+"step": 481
+},
+{
+"epoch": 0.27425320056899005,
+"grad_norm": 0.5421956777572632,
+"learning_rate": 0.00014775249722990646,
+"loss": 1.4261,
+"step": 482
+},
+{
+"epoch": 0.2748221906116643,
+"grad_norm": 0.5158098936080933,
+"learning_rate": 0.00014755264461318416,
+"loss": 1.277,
+"step": 483
+},
+{
+"epoch": 0.27539118065433854,
+"grad_norm": 0.5564343929290771,
+"learning_rate": 0.0001473525462664808,
+"loss": 1.5075,
+"step": 484
+},
+{
+"epoch": 0.2759601706970128,
+"grad_norm": 0.5485411882400513,
+"learning_rate": 0.0001471522032238116,
+"loss": 1.4847,
+"step": 485
+},
+{
+"epoch": 0.27652916073968703,
+"grad_norm": 0.5449703931808472,
+"learning_rate": 0.00014695161652045641,
+"loss": 1.6162,
+"step": 486
+},
+{
+"epoch": 0.27709815078236133,
+"grad_norm": 0.5641449093818665,
+"learning_rate": 0.00014675078719295415,
+"loss": 1.3614,
+"step": 487
+},
+{
+"epoch": 0.2776671408250356,
+"grad_norm": 0.5554978251457214,
+"learning_rate": 0.00014654971627909747,
+"loss": 1.5019,
+"step": 488
+},
+{
+"epoch": 0.2782361308677098,
+"grad_norm": 0.5530039668083191,
+"learning_rate": 0.0001463484048179275,
+"loss": 1.5116,
+"step": 489
+},
+{
+"epoch": 0.27880512091038406,
+"grad_norm": 0.5324894189834595,
+"learning_rate": 0.00014614685384972835,
+"loss": 1.3575,
+"step": 490
+},
+{
+"epoch": 0.2793741109530583,
+"grad_norm": 0.5472353100776672,
+"learning_rate": 0.0001459450644160218,
+"loss": 1.5364,
+"step": 491
+},
+{
+"epoch": 0.27994310099573255,
+"grad_norm": 0.5706241130828857,
+"learning_rate": 0.00014574303755956195,
+"loss": 1.5958,
+"step": 492
+},
+{
+"epoch": 0.28051209103840685,
+"grad_norm": 0.5553603768348694,
+"learning_rate": 0.00014554077432432975,
+"loss": 1.5664,
+"step": 493
+},
+{
+"epoch": 0.2810810810810811,
+"grad_norm": 0.542325496673584,
+"learning_rate": 0.00014533827575552766,
+"loss": 1.4275,
+"step": 494
+},
+{
+"epoch": 0.28165007112375534,
+"grad_norm": 0.6180648803710938,
+"learning_rate": 0.00014513554289957424,
+"loss": 1.3948,
+"step": 495
+},
+{
+"epoch": 0.2822190611664296,
+"grad_norm": 0.6009839177131653,
+"learning_rate": 0.0001449325768040987,
+"loss": 1.6545,
+"step": 496
+},
+{
+"epoch": 0.28278805120910383,
+"grad_norm": 0.58924800157547,
+"learning_rate": 0.00014472937851793557,
+"loss": 1.3284,
+"step": 497
+},
+{
+"epoch": 0.2833570412517781,
+"grad_norm": 0.5391841530799866,
+"learning_rate": 0.0001445259490911192,
+"loss": 1.3593,
+"step": 498
+},
+{
+"epoch": 0.2839260312944524,
+"grad_norm": 0.562134325504303,
+"learning_rate": 0.0001443222895748784,
+"loss": 1.4458,
+"step": 499
+},
+{
+"epoch": 0.2844950213371266,
+"grad_norm": 0.5663224458694458,
+"learning_rate": 0.000144118401021631,
+"loss": 1.5136,
+"step": 500
+},
+{
+"epoch": 0.28506401137980086,
+"grad_norm": 0.5762481689453125,
+"learning_rate": 0.00014391428448497825,
+"loss": 1.5841,
+"step": 501
+},
+{
+"epoch": 0.2856330014224751,
+"grad_norm": 0.5568172931671143,
+"learning_rate": 0.00014370994101969967,
+"loss": 1.5863,
+"step": 502
+},
+{
+"epoch": 0.28620199146514935,
+"grad_norm": 0.5461404323577881,
+"learning_rate": 0.00014350537168174738,
+"loss": 1.4175,
+"step": 503
+},
+{
+"epoch": 0.2867709815078236,
+"grad_norm": 0.5522152781486511,
+"learning_rate": 0.00014330057752824068,
+"loss": 1.5865,
+"step": 504
+},
+{
+"epoch": 0.28733997155049784,
+"grad_norm": 0.5333879590034485,
+"learning_rate": 0.00014309555961746067,
+"loss": 1.4804,
+"step": 505
+},
+{
+"epoch": 0.28790896159317214,
+"grad_norm": 0.5656757354736328,
+"learning_rate": 0.00014289031900884463,
+"loss": 1.4009,
+"step": 506
+},
+{
+"epoch": 0.2884779516358464,
+"grad_norm": 0.55275559425354,
+"learning_rate": 0.00014268485676298078,
+"loss": 1.3477,
+"step": 507
+},
+{
+"epoch": 0.28904694167852063,
+"grad_norm": 0.5528755784034729,
+"learning_rate": 0.00014247917394160254,
+"loss": 1.6965,
+"step": 508
+},
+{
+"epoch": 0.2896159317211949,
+"grad_norm": 0.5423591732978821,
+"learning_rate": 0.00014227327160758316,
+"loss": 1.3725,
+"step": 509
+},
+{
+"epoch": 0.2901849217638691,
+"grad_norm": 0.5610995292663574,
+"learning_rate": 0.00014206715082493032,
+"loss": 1.5135,
+"step": 510
+},
+{
+"epoch": 0.29075391180654336,
+"grad_norm": 0.550565242767334,
+"learning_rate": 0.00014186081265878047,
+"loss": 1.2824,
+"step": 511
+},
+{
+"epoch": 0.29132290184921766,
+"grad_norm": 0.5238208174705505,
+"learning_rate": 0.00014165425817539343,
+"loss": 1.3519,
+"step": 512
+},
+{
+"epoch": 0.2918918918918919,
+"grad_norm": 0.5561342835426331,
+"learning_rate": 0.00014144748844214684,
+"loss": 1.4381,
+"step": 513
+},
+{
+"epoch": 0.29246088193456615,
+"grad_norm": 0.5522477030754089,
+"learning_rate": 0.0001412405045275306,
+"loss": 1.5873,
+"step": 514
+},
+{
+"epoch": 0.2930298719772404,
+"grad_norm": 0.5491191744804382,
+"learning_rate": 0.0001410333075011415,
+"loss": 1.4527,
+"step": 515
+},
+{
+"epoch": 0.29359886201991464,
+"grad_norm": 0.5521331429481506,
+"learning_rate": 0.00014082589843367752,
+"loss": 1.6342,
+"step": 516
+},
+{
+"epoch": 0.2941678520625889,
+"grad_norm": 0.5632197856903076,
+"learning_rate": 0.0001406182783969324,
+"loss": 1.4758,
+"step": 517
+},
+{
+"epoch": 0.29473684210526313,
+"grad_norm": 0.5883782505989075,
+"learning_rate": 0.00014041044846379,
+"loss": 1.4963,
+"step": 518
+},
+{
+"epoch": 0.29530583214793743,
+"grad_norm": 0.5621269941329956,
+"learning_rate": 0.00014020240970821893,
+"loss": 1.6292,
+"step": 519
+},
+{
+"epoch": 0.2958748221906117,
+"grad_norm": 0.5850755572319031,
+"learning_rate": 0.00013999416320526685,
+"loss": 1.5853,
+"step": 520
+},
+{
+"epoch": 0.2964438122332859,
+"grad_norm": 0.5468763113021851,
+"learning_rate": 0.00013978571003105502,
+"loss": 1.4112,
+"step": 521
+},
+{
+"epoch": 0.29701280227596016,
+"grad_norm": 0.5954291820526123,
+"learning_rate": 0.00013957705126277253,
+"loss": 1.4785,
+"step": 522
+},
+{
+"epoch": 0.2975817923186344,
+"grad_norm": 0.5438716411590576,
+"learning_rate": 0.00013936818797867102,
+"loss": 1.6543,
+"step": 523
+},
+{
+"epoch": 0.29815078236130865,
+"grad_norm": 0.5444651246070862,
+"learning_rate": 0.00013915912125805893,
+"loss": 1.5327,
+"step": 524
+},
+{
+"epoch": 0.29871977240398295,
+"grad_norm": 0.5755301117897034,
+"learning_rate": 0.00013894985218129602,
+"loss": 1.5734,
+"step": 525
+},
+{
+"epoch": 0.2992887624466572,
+"grad_norm": 0.5267385244369507,
+"learning_rate": 0.0001387403818297876,
+"loss": 1.5172,
+"step": 526
+},
+{
+"epoch": 0.29985775248933144,
+"grad_norm": 0.5721412301063538,
+"learning_rate": 0.00013853071128597924,
+"loss": 1.617,
+"step": 527
+},
+{
+"epoch": 0.3004267425320057,
+"grad_norm": 0.547497570514679,
+"learning_rate": 0.00013832084163335084,
+"loss": 1.4242,
+"step": 528
+},
+{
+"epoch": 0.30099573257467993,
+"grad_norm": 0.5331338047981262,
+"learning_rate": 0.00013811077395641135,
+"loss": 1.2921,
+"step": 529
+},
+{
+"epoch": 0.3015647226173542,
+"grad_norm": 0.5468523502349854,
+"learning_rate": 0.00013790050934069296,
+"loss": 1.3264,
+"step": 530
+},
+{
+"epoch": 0.3021337126600285,
+"grad_norm": 0.538796067237854,
+"learning_rate": 0.00013769004887274547,
+"loss": 1.4284,
+"step": 531
+},
+{
+"epoch": 0.3027027027027027,
+"grad_norm": 0.5727618932723999,
+"learning_rate": 0.0001374793936401309,
+"loss": 1.509,
+"step": 532
+},
+{
+"epoch": 0.30327169274537696,
+"grad_norm": 0.5127109289169312,
+"learning_rate": 0.00013726854473141765,
+"loss": 1.3145,
+"step": 533
+},
+{
+"epoch": 0.3038406827880512,
+"grad_norm": 0.5412492156028748,
+"learning_rate": 0.00013705750323617495,
+"loss": 1.4385,
+"step": 534
+},
+{
+"epoch": 0.30440967283072545,
+"grad_norm": 0.6073004603385925,
+"learning_rate": 0.0001368462702449672,
+"loss": 1.585,
+"step": 535
+},
+{
+"epoch": 0.3049786628733997,
+"grad_norm": 0.6075984239578247,
+"learning_rate": 0.00013663484684934836,
+"loss": 1.6782,
+"step": 536
+},
+{
+"epoch": 0.30554765291607394,
+"grad_norm": 0.5950874090194702,
+"learning_rate": 0.0001364232341418564,
+"loss": 1.6634,
+"step": 537
+},
+{
+"epoch": 0.30611664295874824,
+"grad_norm": 0.5442619323730469,
+"learning_rate": 0.00013621143321600746,
+"loss": 1.6321,
+"step": 538
+},
+{
+"epoch": 0.3066856330014225,
+"grad_norm": 0.5568251609802246,
+"learning_rate": 0.00013599944516629045,
|
| 3797 |
+
"loss": 1.3718,
|
| 3798 |
+
"step": 539
|
| 3799 |
+
},
|
| 3800 |
+
{
|
| 3801 |
+
"epoch": 0.30725462304409673,
|
| 3802 |
+
"grad_norm": 0.5321120023727417,
|
| 3803 |
+
"learning_rate": 0.00013578727108816104,
|
| 3804 |
+
"loss": 1.3387,
|
| 3805 |
+
"step": 540
|
| 3806 |
+
},
|
| 3807 |
+
{
|
| 3808 |
+
"epoch": 0.307823613086771,
|
| 3809 |
+
"grad_norm": 0.6142572164535522,
|
| 3810 |
+
"learning_rate": 0.00013557491207803635,
|
| 3811 |
+
"loss": 1.4013,
|
| 3812 |
+
"step": 541
|
| 3813 |
+
},
|
| 3814 |
+
{
|
| 3815 |
+
"epoch": 0.3083926031294452,
|
| 3816 |
+
"grad_norm": 0.5809832811355591,
|
| 3817 |
+
"learning_rate": 0.0001353623692332891,
|
| 3818 |
+
"loss": 1.2896,
|
| 3819 |
+
"step": 542
|
| 3820 |
+
},
|
| 3821 |
+
{
|
| 3822 |
+
"epoch": 0.30896159317211946,
|
| 3823 |
+
"grad_norm": 0.5262885689735413,
|
| 3824 |
+
"learning_rate": 0.00013514964365224206,
|
| 3825 |
+
"loss": 1.4799,
|
| 3826 |
+
"step": 543
|
| 3827 |
+
},
|
| 3828 |
+
{
|
| 3829 |
+
"epoch": 0.30953058321479376,
|
| 3830 |
+
"grad_norm": 0.5609673261642456,
|
| 3831 |
+
"learning_rate": 0.00013493673643416218,
|
| 3832 |
+
"loss": 1.461,
|
| 3833 |
+
"step": 544
|
| 3834 |
+
},
|
| 3835 |
+
{
|
| 3836 |
+
"epoch": 0.310099573257468,
|
| 3837 |
+
"grad_norm": 0.5489050149917603,
|
| 3838 |
+
"learning_rate": 0.0001347236486792551,
|
| 3839 |
+
"loss": 1.3912,
|
| 3840 |
+
"step": 545
|
| 3841 |
+
},
|
| 3842 |
+
{
|
| 3843 |
+
"epoch": 0.31066856330014225,
|
| 3844 |
+
"grad_norm": 0.55717533826828,
|
| 3845 |
+
"learning_rate": 0.0001345103814886593,
|
| 3846 |
+
"loss": 1.4207,
|
| 3847 |
+
"step": 546
|
| 3848 |
+
},
|
| 3849 |
+
{
|
| 3850 |
+
"epoch": 0.3112375533428165,
|
| 3851 |
+
"grad_norm": 0.5326306819915771,
|
| 3852 |
+
"learning_rate": 0.00013429693596444067,
|
| 3853 |
+
"loss": 1.563,
|
| 3854 |
+
"step": 547
|
| 3855 |
+
},
|
| 3856 |
+
{
|
| 3857 |
+
"epoch": 0.31180654338549074,
|
| 3858 |
+
"grad_norm": 0.5783535838127136,
|
| 3859 |
+
"learning_rate": 0.00013408331320958648,
|
| 3860 |
+
"loss": 1.4829,
|
| 3861 |
+
"step": 548
|
| 3862 |
+
},
|
| 3863 |
+
{
|
| 3864 |
+
"epoch": 0.312375533428165,
|
| 3865 |
+
"grad_norm": 0.5628453493118286,
|
| 3866 |
+
"learning_rate": 0.00013386951432799987,
|
| 3867 |
+
"loss": 1.4815,
|
| 3868 |
+
"step": 549
|
| 3869 |
+
},
|
| 3870 |
+
{
|
| 3871 |
+
"epoch": 0.3129445234708393,
|
| 3872 |
+
"grad_norm": 0.5468215346336365,
|
| 3873 |
+
"learning_rate": 0.00013365554042449427,
|
| 3874 |
+
"loss": 1.3575,
|
| 3875 |
+
"step": 550
|
| 3876 |
+
},
|
| 3877 |
+
{
|
| 3878 |
+
"epoch": 0.31351351351351353,
|
| 3879 |
+
"grad_norm": 0.5711040496826172,
|
| 3880 |
+
"learning_rate": 0.00013344139260478732,
|
| 3881 |
+
"loss": 1.5833,
|
| 3882 |
+
"step": 551
|
| 3883 |
+
},
|
| 3884 |
+
{
|
| 3885 |
+
"epoch": 0.3140825035561878,
|
| 3886 |
+
"grad_norm": 0.5313072204589844,
|
| 3887 |
+
"learning_rate": 0.00013322707197549555,
|
| 3888 |
+
"loss": 1.5447,
|
| 3889 |
+
"step": 552
|
| 3890 |
+
},
|
| 3891 |
+
{
|
| 3892 |
+
"epoch": 0.314651493598862,
|
| 3893 |
+
"grad_norm": 0.6006999015808105,
|
| 3894 |
+
"learning_rate": 0.00013301257964412844,
|
| 3895 |
+
"loss": 1.747,
|
| 3896 |
+
"step": 553
|
| 3897 |
+
},
|
| 3898 |
+
{
|
| 3899 |
+
"epoch": 0.31522048364153626,
|
| 3900 |
+
"grad_norm": 0.6007615923881531,
|
| 3901 |
+
"learning_rate": 0.00013279791671908268,
|
| 3902 |
+
"loss": 1.5486,
|
| 3903 |
+
"step": 554
|
| 3904 |
+
},
|
| 3905 |
+
{
|
| 3906 |
+
"epoch": 0.3157894736842105,
|
| 3907 |
+
"grad_norm": 0.553854763507843,
|
| 3908 |
+
"learning_rate": 0.00013258308430963664,
|
| 3909 |
+
"loss": 1.4473,
|
| 3910 |
+
"step": 555
|
| 3911 |
+
},
|
| 3912 |
+
{
|
| 3913 |
+
"epoch": 0.31635846372688475,
|
| 3914 |
+
"grad_norm": 0.5920282006263733,
|
| 3915 |
+
"learning_rate": 0.00013236808352594433,
|
| 3916 |
+
"loss": 1.4883,
|
| 3917 |
+
"step": 556
|
| 3918 |
+
},
|
| 3919 |
+
{
|
| 3920 |
+
"epoch": 0.31692745376955905,
|
| 3921 |
+
"grad_norm": 0.5819621682167053,
|
| 3922 |
+
"learning_rate": 0.00013215291547903006,
|
| 3923 |
+
"loss": 1.4925,
|
| 3924 |
+
"step": 557
|
| 3925 |
+
},
|
| 3926 |
+
{
|
| 3927 |
+
"epoch": 0.3174964438122333,
|
| 3928 |
+
"grad_norm": 0.5728132128715515,
|
| 3929 |
+
"learning_rate": 0.0001319375812807823,
|
| 3930 |
+
"loss": 1.3921,
|
| 3931 |
+
"step": 558
|
| 3932 |
+
},
|
| 3933 |
+
{
|
| 3934 |
+
"epoch": 0.31806543385490754,
|
| 3935 |
+
"grad_norm": 0.6309751868247986,
|
| 3936 |
+
"learning_rate": 0.0001317220820439481,
|
| 3937 |
+
"loss": 1.6893,
|
| 3938 |
+
"step": 559
|
| 3939 |
+
},
|
| 3940 |
+
{
|
| 3941 |
+
"epoch": 0.3186344238975818,
|
| 3942 |
+
"grad_norm": 0.5545490384101868,
|
| 3943 |
+
"learning_rate": 0.00013150641888212756,
|
| 3944 |
+
"loss": 1.4053,
|
| 3945 |
+
"step": 560
|
| 3946 |
+
},
|
| 3947 |
+
{
|
| 3948 |
+
"epoch": 0.31920341394025603,
|
| 3949 |
+
"grad_norm": 0.5476984977722168,
|
| 3950 |
+
"learning_rate": 0.00013129059290976767,
|
| 3951 |
+
"loss": 1.3499,
|
| 3952 |
+
"step": 561
|
| 3953 |
+
},
|
| 3954 |
+
{
|
| 3955 |
+
"epoch": 0.3197724039829303,
|
| 3956 |
+
"grad_norm": 0.5255653262138367,
|
| 3957 |
+
"learning_rate": 0.00013107460524215678,
|
| 3958 |
+
"loss": 1.318,
|
| 3959 |
+
"step": 562
|
| 3960 |
+
},
|
| 3961 |
+
{
|
| 3962 |
+
"epoch": 0.3203413940256046,
|
| 3963 |
+
"grad_norm": 0.649142861366272,
|
| 3964 |
+
"learning_rate": 0.0001308584569954189,
|
| 3965 |
+
"loss": 1.6503,
|
| 3966 |
+
"step": 563
|
| 3967 |
+
},
|
| 3968 |
+
{
|
| 3969 |
+
"epoch": 0.3209103840682788,
|
| 3970 |
+
"grad_norm": 0.5934924483299255,
|
| 3971 |
+
"learning_rate": 0.0001306421492865077,
|
| 3972 |
+
"loss": 1.5933,
|
| 3973 |
+
"step": 564
|
| 3974 |
+
},
|
| 3975 |
+
{
|
| 3976 |
+
"epoch": 0.32147937411095306,
|
| 3977 |
+
"grad_norm": 0.5277055501937866,
|
| 3978 |
+
"learning_rate": 0.00013042568323320107,
|
| 3979 |
+
"loss": 1.4174,
|
| 3980 |
+
"step": 565
|
| 3981 |
+
},
|
| 3982 |
+
{
|
| 3983 |
+
"epoch": 0.3220483641536273,
|
| 3984 |
+
"grad_norm": 0.5566196441650391,
|
| 3985 |
+
"learning_rate": 0.00013020905995409497,
|
| 3986 |
+
"loss": 1.4713,
|
| 3987 |
+
"step": 566
|
| 3988 |
+
},
|
| 3989 |
+
{
|
| 3990 |
+
"epoch": 0.32261735419630155,
|
| 3991 |
+
"grad_norm": 0.5719363689422607,
|
| 3992 |
+
"learning_rate": 0.00012999228056859784,
|
| 3993 |
+
"loss": 1.5238,
|
| 3994 |
+
"step": 567
|
| 3995 |
+
},
|
| 3996 |
+
{
|
| 3997 |
+
"epoch": 0.3231863442389758,
|
| 3998 |
+
"grad_norm": 0.5720301866531372,
|
| 3999 |
+
"learning_rate": 0.00012977534619692494,
|
| 4000 |
+
"loss": 1.5374,
|
| 4001 |
+
"step": 568
|
| 4002 |
+
},
|
| 4003 |
+
{
|
| 4004 |
+
"epoch": 0.3237553342816501,
|
| 4005 |
+
"grad_norm": 0.5727265477180481,
|
| 4006 |
+
"learning_rate": 0.0001295582579600923,
|
| 4007 |
+
"loss": 1.4789,
|
| 4008 |
+
"step": 569
|
| 4009 |
+
},
|
| 4010 |
+
{
|
| 4011 |
+
"epoch": 0.32432432432432434,
|
| 4012 |
+
"grad_norm": 0.5553936958312988,
|
| 4013 |
+
"learning_rate": 0.00012934101697991115,
|
| 4014 |
+
"loss": 1.2535,
|
| 4015 |
+
"step": 570
|
| 4016 |
+
},
|
| 4017 |
+
{
|
| 4018 |
+
"epoch": 0.3248933143669986,
|
| 4019 |
+
"grad_norm": 0.5490901470184326,
|
| 4020 |
+
"learning_rate": 0.00012912362437898192,
|
| 4021 |
+
"loss": 1.4513,
|
| 4022 |
+
"step": 571
|
| 4023 |
+
},
|
| 4024 |
+
{
|
| 4025 |
+
"epoch": 0.32546230440967283,
|
| 4026 |
+
"grad_norm": 0.5691761374473572,
|
| 4027 |
+
"learning_rate": 0.0001289060812806886,
|
| 4028 |
+
"loss": 1.5947,
|
| 4029 |
+
"step": 572
|
| 4030 |
+
},
|
| 4031 |
+
{
|
| 4032 |
+
"epoch": 0.3260312944523471,
|
| 4033 |
+
"grad_norm": 0.5883947610855103,
|
| 4034 |
+
"learning_rate": 0.00012868838880919294,
|
| 4035 |
+
"loss": 1.3175,
|
| 4036 |
+
"step": 573
|
| 4037 |
+
},
|
| 4038 |
+
{
|
| 4039 |
+
"epoch": 0.3266002844950213,
|
| 4040 |
+
"grad_norm": 0.5340852737426758,
|
| 4041 |
+
"learning_rate": 0.00012847054808942847,
|
| 4042 |
+
"loss": 1.1903,
|
| 4043 |
+
"step": 574
|
| 4044 |
+
},
|
| 4045 |
+
{
|
| 4046 |
+
"epoch": 0.32716927453769556,
|
| 4047 |
+
"grad_norm": 0.5509372353553772,
|
| 4048 |
+
"learning_rate": 0.0001282525602470949,
|
| 4049 |
+
"loss": 1.5289,
|
| 4050 |
+
"step": 575
|
| 4051 |
+
},
|
| 4052 |
+
{
|
| 4053 |
+
"epoch": 0.32773826458036986,
|
| 4054 |
+
"grad_norm": 0.5860341191291809,
|
| 4055 |
+
"learning_rate": 0.00012803442640865208,
|
| 4056 |
+
"loss": 1.6618,
|
| 4057 |
+
"step": 576
|
| 4058 |
+
},
|
| 4059 |
+
{
|
| 4060 |
+
"epoch": 0.3283072546230441,
|
| 4061 |
+
"grad_norm": 0.540502667427063,
|
| 4062 |
+
"learning_rate": 0.00012781614770131442,
|
| 4063 |
+
"loss": 1.5062,
|
| 4064 |
+
"step": 577
|
| 4065 |
+
},
|
| 4066 |
+
{
|
| 4067 |
+
"epoch": 0.32887624466571835,
|
| 4068 |
+
"grad_norm": 0.5500742793083191,
|
| 4069 |
+
"learning_rate": 0.00012759772525304492,
|
| 4070 |
+
"loss": 1.6137,
|
| 4071 |
+
"step": 578
|
| 4072 |
+
},
|
| 4073 |
+
{
|
| 4074 |
+
"epoch": 0.3294452347083926,
|
| 4075 |
+
"grad_norm": 0.550717830657959,
|
| 4076 |
+
"learning_rate": 0.00012737916019254933,
|
| 4077 |
+
"loss": 1.6204,
|
| 4078 |
+
"step": 579
|
| 4079 |
+
},
|
| 4080 |
+
{
|
| 4081 |
+
"epoch": 0.33001422475106684,
|
| 4082 |
+
"grad_norm": 0.5424780249595642,
|
| 4083 |
+
"learning_rate": 0.00012716045364927035,
|
| 4084 |
+
"loss": 1.3499,
|
| 4085 |
+
"step": 580
|
| 4086 |
+
},
|
| 4087 |
+
{
|
| 4088 |
+
"epoch": 0.3305832147937411,
|
| 4089 |
+
"grad_norm": 0.5449280142784119,
|
| 4090 |
+
"learning_rate": 0.0001269416067533818,
|
| 4091 |
+
"loss": 1.518,
|
| 4092 |
+
"step": 581
|
| 4093 |
+
},
|
| 4094 |
+
{
|
| 4095 |
+
"epoch": 0.3311522048364154,
|
| 4096 |
+
"grad_norm": 0.5500824451446533,
|
| 4097 |
+
"learning_rate": 0.0001267226206357828,
|
| 4098 |
+
"loss": 1.6019,
|
| 4099 |
+
"step": 582
|
| 4100 |
+
},
|
| 4101 |
+
{
|
| 4102 |
+
"epoch": 0.33172119487908963,
|
| 4103 |
+
"grad_norm": 0.5455232262611389,
|
| 4104 |
+
"learning_rate": 0.00012650349642809197,
|
| 4105 |
+
"loss": 1.5048,
|
| 4106 |
+
"step": 583
|
| 4107 |
+
},
|
| 4108 |
+
{
|
| 4109 |
+
"epoch": 0.3322901849217639,
|
| 4110 |
+
"grad_norm": 0.5600374937057495,
|
| 4111 |
+
"learning_rate": 0.00012628423526264134,
|
| 4112 |
+
"loss": 1.4539,
|
| 4113 |
+
"step": 584
|
| 4114 |
+
},
|
| 4115 |
+
{
|
| 4116 |
+
"epoch": 0.3328591749644381,
|
| 4117 |
+
"grad_norm": 0.5611444115638733,
|
| 4118 |
+
"learning_rate": 0.0001260648382724708,
|
| 4119 |
+
"loss": 1.4871,
|
| 4120 |
+
"step": 585
|
| 4121 |
+
},
|
| 4122 |
+
{
|
| 4123 |
+
"epoch": 0.33342816500711236,
|
| 4124 |
+
"grad_norm": 0.5722511410713196,
|
| 4125 |
+
"learning_rate": 0.00012584530659132215,
|
| 4126 |
+
"loss": 1.4491,
|
| 4127 |
+
"step": 586
|
| 4128 |
+
},
|
| 4129 |
+
{
|
| 4130 |
+
"epoch": 0.3339971550497866,
|
| 4131 |
+
"grad_norm": 0.5913495421409607,
|
| 4132 |
+
"learning_rate": 0.00012562564135363313,
|
| 4133 |
+
"loss": 1.136,
|
| 4134 |
+
"step": 587
|
| 4135 |
+
},
|
| 4136 |
+
{
|
| 4137 |
+
"epoch": 0.3345661450924609,
|
| 4138 |
+
"grad_norm": 0.578739583492279,
|
| 4139 |
+
"learning_rate": 0.00012540584369453162,
|
| 4140 |
+
"loss": 1.3503,
|
| 4141 |
+
"step": 588
|
| 4142 |
+
},
|
| 4143 |
+
{
|
| 4144 |
+
"epoch": 0.33513513513513515,
|
| 4145 |
+
"grad_norm": 0.5618348717689514,
|
| 4146 |
+
"learning_rate": 0.00012518591474982985,
|
| 4147 |
+
"loss": 1.5827,
|
| 4148 |
+
"step": 589
|
| 4149 |
+
},
|
| 4150 |
+
{
|
| 4151 |
+
"epoch": 0.3357041251778094,
|
| 4152 |
+
"grad_norm": 0.5958595871925354,
|
| 4153 |
+
"learning_rate": 0.00012496585565601853,
|
| 4154 |
+
"loss": 1.6305,
|
| 4155 |
+
"step": 590
|
| 4156 |
+
},
|
| 4157 |
+
{
|
| 4158 |
+
"epoch": 0.33627311522048364,
|
| 4159 |
+
"grad_norm": 0.5362867116928101,
|
| 4160 |
+
"learning_rate": 0.00012474566755026073,
|
| 4161 |
+
"loss": 1.416,
|
| 4162 |
+
"step": 591
|
| 4163 |
+
},
|
| 4164 |
+
{
|
| 4165 |
+
"epoch": 0.3368421052631579,
|
| 4166 |
+
"grad_norm": 0.5598848462104797,
|
| 4167 |
+
"learning_rate": 0.00012452535157038641,
|
| 4168 |
+
"loss": 1.4456,
|
| 4169 |
+
"step": 592
|
| 4170 |
+
},
|
| 4171 |
+
{
|
| 4172 |
+
"epoch": 0.33741109530583213,
|
| 4173 |
+
"grad_norm": 0.5422506332397461,
|
| 4174 |
+
"learning_rate": 0.00012430490885488617,
|
| 4175 |
+
"loss": 1.3472,
|
| 4176 |
+
"step": 593
|
| 4177 |
+
},
|
| 4178 |
+
{
|
| 4179 |
+
"epoch": 0.3379800853485064,
|
| 4180 |
+
"grad_norm": 0.5901892781257629,
|
| 4181 |
+
"learning_rate": 0.00012408434054290561,
|
| 4182 |
+
"loss": 1.5748,
|
| 4183 |
+
"step": 594
|
| 4184 |
+
},
|
| 4185 |
+
{
|
| 4186 |
+
"epoch": 0.3385490753911807,
|
| 4187 |
+
"grad_norm": 0.5219245553016663,
|
| 4188 |
+
"learning_rate": 0.00012386364777423932,
|
| 4189 |
+
"loss": 1.3369,
|
| 4190 |
+
"step": 595
|
| 4191 |
+
},
|
| 4192 |
+
{
|
| 4193 |
+
"epoch": 0.3391180654338549,
|
| 4194 |
+
"grad_norm": 0.5885049104690552,
|
| 4195 |
+
"learning_rate": 0.00012364283168932495,
|
| 4196 |
+
"loss": 1.5212,
|
| 4197 |
+
"step": 596
|
| 4198 |
+
},
|
| 4199 |
+
{
|
| 4200 |
+
"epoch": 0.33968705547652916,
|
| 4201 |
+
"grad_norm": 0.5666311383247375,
|
| 4202 |
+
"learning_rate": 0.0001234218934292376,
|
| 4203 |
+
"loss": 1.5041,
|
| 4204 |
+
"step": 597
|
| 4205 |
+
},
|
| 4206 |
+
{
|
| 4207 |
+
"epoch": 0.3402560455192034,
|
| 4208 |
+
"grad_norm": 0.6065592765808105,
|
| 4209 |
+
"learning_rate": 0.0001232008341356835,
|
| 4210 |
+
"loss": 1.5489,
|
| 4211 |
+
"step": 598
|
| 4212 |
+
},
|
| 4213 |
+
{
|
| 4214 |
+
"epoch": 0.34082503556187765,
|
| 4215 |
+
"grad_norm": 0.6251218914985657,
|
| 4216 |
+
"learning_rate": 0.0001229796549509944,
|
| 4217 |
+
"loss": 1.5043,
|
| 4218 |
+
"step": 599
|
| 4219 |
+
},
|
| 4220 |
+
{
|
| 4221 |
+
"epoch": 0.3413940256045519,
|
| 4222 |
+
"grad_norm": 0.562077522277832,
|
| 4223 |
+
"learning_rate": 0.00012275835701812163,
|
| 4224 |
+
"loss": 1.547,
|
| 4225 |
+
"step": 600
|
| 4226 |
+
},
|
| 4227 |
+
{
|
| 4228 |
+
"epoch": 0.3419630156472262,
|
| 4229 |
+
"grad_norm": 0.5375682711601257,
|
| 4230 |
+
"learning_rate": 0.00012253694148063013,
|
| 4231 |
+
"loss": 1.3999,
|
| 4232 |
+
"step": 601
|
| 4233 |
+
},
|
| 4234 |
+
{
|
| 4235 |
+
"epoch": 0.34253200568990044,
|
| 4236 |
+
"grad_norm": 0.583003044128418,
|
| 4237 |
+
"learning_rate": 0.0001223154094826925,
|
| 4238 |
+
"loss": 1.641,
|
| 4239 |
+
"step": 602
|
| 4240 |
+
},
|
| 4241 |
+
{
|
| 4242 |
+
"epoch": 0.3431009957325747,
|
| 4243 |
+
"grad_norm": 0.619719922542572,
|
| 4244 |
+
"learning_rate": 0.00012209376216908328,
|
| 4245 |
+
"loss": 1.5772,
|
| 4246 |
+
"step": 603
|
| 4247 |
+
},
|
| 4248 |
+
{
|
| 4249 |
+
"epoch": 0.34366998577524893,
|
| 4250 |
+
"grad_norm": 0.5548385977745056,
|
| 4251 |
+
"learning_rate": 0.00012187200068517277,
|
| 4252 |
+
"loss": 1.4802,
|
| 4253 |
+
"step": 604
|
| 4254 |
+
},
|
| 4255 |
+
{
|
| 4256 |
+
"epoch": 0.3442389758179232,
|
| 4257 |
+
"grad_norm": 0.5717220902442932,
|
| 4258 |
+
"learning_rate": 0.00012165012617692143,
|
| 4259 |
+
"loss": 1.533,
|
| 4260 |
+
"step": 605
|
| 4261 |
+
},
|
| 4262 |
+
{
|
| 4263 |
+
"epoch": 0.3448079658605974,
|
| 4264 |
+
"grad_norm": 0.5915637016296387,
|
| 4265 |
+
"learning_rate": 0.00012142813979087356,
|
| 4266 |
+
"loss": 1.4618,
|
| 4267 |
+
"step": 606
|
| 4268 |
+
},
|
| 4269 |
+
{
|
| 4270 |
+
"epoch": 0.34537695590327167,
|
| 4271 |
+
"grad_norm": 0.5780906081199646,
|
| 4272 |
+
"learning_rate": 0.00012120604267415172,
|
| 4273 |
+
"loss": 1.428,
|
| 4274 |
+
"step": 607
|
| 4275 |
+
},
|
| 4276 |
+
{
|
| 4277 |
+
"epoch": 0.34594594594594597,
|
| 4278 |
+
"grad_norm": 0.6107869744300842,
|
| 4279 |
+
"learning_rate": 0.0001209838359744507,
|
| 4280 |
+
"loss": 1.6056,
|
| 4281 |
+
"step": 608
|
| 4282 |
+
},
|
| 4283 |
+
{
|
| 4284 |
+
"epoch": 0.3465149359886202,
|
| 4285 |
+
"grad_norm": 0.5807276368141174,
|
| 4286 |
+
"learning_rate": 0.0001207615208400315,
|
| 4287 |
+
"loss": 1.4344,
|
| 4288 |
+
"step": 609
|
| 4289 |
+
},
|
| 4290 |
+
{
|
| 4291 |
+
"epoch": 0.34708392603129445,
|
| 4292 |
+
"grad_norm": 0.5761096477508545,
|
| 4293 |
+
"learning_rate": 0.00012053909841971547,
|
| 4294 |
+
"loss": 1.6409,
|
| 4295 |
+
"step": 610
|
| 4296 |
+
},
|
| 4297 |
+
{
|
| 4298 |
+
"epoch": 0.3476529160739687,
|
| 4299 |
+
"grad_norm": 0.5648180246353149,
|
| 4300 |
+
"learning_rate": 0.00012031656986287835,
|
| 4301 |
+
"loss": 1.5207,
|
| 4302 |
+
"step": 611
|
| 4303 |
+
},
|
| 4304 |
+
{
|
| 4305 |
+
"epoch": 0.34822190611664294,
|
| 4306 |
+
"grad_norm": 0.5846616625785828,
|
| 4307 |
+
"learning_rate": 0.00012009393631944439,
|
| 4308 |
+
"loss": 1.709,
|
| 4309 |
+
"step": 612
|
| 4310 |
+
},
|
| 4311 |
+
{
|
| 4312 |
+
"epoch": 0.3487908961593172,
|
| 4313 |
+
"grad_norm": 0.5779747366905212,
|
| 4314 |
+
"learning_rate": 0.00011987119893988035,
|
| 4315 |
+
"loss": 1.5626,
|
| 4316 |
+
"step": 613
|
| 4317 |
+
},
|
| 4318 |
+
{
|
| 4319 |
+
"epoch": 0.3493598862019915,
|
| 4320 |
+
"grad_norm": 0.5634474158287048,
|
| 4321 |
+
"learning_rate": 0.00011964835887518955,
|
| 4322 |
+
"loss": 1.645,
|
| 4323 |
+
"step": 614
|
| 4324 |
+
},
|
| 4325 |
+
{
|
| 4326 |
+
"epoch": 0.34992887624466573,
|
| 4327 |
+
"grad_norm": 0.5536413788795471,
|
| 4328 |
+
"learning_rate": 0.00011942541727690593,
|
| 4329 |
+
"loss": 1.4927,
|
| 4330 |
+
"step": 615
|
| 4331 |
+
},
|
| 4332 |
+
{
|
| 4333 |
+
"epoch": 0.35049786628734,
|
| 4334 |
+
"grad_norm": 0.5312451720237732,
|
| 4335 |
+
"learning_rate": 0.00011920237529708811,
|
| 4336 |
+
"loss": 1.3328,
|
| 4337 |
+
"step": 616
|
| 4338 |
+
},
|
| 4339 |
+
{
|
| 4340 |
+
"epoch": 0.3510668563300142,
|
| 4341 |
+
"grad_norm": 0.5960412621498108,
|
| 4342 |
+
"learning_rate": 0.00011897923408831346,
|
| 4343 |
+
"loss": 1.5827,
|
| 4344 |
+
"step": 617
|
| 4345 |
+
},
|
| 4346 |
+
{
|
| 4347 |
+
"epoch": 0.35163584637268847,
|
| 4348 |
+
"grad_norm": 0.598399817943573,
|
| 4349 |
+
"learning_rate": 0.00011875599480367215,
|
| 4350 |
+
"loss": 1.5477,
|
| 4351 |
+
"step": 618
|
| 4352 |
+
},
|
| 4353 |
+
{
|
| 4354 |
+
"epoch": 0.3522048364153627,
|
| 4355 |
+
"grad_norm": 0.517993688583374,
|
| 4356 |
+
"learning_rate": 0.00011853265859676108,
|
| 4357 |
+
"loss": 1.3741,
|
| 4358 |
+
"step": 619
|
| 4359 |
+
},
|
| 4360 |
+
{
|
| 4361 |
+
"epoch": 0.352773826458037,
|
| 4362 |
+
"grad_norm": 0.5564917922019958,
|
| 4363 |
+
"learning_rate": 0.00011830922662167803,
|
| 4364 |
+
"loss": 1.3112,
|
| 4365 |
+
"step": 620
|
| 4366 |
+
},
|
| 4367 |
+
{
|
| 4368 |
+
"epoch": 0.35334281650071125,
|
| 4369 |
+
"grad_norm": 0.5626814961433411,
|
| 4370 |
+
"learning_rate": 0.00011808570003301566,
|
| 4371 |
+
"loss": 1.5272,
|
| 4372 |
+
"step": 621
|
| 4373 |
+
},
|
| 4374 |
+
{
|
| 4375 |
+
"epoch": 0.3539118065433855,
|
| 4376 |
+
"grad_norm": 0.6245387196540833,
|
| 4377 |
+
"learning_rate": 0.00011786207998585559,
|
| 4378 |
+
"loss": 1.433,
|
| 4379 |
+
"step": 622
|
| 4380 |
+
},
|
| 4381 |
+
{
|
| 4382 |
+
"epoch": 0.35448079658605974,
|
| 4383 |
+
"grad_norm": 0.5711420178413391,
|
| 4384 |
+
"learning_rate": 0.00011763836763576237,
|
| 4385 |
+
"loss": 1.4975,
|
| 4386 |
+
"step": 623
|
| 4387 |
+
},
|
| 4388 |
+
{
|
| 4389 |
+
"epoch": 0.355049786628734,
|
| 4390 |
+
"grad_norm": 0.5550587177276611,
|
| 4391 |
+
"learning_rate": 0.00011741456413877749,
|
| 4392 |
+
"loss": 1.3973,
|
| 4393 |
+
"step": 624
|
| 4394 |
+
},
|
| 4395 |
+
{
|
| 4396 |
+
"epoch": 0.35561877667140823,
|
| 4397 |
+
"grad_norm": 0.583817183971405,
|
| 4398 |
+
"learning_rate": 0.00011719067065141352,
|
| 4399 |
+
"loss": 1.4535,
|
| 4400 |
+
"step": 625
|
| 4401 |
+
},
|
| 4402 |
+
{
|
| 4403 |
+
"epoch": 0.3561877667140825,
|
| 4404 |
+
"grad_norm": 0.5912776589393616,
|
| 4405 |
+
"learning_rate": 0.00011696668833064795,
|
| 4406 |
+
"loss": 1.5161,
|
| 4407 |
+
"step": 626
|
| 4408 |
+
},
|
| 4409 |
+
{
|
| 4410 |
+
"epoch": 0.3567567567567568,
|
| 4411 |
+
"grad_norm": 0.615287184715271,
|
| 4412 |
+
"learning_rate": 0.0001167426183339174,
|
| 4413 |
+
"loss": 1.6331,
|
| 4414 |
+
"step": 627
|
| 4415 |
+
},
|
| 4416 |
+
{
|
| 4417 |
+
"epoch": 0.357325746799431,
|
| 4418 |
+
"grad_norm": 0.5431495308876038,
|
| 4419 |
+
"learning_rate": 0.00011651846181911161,
|
| 4420 |
+
"loss": 1.5279,
|
| 4421 |
+
"step": 628
|
| 4422 |
+
},
|
| 4423 |
+
{
|
| 4424 |
+
"epoch": 0.35789473684210527,
|
| 4425 |
+
"grad_norm": 0.5510687232017517,
|
| 4426 |
+
"learning_rate": 0.00011629421994456723,
|
| 4427 |
+
"loss": 1.5859,
|
| 4428 |
+
"step": 629
|
| 4429 |
+
},
|
| 4430 |
+
{
|
| 4431 |
+
"epoch": 0.3584637268847795,
|
| 4432 |
+
"grad_norm": 0.5746335983276367,
|
| 4433 |
+
"learning_rate": 0.0001160698938690622,
|
| 4434 |
+
"loss": 1.4053,
|
| 4435 |
+
"step": 630
|
| 4436 |
+
},
|
| 4437 |
+
{
|
| 4438 |
+
"epoch": 0.35903271692745375,
|
| 4439 |
+
"grad_norm": 0.5783334374427795,
|
| 4440 |
+
"learning_rate": 0.00011584548475180943,
|
| 4441 |
+
"loss": 1.6259,
|
| 4442 |
+
"step": 631
|
| 4443 |
+
},
|
| 4444 |
+
{
|
| 4445 |
+
"epoch": 0.359601706970128,
|
| 4446 |
+
"grad_norm": 0.5857696533203125,
|
| 4447 |
+
"learning_rate": 0.00011562099375245108,
|
| 4448 |
+
"loss": 1.4625,
|
| 4449 |
+
"step": 632
|
| 4450 |
+
},
|
| 4451 |
+
{
|
| 4452 |
+
"epoch": 0.3601706970128023,
|
| 4453 |
+
"grad_norm": 0.580596387386322,
|
| 4454 |
+
"learning_rate": 0.00011539642203105232,
|
| 4455 |
+
"loss": 1.511,
|
| 4456 |
+
"step": 633
|
| 4457 |
+
},
|
| 4458 |
+
{
|
| 4459 |
+
"epoch": 0.36073968705547654,
|
| 4460 |
+
"grad_norm": 0.5730242729187012,
|
| 4461 |
+
"learning_rate": 0.00011517177074809546,
|
| 4462 |
+
"loss": 1.6307,
|
| 4463 |
+
"step": 634
|
| 4464 |
+
},
|
| 4465 |
+
{
|
| 4466 |
+
"epoch": 0.3613086770981508,
|
| 4467 |
+
"grad_norm": 0.567469596862793,
|
| 4468 |
+
"learning_rate": 0.0001149470410644741,
|
| 4469 |
+
"loss": 1.5477,
|
| 4470 |
+
"step": 635
|
| 4471 |
+
},
|
| 4472 |
+
{
|
| 4473 |
+
"epoch": 0.36187766714082503,
|
| 4474 |
+
"grad_norm": 0.5704171061515808,
|
| 4475 |
+
"learning_rate": 0.00011472223414148675,
|
| 4476 |
+
"loss": 1.4716,
|
| 4477 |
+
"step": 636
|
| 4478 |
+
},
|
| 4479 |
+
{
|
| 4480 |
+
"epoch": 0.3624466571834993,
|
| 4481 |
+
"grad_norm": 0.5398246645927429,
|
| 4482 |
+
"learning_rate": 0.00011449735114083127,
|
| 4483 |
+
"loss": 1.6304,
|
| 4484 |
+
"step": 637
|
| 4485 |
+
},
|
| 4486 |
+
{
|
| 4487 |
+
"epoch": 0.3630156472261735,
|
| 4488 |
+
"grad_norm": 0.5576680898666382,
|
| 4489 |
+
"learning_rate": 0.0001142723932245985,
|
| 4490 |
+
"loss": 1.4775,
|
| 4491 |
+
"step": 638
|
| 4492 |
+
},
|
| 4493 |
+
{
|
| 4494 |
+
"epoch": 0.3635846372688478,
|
| 4495 |
+
"grad_norm": 0.5728341341018677,
|
| 4496 |
+
"learning_rate": 0.00011404736155526645,
|
| 4497 |
+
"loss": 1.6101,
|
| 4498 |
+
"step": 639
|
| 4499 |
+
},
|
| 4500 |
+
{
|
| 4501 |
+
"epoch": 0.36415362731152207,
|
| 4502 |
+
"grad_norm": 0.54744553565979,
|
| 4503 |
+
"learning_rate": 0.00011382225729569436,
|
| 4504 |
+
"loss": 1.2536,
|
| 4505 |
+
"step": 640
|
| 4506 |
+
},
|
| 4507 |
+
{
|
| 4508 |
+
"epoch": 0.3647226173541963,
|
| 4509 |
+
"grad_norm": 0.5593659281730652,
|
| 4510 |
+
"learning_rate": 0.00011359708160911641,
|
| 4511 |
+
"loss": 1.4138,
|
| 4512 |
+
"step": 641
|
| 4513 |
+
},
|
| 4514 |
+
{
|
| 4515 |
+
"epoch": 0.36529160739687055,
|
| 4516 |
+
"grad_norm": 0.5415304899215698,
|
| 4517 |
+
"learning_rate": 0.00011337183565913599,
|
| 4518 |
+
"loss": 1.5221,
|
| 4519 |
+
"step": 642
|
| 4520 |
+
},
|
| 4521 |
+
{
|
| 4522 |
+
"epoch": 0.3658605974395448,
|
| 4523 |
+
"grad_norm": 0.5653886198997498,
|
| 4524 |
+
"learning_rate": 0.00011314652060971955,
|
| 4525 |
+
"loss": 1.5221,
|
| 4526 |
+
"step": 643
|
| 4527 |
+
},
|
| 4528 |
+
{
|
| 4529 |
+
"epoch": 0.36642958748221904,
|
| 4530 |
+
"grad_norm": 0.5842243432998657,
|
| 4531 |
+
"learning_rate": 0.00011292113762519061,
|
| 4532 |
+
"loss": 1.501,
|
| 4533 |
+
"step": 644
|
| 4534 |
+
},
|
| 4535 |
+
{
|
| 4536 |
+
"epoch": 0.3669985775248933,
|
| 4537 |
+
"grad_norm": 0.5919954180717468,
|
| 4538 |
+
"learning_rate": 0.00011269568787022376,
|
| 4539 |
+
"loss": 1.5444,
|
| 4540 |
+
"step": 645
|
| 4541 |
+
},
|
| 4542 |
+
{
|
| 4543 |
+
"epoch": 0.3675675675675676,
|
| 4544 |
+
"grad_norm": 0.5867476463317871,
|
| 4545 |
+
"learning_rate": 0.00011247017250983865,
|
| 4546 |
+
"loss": 1.4897,
|
| 4547 |
+
"step": 646
|
| 4548 |
+
},
|
| 4549 |
+
{
|
| 4550 |
+
"epoch": 0.36813655761024183,
|
| 4551 |
+
"grad_norm": 0.5661168098449707,
|
| 4552 |
+
"learning_rate": 0.00011224459270939384,
|
| 4553 |
+
"loss": 1.3373,
|
| 4554 |
+
"step": 647
|
| 4555 |
+
},
|
| 4556 |
+
{
|
| 4557 |
+
"epoch": 0.3687055476529161,
|
| 4558 |
+
"grad_norm": 0.5516852736473083,
|
| 4559 |
+
"learning_rate": 0.00011201894963458106,
|
| 4560 |
+
"loss": 1.6209,
|
| 4561 |
+
"step": 648
|
| 4562 |
+
},
|
| 4563 |
+
{
|
| 4564 |
+
"epoch": 0.3692745376955903,
|
| 4565 |
+
"grad_norm": 0.615533709526062,
|
| 4566 |
+
"learning_rate": 0.00011179324445141883,
|
| 4567 |
+
"loss": 1.369,
|
| 4568 |
+
"step": 649
|
| 4569 |
+
},
|
| 4570 |
+
{
|
| 4571 |
+
"epoch": 0.36984352773826457,
|
| 4572 |
+
"grad_norm": 0.5543255805969238,
|
| 4573 |
+
"learning_rate": 0.00011156747832624679,
|
| 4574 |
+
"loss": 1.3172,
|
| 4575 |
+
"step": 650
|
| 4576 |
+
},
|
| 4577 |
+
{
|
| 4578 |
+
"epoch": 0.3704125177809388,
|
| 4579 |
+
"grad_norm": 0.5759336352348328,
|
| 4580 |
+
"learning_rate": 0.00011134165242571938,
|
| 4581 |
+
"loss": 1.5896,
|
| 4582 |
+
"step": 651
|
| 4583 |
+
},
|
| 4584 |
+
{
|
| 4585 |
+
"epoch": 0.3709815078236131,
|
| 4586 |
+
"grad_norm": 0.5587149858474731,
|
| 4587 |
+
"learning_rate": 0.00011111576791679994,
|
| 4588 |
+
"loss": 1.5963,
|
| 4589 |
+
"step": 652
|
| 4590 |
+
},
|
| 4591 |
+
{
|
| 4592 |
+
"epoch": 0.37155049786628735,
|
| 4593 |
+
"grad_norm": 0.5666396617889404,
|
| 4594 |
+
"learning_rate": 0.00011088982596675475,
|
| 4595 |
+
"loss": 1.5253,
|
| 4596 |
+
"step": 653
|
| 4597 |
+
},
|
| 4598 |
+
{
|
| 4599 |
+
"epoch": 0.3721194879089616,
|
| 4600 |
+
"grad_norm": 0.5888431668281555,
|
| 4601 |
+
"learning_rate": 0.00011066382774314683,
|
| 4602 |
+
"loss": 1.4419,
|
| 4603 |
+
"step": 654
|
| 4604 |
+
},
|
| 4605 |
+
{
|
| 4606 |
+
"epoch": 0.37268847795163584,
|
| 4607 |
+
"grad_norm": 0.5519063472747803,
|
| 4608 |
+
"learning_rate": 0.00011043777441383006,
|
| 4609 |
+
"loss": 1.5396,
|
| 4610 |
+
"step": 655
|
| 4611 |
+
},
|
| 4612 |
+
{
|
| 4613 |
+
"epoch": 0.3732574679943101,
|
| 4614 |
+
"grad_norm": 0.5812383890151978,
|
| 4615 |
+
"learning_rate": 0.00011021166714694297,
|
| 4616 |
+
"loss": 1.2045,
|
| 4617 |
+
"step": 656
|
| 4618 |
+
},
|
| 4619 |
+
{
|
| 4620 |
+
"epoch": 0.37382645803698433,
|
| 4621 |
+
"grad_norm": 0.5881744623184204,
|
| 4622 |
+
"learning_rate": 0.000109985507110903,
|
| 4623 |
+
"loss": 1.4078,
|
| 4624 |
+
"step": 657
|
| 4625 |
+
},
|
| 4626 |
+
{
|
| 4627 |
+
"epoch": 0.37439544807965863,
|
| 4628 |
+
"grad_norm": 0.5681930184364319,
|
| 4629 |
+
"learning_rate": 0.00010975929547440016,
|
| 4630 |
+
"loss": 1.4739,
|
| 4631 |
+
"step": 658
|
| 4632 |
+
},
|
| 4633 |
+
{
|
| 4634 |
+
"epoch": 0.3749644381223329,
|
| 4635 |
+
"grad_norm": 0.5596330165863037,
|
| 4636 |
+
"learning_rate": 0.0001095330334063911,
|
| 4637 |
+
"loss": 1.4085,
|
| 4638 |
+
"step": 659
|
| 4639 |
+
},
|
| 4640 |
+
{
|
| 4641 |
+
"epoch": 0.3755334281650071,
|
| 4642 |
+
"grad_norm": 0.5785601139068604,
|
| 4643 |
+
"learning_rate": 0.00010930672207609306,
|
| 4644 |
+
"loss": 1.4087,
|
| 4645 |
+
"step": 660
|
| 4646 |
+
},
|
| 4647 |
+
{
|
| 4648 |
+
"epoch": 0.37610241820768137,
|
| 4649 |
+
"grad_norm": 0.5467891097068787,
|
| 4650 |
+
"learning_rate": 0.00010908036265297794,
|
| 4651 |
+
"loss": 1.6924,
|
| 4652 |
+
"step": 661
|
| 4653 |
+
},
|
| 4654 |
+
{
|
| 4655 |
+
"epoch": 0.3766714082503556,
|
| 4656 |
+
"grad_norm": 0.5449764132499695,
|
| 4657 |
+
"learning_rate": 0.00010885395630676607,
|
| 4658 |
+
"loss": 1.5254,
|
| 4659 |
+
"step": 662
|
| 4660 |
+
},
|
| 4661 |
+
{
|
| 4662 |
+
"epoch": 0.37724039829302985,
|
| 4663 |
+
"grad_norm": 0.5570394396781921,
|
| 4664 |
+
"learning_rate": 0.00010862750420742031,
|
| 4665 |
+
"loss": 1.4218,
|
| 4666 |
+
"step": 663
|
| 4667 |
+
},
|
| 4668 |
+
{
|
| 4669 |
+
"epoch": 0.3778093883357041,
|
| 4670 |
+
"grad_norm": 0.5946861505508423,
|
| 4671 |
+
"learning_rate": 0.00010840100752513996,
|
| 4672 |
+
"loss": 1.6474,
|
| 4673 |
+
"step": 664
|
| 4674 |
+
},
|
| 4675 |
+
{
|
| 4676 |
+
"epoch": 0.3783783783783784,
|
| 4677 |
+
"grad_norm": 0.545051097869873,
|
| 4678 |
+
"learning_rate": 0.00010817446743035462,
|
| 4679 |
+
"loss": 1.459,
|
| 4680 |
+
"step": 665
|
| 4681 |
+
},
|
| 4682 |
+
{
|
| 4683 |
+
"epoch": 0.37894736842105264,
|
| 4684 |
+
"grad_norm": 0.5713635683059692,
|
| 4685 |
+
"learning_rate": 0.00010794788509371829,
|
| 4686 |
+
"loss": 1.44,
|
| 4687 |
+
"step": 666
|
| 4688 |
+
},
|
| 4689 |
+
{
|
| 4690 |
+
"epoch": 0.3795163584637269,
|
| 4691 |
+
"grad_norm": 0.5865978598594666,
|
| 4692 |
+
"learning_rate": 0.00010772126168610325,
|
| 4693 |
+
"loss": 1.5968,
|
| 4694 |
+
"step": 667
|
| 4695 |
+
},
|
| 4696 |
+
{
|
| 4697 |
+
"epoch": 0.38008534850640113,
|
| 4698 |
+
"grad_norm": 0.5625496506690979,
|
| 4699 |
+
"learning_rate": 0.00010749459837859408,
|
| 4700 |
+
"loss": 1.4018,
|
| 4701 |
+
"step": 668
|
| 4702 |
+
},
|
| 4703 |
+
{
|
| 4704 |
+
"epoch": 0.3806543385490754,
|
| 4705 |
+
"grad_norm": 0.5960560441017151,
|
| 4706 |
+
"learning_rate": 0.00010726789634248137,
|
| 4707 |
+
"loss": 1.5808,
|
| 4708 |
+
"step": 669
|
| 4709 |
+
},
|
| 4710 |
+
{
|
| 4711 |
+
"epoch": 0.3812233285917496,
|
| 4712 |
+
"grad_norm": 0.6137279868125916,
|
| 4713 |
+
"learning_rate": 0.00010704115674925604,
|
| 4714 |
+
"loss": 1.212,
|
| 4715 |
+
"step": 670
|
| 4716 |
+
},
|
| 4717 |
+
{
|
| 4718 |
+
"epoch": 0.3817923186344239,
|
| 4719 |
+
"grad_norm": 0.5478764772415161,
|
| 4720 |
+
"learning_rate": 0.00010681438077060291,
|
| 4721 |
+
"loss": 1.4701,
|
| 4722 |
+
"step": 671
|
| 4723 |
+
},
|
| 4724 |
+
{
|
| 4725 |
+
"epoch": 0.38236130867709817,
|
| 4726 |
+
"grad_norm": 0.6135146021842957,
|
| 4727 |
+
"learning_rate": 0.000106587569578395,
|
| 4728 |
+
"loss": 1.5428,
|
| 4729 |
+
"step": 672
|
| 4730 |
+
},
|
| 4731 |
+
{
|
| 4732 |
+
"epoch": 0.3829302987197724,
|
| 4733 |
+
"grad_norm": 0.5707561373710632,
|
| 4734 |
+
"learning_rate": 0.00010636072434468714,
|
| 4735 |
+
"loss": 1.5299,
|
| 4736 |
+
"step": 673
|
| 4737 |
+
},
|
| 4738 |
+
{
|
| 4739 |
+
"epoch": 0.38349928876244666,
|
| 4740 |
+
"grad_norm": 0.529769778251648,
|
| 4741 |
+
"learning_rate": 0.00010613384624171016,
|
| 4742 |
+
"loss": 1.4161,
|
| 4743 |
+
"step": 674
|
| 4744 |
+
},
|
| 4745 |
+
{
|
| 4746 |
+
"epoch": 0.3840682788051209,
|
| 4747 |
+
"grad_norm": 0.5672623515129089,
|
| 4748 |
+
"learning_rate": 0.00010590693644186474,
|
| 4749 |
+
"loss": 1.5084,
|
| 4750 |
+
"step": 675
|
| 4751 |
+
},
|
| 4752 |
+
{
|
| 4753 |
+
"epoch": 0.38463726884779514,
|
| 4754 |
+
"grad_norm": 0.5277720093727112,
|
| 4755 |
+
"learning_rate": 0.00010567999611771528,
|
| 4756 |
+
"loss": 1.2255,
|
| 4757 |
+
"step": 676
|
| 4758 |
+
},
|
| 4759 |
+
{
|
| 4760 |
+
"epoch": 0.38520625889046944,
|
| 4761 |
+
"grad_norm": 0.5478918552398682,
|
| 4762 |
+
"learning_rate": 0.00010545302644198405,
|
| 4763 |
+
"loss": 1.3878,
|
| 4764 |
+
"step": 677
|
| 4765 |
+
},
|
| 4766 |
+
{
|
| 4767 |
+
"epoch": 0.3857752489331437,
|
| 4768 |
+
"grad_norm": 0.5412498712539673,
|
| 4769 |
+
"learning_rate": 0.00010522602858754487,
|
| 4770 |
+
"loss": 1.5586,
|
| 4771 |
+
"step": 678
|
| 4772 |
+
},
|
| 4773 |
+
{
|
| 4774 |
+
"epoch": 0.38634423897581793,
|
| 4775 |
+
"grad_norm": 0.5770754814147949,
|
| 4776 |
+
"learning_rate": 0.00010499900372741718,
|
| 4777 |
+
"loss": 1.3127,
|
| 4778 |
+
"step": 679
|
| 4779 |
+
},
|
| 4780 |
+
{
|
| 4781 |
+
"epoch": 0.3869132290184922,
|
| 4782 |
+
"grad_norm": 0.5917402505874634,
|
| 4783 |
+
"learning_rate": 0.00010477195303476011,
|
| 4784 |
+
"loss": 1.3799,
|
| 4785 |
+
"step": 680
|
| 4786 |
+
},
|
| 4787 |
+
{
|
| 4788 |
+
"epoch": 0.3874822190611664,
|
| 4789 |
+
"grad_norm": 0.5400240421295166,
|
| 4790 |
+
"learning_rate": 0.00010454487768286612,
|
| 4791 |
+
"loss": 1.2999,
|
| 4792 |
+
"step": 681
|
| 4793 |
+
},
|
| 4794 |
+
{
|
| 4795 |
+
"epoch": 0.38805120910384067,
|
| 4796 |
+
"grad_norm": 0.5468504428863525,
|
| 4797 |
+
"learning_rate": 0.00010431777884515514,
|
| 4798 |
+
"loss": 1.3114,
|
| 4799 |
+
"step": 682
|
| 4800 |
+
},
|
| 4801 |
+
{
|
| 4802 |
+
"epoch": 0.3886201991465149,
|
| 4803 |
+
"grad_norm": 0.5608039498329163,
|
| 4804 |
+
"learning_rate": 0.00010409065769516856,
|
| 4805 |
+
"loss": 1.3888,
|
| 4806 |
+
"step": 683
|
| 4807 |
+
},
|
| 4808 |
+
{
|
| 4809 |
+
"epoch": 0.3891891891891892,
|
| 4810 |
+
"grad_norm": 0.5961167216300964,
|
| 4811 |
+
"learning_rate": 0.00010386351540656292,
|
| 4812 |
+
"loss": 1.5431,
|
| 4813 |
+
"step": 684
|
| 4814 |
+
},
|
| 4815 |
+
{
|
| 4816 |
+
"epoch": 0.38975817923186346,
|
| 4817 |
+
"grad_norm": 0.5718376040458679,
|
| 4818 |
+
"learning_rate": 0.00010363635315310414,
|
| 4819 |
+
"loss": 1.521,
|
| 4820 |
+
"step": 685
|
| 4821 |
+
},
|
| 4822 |
+
{
|
| 4823 |
+
"epoch": 0.3903271692745377,
|
| 4824 |
+
"grad_norm": 0.5798651576042175,
|
| 4825 |
+
"learning_rate": 0.00010340917210866118,
|
| 4826 |
+
"loss": 1.519,
|
| 4827 |
+
"step": 686
|
| 4828 |
+
},
|
| 4829 |
+
{
|
| 4830 |
+
"epoch": 0.39089615931721194,
|
| 4831 |
+
"grad_norm": 0.5611982941627502,
|
| 4832 |
+
"learning_rate": 0.00010318197344720018,
|
| 4833 |
+
"loss": 1.499,
|
| 4834 |
+
"step": 687
|
| 4835 |
+
},
|
| 4836 |
+
{
|
| 4837 |
+
"epoch": 0.3914651493598862,
|
| 4838 |
+
"grad_norm": 0.571074366569519,
|
| 4839 |
+
"learning_rate": 0.00010295475834277831,
|
| 4840 |
+
"loss": 1.4738,
|
| 4841 |
+
"step": 688
|
| 4842 |
+
},
|
| 4843 |
+
{
|
| 4844 |
+
"epoch": 0.39203413940256043,
|
| 4845 |
+
"grad_norm": 0.5722329020500183,
|
| 4846 |
+
"learning_rate": 0.00010272752796953766,
|
| 4847 |
+
"loss": 1.6584,
|
| 4848 |
+
"step": 689
|
| 4849 |
+
},
|
| 4850 |
+
{
|
| 4851 |
+
"epoch": 0.39260312944523473,
|
| 4852 |
+
"grad_norm": 0.5674881935119629,
|
| 4853 |
+
"learning_rate": 0.00010250028350169931,
|
| 4854 |
+
"loss": 1.5507,
|
| 4855 |
+
"step": 690
|
| 4856 |
+
},
|
| 4857 |
+
{
|
| 4858 |
+
"epoch": 0.393172119487909,
|
| 4859 |
+
"grad_norm": 0.5546680688858032,
|
| 4860 |
+
"learning_rate": 0.00010227302611355712,
|
| 4861 |
+
"loss": 1.297,
|
| 4862 |
+
"step": 691
|
| 4863 |
+
},
|
| 4864 |
+
{
|
| 4865 |
+
"epoch": 0.3937411095305832,
|
| 4866 |
+
"grad_norm": 0.5614904165267944,
|
| 4867 |
+
"learning_rate": 0.00010204575697947168,
|
| 4868 |
+
"loss": 1.4416,
|
| 4869 |
+
"step": 692
|
| 4870 |
+
},
|
| 4871 |
+
{
|
| 4872 |
+
"epoch": 0.39431009957325747,
|
| 4873 |
+
"grad_norm": 0.5829195380210876,
|
| 4874 |
+
"learning_rate": 0.00010181847727386433,
|
| 4875 |
+
"loss": 1.5031,
|
| 4876 |
+
"step": 693
|
| 4877 |
+
},
|
| 4878 |
+
{
|
| 4879 |
+
"epoch": 0.3948790896159317,
|
| 4880 |
+
"grad_norm": 0.5744046568870544,
|
| 4881 |
+
"learning_rate": 0.00010159118817121105,
|
| 4882 |
+
"loss": 1.4576,
|
| 4883 |
+
"step": 694
|
| 4884 |
+
},
|
| 4885 |
+
{
|
| 4886 |
+
"epoch": 0.39544807965860596,
|
| 4887 |
+
"grad_norm": 0.572902262210846,
|
| 4888 |
+
"learning_rate": 0.00010136389084603637,
|
| 4889 |
+
"loss": 1.5078,
|
| 4890 |
+
"step": 695
|
| 4891 |
+
},
|
| 4892 |
+
{
|
| 4893 |
+
"epoch": 0.3960170697012802,
|
| 4894 |
+
"grad_norm": 0.5696277618408203,
|
| 4895 |
+
"learning_rate": 0.00010113658647290723,
|
| 4896 |
+
"loss": 1.4636,
|
| 4897 |
+
"step": 696
|
| 4898 |
+
},
|
| 4899 |
+
{
|
| 4900 |
+
"epoch": 0.3960170697012802,
|
| 4901 |
+
"eval_loss": 1.4791862964630127,
|
| 4902 |
+
"eval_runtime": 15.3322,
|
| 4903 |
+
"eval_samples_per_second": 48.265,
|
| 4904 |
+
"eval_steps_per_second": 24.132,
|
| 4905 |
+
"step": 696
|
| 4906 |
}
|
| 4907 |
],
   "logging_steps": 1,
   ...
       "attributes": {}
     }
   },
+  "total_flos": 1.2374078814683136e+16,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null