Training in progress, step 1321, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5d4453d2c9c2becf5262f26bf63616202a19435dfe1526b9ecf432c196291d58
 size 17640136
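Each of these checkpoint files is stored as a Git LFS pointer: the repository itself only records the version, "oid sha256:" and "size" lines, while the binary payload lives in LFS storage. As a minimal sketch (the local path is only an assumption about where the checkpoint was downloaded), a downloaded file can be checked against the hash and size recorded in the new pointer above:

import hashlib
import os

def verify_lfs_pointer(local_path: str, expected_sha256: str, expected_size: int) -> bool:
    # Compare the on-disk file against the oid/size recorded in its LFS pointer.
    if os.path.getsize(local_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(local_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Values taken from the pointer above; the local path is hypothetical.
print(verify_lfs_pointer(
    "last-checkpoint/adapter_model.safetensors",
    "5d4453d2c9c2becf5262f26bf63616202a19435dfe1526b9ecf432c196291d58",
    17640136,
))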
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b5d136fef80f6f88a96c44394b25a4b97961f38d82a30cda5580bc381ff4adf5
 size 9569204
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cd2e7c36db06b88170c0c291465b36aecbd3055a15a728e6763938b84eae549c
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:94565402a24c2668bc4cd3a71a50fccf9c52b9a552eb6e6a7c3f099a1ebb7316
 size 1064
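The trainer_state.json diff below bumps global_step to 1321 and the epoch to roughly 0.0714, and appends one log_history entry per logged step. Assuming the standard Hugging Face Trainer layout for this file (field names are the usual ones, the local path is hypothetical), a minimal sketch of inspecting the resumed state:

import json

# Hypothetical local copy of the checkpoint directory.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"])              # 1321 for this checkpoint
print(state["epoch"])                    # ~0.0714
print(state["log_history"][-1]["step"])  # last step with a logged loss/learning rate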
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
 "eval_steps": 500,
-"global_step":
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -6958,6 +6958,2302 @@
 "learning_rate": 1.4560360491727231e-05,
 "loss": 1.5125,
 "step": 993
 }
 ],
 "logging_steps": 1,
@@ -6972,12 +9268,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 4,
 "trial_name": null,
 "trial_params": null
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
+"epoch": 0.07136104583637198,
 "eval_steps": 500,
+"global_step": 1321,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -6958,6 +6958,2302 @@
|
| 6958 |
"learning_rate": 1.4560360491727231e-05,
|
| 6959 |
"loss": 1.5125,
|
| 6960 |
"step": 993
|
| 6961 |
+
},
|
| 6962 |
+
{
|
| 6963 |
+
"epoch": 0.053696350916997544,
|
| 6964 |
+
"grad_norm": 1.2181261777877808,
|
| 6965 |
+
"learning_rate": 1.4476262008220026e-05,
|
| 6966 |
+
"loss": 1.5256,
|
| 6967 |
+
"step": 994
|
| 6968 |
+
},
|
| 6969 |
+
{
|
| 6970 |
+
"epoch": 0.0537503713907571,
|
| 6971 |
+
"grad_norm": 1.2430329322814941,
|
| 6972 |
+
"learning_rate": 1.4392365969416278e-05,
|
| 6973 |
+
"loss": 1.6166,
|
| 6974 |
+
"step": 995
|
| 6975 |
+
},
|
| 6976 |
+
{
|
| 6977 |
+
"epoch": 0.05380439186451665,
|
| 6978 |
+
"grad_norm": 1.220637321472168,
|
| 6979 |
+
"learning_rate": 1.4308672853427501e-05,
|
| 6980 |
+
"loss": 1.4196,
|
| 6981 |
+
"step": 996
|
| 6982 |
+
},
|
| 6983 |
+
{
|
| 6984 |
+
"epoch": 0.05385841233827621,
|
| 6985 |
+
"grad_norm": 1.2425544261932373,
|
| 6986 |
+
"learning_rate": 1.4225183137208776e-05,
|
| 6987 |
+
"loss": 1.5519,
|
| 6988 |
+
"step": 997
|
| 6989 |
+
},
|
| 6990 |
+
{
|
| 6991 |
+
"epoch": 0.05391243281203576,
|
| 6992 |
+
"grad_norm": 1.4333747625350952,
|
| 6993 |
+
"learning_rate": 1.4141897296556095e-05,
|
| 6994 |
+
"loss": 1.6011,
|
| 6995 |
+
"step": 998
|
| 6996 |
+
},
|
| 6997 |
+
{
|
| 6998 |
+
"epoch": 0.053966453285795316,
|
| 6999 |
+
"grad_norm": 1.2730928659439087,
|
| 7000 |
+
"learning_rate": 1.4058815806103542e-05,
|
| 7001 |
+
"loss": 1.4444,
|
| 7002 |
+
"step": 999
|
| 7003 |
+
},
|
| 7004 |
+
{
|
| 7005 |
+
"epoch": 0.05402047375955487,
|
| 7006 |
+
"grad_norm": 1.2862390279769897,
|
| 7007 |
+
"learning_rate": 1.3975939139320643e-05,
|
| 7008 |
+
"loss": 1.2617,
|
| 7009 |
+
"step": 1000
|
| 7010 |
+
},
|
| 7011 |
+
{
|
| 7012 |
+
"epoch": 0.054074494233314424,
|
| 7013 |
+
"grad_norm": 1.120908498764038,
|
| 7014 |
+
"learning_rate": 1.389326776850966e-05,
|
| 7015 |
+
"loss": 1.8733,
|
| 7016 |
+
"step": 1001
|
| 7017 |
+
},
|
| 7018 |
+
{
|
| 7019 |
+
"epoch": 0.05412851470707398,
|
| 7020 |
+
"grad_norm": 1.3222132921218872,
|
| 7021 |
+
"learning_rate": 1.3810802164802949e-05,
|
| 7022 |
+
"loss": 1.8702,
|
| 7023 |
+
"step": 1002
|
| 7024 |
+
},
|
| 7025 |
+
{
|
| 7026 |
+
"epoch": 0.05418253518083353,
|
| 7027 |
+
"grad_norm": 1.1889939308166504,
|
| 7028 |
+
"learning_rate": 1.372854279816016e-05,
|
| 7029 |
+
"loss": 1.7841,
|
| 7030 |
+
"step": 1003
|
| 7031 |
+
},
|
| 7032 |
+
{
|
| 7033 |
+
"epoch": 0.054236555654593094,
|
| 7034 |
+
"grad_norm": 1.1374034881591797,
|
| 7035 |
+
"learning_rate": 1.3646490137365691e-05,
|
| 7036 |
+
"loss": 1.7828,
|
| 7037 |
+
"step": 1004
|
| 7038 |
+
},
|
| 7039 |
+
{
|
| 7040 |
+
"epoch": 0.05429057612835265,
|
| 7041 |
+
"grad_norm": 1.3104393482208252,
|
| 7042 |
+
"learning_rate": 1.3564644650025893e-05,
|
| 7043 |
+
"loss": 1.8222,
|
| 7044 |
+
"step": 1005
|
| 7045 |
+
},
|
| 7046 |
+
{
|
| 7047 |
+
"epoch": 0.0543445966021122,
|
| 7048 |
+
"grad_norm": 1.2193419933319092,
|
| 7049 |
+
"learning_rate": 1.3483006802566544e-05,
|
| 7050 |
+
"loss": 1.8304,
|
| 7051 |
+
"step": 1006
|
| 7052 |
+
},
|
| 7053 |
+
{
|
| 7054 |
+
"epoch": 0.05439861707587176,
|
| 7055 |
+
"grad_norm": 1.2971333265304565,
|
| 7056 |
+
"learning_rate": 1.3401577060230003e-05,
|
| 7057 |
+
"loss": 1.8499,
|
| 7058 |
+
"step": 1007
|
| 7059 |
+
},
|
| 7060 |
+
{
|
| 7061 |
+
"epoch": 0.05445263754963131,
|
| 7062 |
+
"grad_norm": 1.3229304552078247,
|
| 7063 |
+
"learning_rate": 1.3320355887072771e-05,
|
| 7064 |
+
"loss": 1.8741,
|
| 7065 |
+
"step": 1008
|
| 7066 |
+
},
|
| 7067 |
+
{
|
| 7068 |
+
"epoch": 0.054506658023390865,
|
| 7069 |
+
"grad_norm": 1.3427389860153198,
|
| 7070 |
+
"learning_rate": 1.3239343745962679e-05,
|
| 7071 |
+
"loss": 1.8382,
|
| 7072 |
+
"step": 1009
|
| 7073 |
+
},
|
| 7074 |
+
{
|
| 7075 |
+
"epoch": 0.05456067849715042,
|
| 7076 |
+
"grad_norm": 1.4409838914871216,
|
| 7077 |
+
"learning_rate": 1.315854109857635e-05,
|
| 7078 |
+
"loss": 2.0588,
|
| 7079 |
+
"step": 1010
|
| 7080 |
+
},
|
| 7081 |
+
{
|
| 7082 |
+
"epoch": 0.054614698970909974,
|
| 7083 |
+
"grad_norm": 2.174842357635498,
|
| 7084 |
+
"learning_rate": 1.3077948405396518e-05,
|
| 7085 |
+
"loss": 2.3197,
|
| 7086 |
+
"step": 1011
|
| 7087 |
+
},
|
| 7088 |
+
{
|
| 7089 |
+
"epoch": 0.05466871944466953,
|
| 7090 |
+
"grad_norm": 1.4808334112167358,
|
| 7091 |
+
"learning_rate": 1.2997566125709404e-05,
|
| 7092 |
+
"loss": 1.9601,
|
| 7093 |
+
"step": 1012
|
| 7094 |
+
},
|
| 7095 |
+
{
|
| 7096 |
+
"epoch": 0.05472273991842908,
|
| 7097 |
+
"grad_norm": 1.4893524646759033,
|
| 7098 |
+
"learning_rate": 1.2917394717602121e-05,
|
| 7099 |
+
"loss": 1.9145,
|
| 7100 |
+
"step": 1013
|
| 7101 |
+
},
|
| 7102 |
+
{
|
| 7103 |
+
"epoch": 0.054776760392188636,
|
| 7104 |
+
"grad_norm": 1.7592617273330688,
|
| 7105 |
+
"learning_rate": 1.283743463796009e-05,
|
| 7106 |
+
"loss": 2.0259,
|
| 7107 |
+
"step": 1014
|
| 7108 |
+
},
|
| 7109 |
+
{
|
| 7110 |
+
"epoch": 0.0548307808659482,
|
| 7111 |
+
"grad_norm": 1.5603734254837036,
|
| 7112 |
+
"learning_rate": 1.2757686342464341e-05,
|
| 7113 |
+
"loss": 1.8773,
|
| 7114 |
+
"step": 1015
|
| 7115 |
+
},
|
| 7116 |
+
{
|
| 7117 |
+
"epoch": 0.05488480133970775,
|
| 7118 |
+
"grad_norm": 1.5266613960266113,
|
| 7119 |
+
"learning_rate": 1.2678150285589047e-05,
|
| 7120 |
+
"loss": 1.787,
|
| 7121 |
+
"step": 1016
|
| 7122 |
+
},
|
| 7123 |
+
{
|
| 7124 |
+
"epoch": 0.054938821813467306,
|
| 7125 |
+
"grad_norm": 1.2215336561203003,
|
| 7126 |
+
"learning_rate": 1.2598826920598772e-05,
|
| 7127 |
+
"loss": 1.7083,
|
| 7128 |
+
"step": 1017
|
| 7129 |
+
},
|
| 7130 |
+
{
|
| 7131 |
+
"epoch": 0.05499284228722686,
|
| 7132 |
+
"grad_norm": 1.7575608491897583,
|
| 7133 |
+
"learning_rate": 1.251971669954607e-05,
|
| 7134 |
+
"loss": 1.6572,
|
| 7135 |
+
"step": 1018
|
| 7136 |
+
},
|
| 7137 |
+
{
|
| 7138 |
+
"epoch": 0.055046862760986415,
|
| 7139 |
+
"grad_norm": 1.2288124561309814,
|
| 7140 |
+
"learning_rate": 1.2440820073268749e-05,
|
| 7141 |
+
"loss": 1.5677,
|
| 7142 |
+
"step": 1019
|
| 7143 |
+
},
|
| 7144 |
+
{
|
| 7145 |
+
"epoch": 0.05510088323474597,
|
| 7146 |
+
"grad_norm": 1.2805033922195435,
|
| 7147 |
+
"learning_rate": 1.2362137491387432e-05,
|
| 7148 |
+
"loss": 1.9218,
|
| 7149 |
+
"step": 1020
|
| 7150 |
+
},
|
| 7151 |
+
{
|
| 7152 |
+
"epoch": 0.05515490370850552,
|
| 7153 |
+
"grad_norm": 1.2743886709213257,
|
| 7154 |
+
"learning_rate": 1.2283669402302878e-05,
|
| 7155 |
+
"loss": 1.7698,
|
| 7156 |
+
"step": 1021
|
| 7157 |
+
},
|
| 7158 |
+
{
|
| 7159 |
+
"epoch": 0.05520892418226508,
|
| 7160 |
+
"grad_norm": 1.1983518600463867,
|
| 7161 |
+
"learning_rate": 1.2205416253193548e-05,
|
| 7162 |
+
"loss": 1.6106,
|
| 7163 |
+
"step": 1022
|
| 7164 |
+
},
|
| 7165 |
+
{
|
| 7166 |
+
"epoch": 0.05526294465602463,
|
| 7167 |
+
"grad_norm": 1.3729217052459717,
|
| 7168 |
+
"learning_rate": 1.2127378490012887e-05,
|
| 7169 |
+
"loss": 1.9497,
|
| 7170 |
+
"step": 1023
|
| 7171 |
+
},
|
| 7172 |
+
{
|
| 7173 |
+
"epoch": 0.055316965129784186,
|
| 7174 |
+
"grad_norm": 1.2054262161254883,
|
| 7175 |
+
"learning_rate": 1.2049556557486996e-05,
|
| 7176 |
+
"loss": 1.8165,
|
| 7177 |
+
"step": 1024
|
| 7178 |
+
},
|
| 7179 |
+
{
|
| 7180 |
+
"epoch": 0.05537098560354374,
|
| 7181 |
+
"grad_norm": 13.658334732055664,
|
| 7182 |
+
"learning_rate": 1.197195089911191e-05,
|
| 7183 |
+
"loss": 2.0341,
|
| 7184 |
+
"step": 1025
|
| 7185 |
+
},
|
| 7186 |
+
{
|
| 7187 |
+
"epoch": 0.055425006077303295,
|
| 7188 |
+
"grad_norm": 1.099611520767212,
|
| 7189 |
+
"learning_rate": 1.1894561957151218e-05,
|
| 7190 |
+
"loss": 1.4052,
|
| 7191 |
+
"step": 1026
|
| 7192 |
+
},
|
| 7193 |
+
{
|
| 7194 |
+
"epoch": 0.055479026551062856,
|
| 7195 |
+
"grad_norm": 1.1526668071746826,
|
| 7196 |
+
"learning_rate": 1.1817390172633403e-05,
|
| 7197 |
+
"loss": 1.6077,
|
| 7198 |
+
"step": 1027
|
| 7199 |
+
},
|
| 7200 |
+
{
|
| 7201 |
+
"epoch": 0.05553304702482241,
|
| 7202 |
+
"grad_norm": 1.2204948663711548,
|
| 7203 |
+
"learning_rate": 1.1740435985349446e-05,
|
| 7204 |
+
"loss": 1.6568,
|
| 7205 |
+
"step": 1028
|
| 7206 |
+
},
|
| 7207 |
+
{
|
| 7208 |
+
"epoch": 0.055587067498581964,
|
| 7209 |
+
"grad_norm": 1.135537028312683,
|
| 7210 |
+
"learning_rate": 1.1663699833850238e-05,
|
| 7211 |
+
"loss": 1.5043,
|
| 7212 |
+
"step": 1029
|
| 7213 |
+
},
|
| 7214 |
+
{
|
| 7215 |
+
"epoch": 0.05564108797234152,
|
| 7216 |
+
"grad_norm": 1.1534159183502197,
|
| 7217 |
+
"learning_rate": 1.158718215544416e-05,
|
| 7218 |
+
"loss": 1.6232,
|
| 7219 |
+
"step": 1030
|
| 7220 |
+
},
|
| 7221 |
+
{
|
| 7222 |
+
"epoch": 0.05569510844610107,
|
| 7223 |
+
"grad_norm": 1.303829550743103,
|
| 7224 |
+
"learning_rate": 1.1510883386194493e-05,
|
| 7225 |
+
"loss": 1.7634,
|
| 7226 |
+
"step": 1031
|
| 7227 |
+
},
|
| 7228 |
+
{
|
| 7229 |
+
"epoch": 0.05574912891986063,
|
| 7230 |
+
"grad_norm": 1.1330665349960327,
|
| 7231 |
+
"learning_rate": 1.1434803960917056e-05,
|
| 7232 |
+
"loss": 1.3724,
|
| 7233 |
+
"step": 1032
|
| 7234 |
+
},
|
| 7235 |
+
{
|
| 7236 |
+
"epoch": 0.05580314939362018,
|
| 7237 |
+
"grad_norm": 1.3623616695404053,
|
| 7238 |
+
"learning_rate": 1.1358944313177567e-05,
|
| 7239 |
+
"loss": 1.7594,
|
| 7240 |
+
"step": 1033
|
| 7241 |
+
},
|
| 7242 |
+
{
|
| 7243 |
+
"epoch": 0.055857169867379736,
|
| 7244 |
+
"grad_norm": 1.3287618160247803,
|
| 7245 |
+
"learning_rate": 1.1283304875289336e-05,
|
| 7246 |
+
"loss": 1.6169,
|
| 7247 |
+
"step": 1034
|
| 7248 |
+
},
|
| 7249 |
+
{
|
| 7250 |
+
"epoch": 0.05591119034113929,
|
| 7251 |
+
"grad_norm": 1.377344012260437,
|
| 7252 |
+
"learning_rate": 1.1207886078310691e-05,
|
| 7253 |
+
"loss": 1.8275,
|
| 7254 |
+
"step": 1035
|
| 7255 |
+
},
|
| 7256 |
+
{
|
| 7257 |
+
"epoch": 0.055965210814898844,
|
| 7258 |
+
"grad_norm": 1.9010348320007324,
|
| 7259 |
+
"learning_rate": 1.1132688352042597e-05,
|
| 7260 |
+
"loss": 1.6534,
|
| 7261 |
+
"step": 1036
|
| 7262 |
+
},
|
| 7263 |
+
{
|
| 7264 |
+
"epoch": 0.0560192312886584,
|
| 7265 |
+
"grad_norm": 1.2945020198822021,
|
| 7266 |
+
"learning_rate": 1.1057712125026116e-05,
|
| 7267 |
+
"loss": 1.7023,
|
| 7268 |
+
"step": 1037
|
| 7269 |
+
},
|
| 7270 |
+
{
|
| 7271 |
+
"epoch": 0.05607325176241796,
|
| 7272 |
+
"grad_norm": 1.2867969274520874,
|
| 7273 |
+
"learning_rate": 1.098295782454008e-05,
|
| 7274 |
+
"loss": 1.6357,
|
| 7275 |
+
"step": 1038
|
| 7276 |
+
},
|
| 7277 |
+
{
|
| 7278 |
+
"epoch": 0.056127272236177514,
|
| 7279 |
+
"grad_norm": 1.2244540452957153,
|
| 7280 |
+
"learning_rate": 1.090842587659851e-05,
|
| 7281 |
+
"loss": 1.4945,
|
| 7282 |
+
"step": 1039
|
| 7283 |
+
},
|
| 7284 |
+
{
|
| 7285 |
+
"epoch": 0.05618129270993707,
|
| 7286 |
+
"grad_norm": 1.1381012201309204,
|
| 7287 |
+
"learning_rate": 1.0834116705948367e-05,
|
| 7288 |
+
"loss": 1.4949,
|
| 7289 |
+
"step": 1040
|
| 7290 |
+
},
|
| 7291 |
+
{
|
| 7292 |
+
"epoch": 0.05623531318369662,
|
| 7293 |
+
"grad_norm": 1.2034605741500854,
|
| 7294 |
+
"learning_rate": 1.0760030736066951e-05,
|
| 7295 |
+
"loss": 1.3877,
|
| 7296 |
+
"step": 1041
|
| 7297 |
+
},
|
| 7298 |
+
{
|
| 7299 |
+
"epoch": 0.05628933365745618,
|
| 7300 |
+
"grad_norm": 1.2891396284103394,
|
| 7301 |
+
"learning_rate": 1.0686168389159645e-05,
|
| 7302 |
+
"loss": 1.6676,
|
| 7303 |
+
"step": 1042
|
| 7304 |
+
},
|
| 7305 |
+
{
|
| 7306 |
+
"epoch": 0.05634335413121573,
|
| 7307 |
+
"grad_norm": 1.1766115427017212,
|
| 7308 |
+
"learning_rate": 1.0612530086157385e-05,
|
| 7309 |
+
"loss": 1.497,
|
| 7310 |
+
"step": 1043
|
| 7311 |
+
},
|
| 7312 |
+
{
|
| 7313 |
+
"epoch": 0.056397374604975285,
|
| 7314 |
+
"grad_norm": 1.2941296100616455,
|
| 7315 |
+
"learning_rate": 1.0539116246714332e-05,
|
| 7316 |
+
"loss": 1.5792,
|
| 7317 |
+
"step": 1044
|
| 7318 |
+
},
|
| 7319 |
+
{
|
| 7320 |
+
"epoch": 0.05645139507873484,
|
| 7321 |
+
"grad_norm": 1.31011164188385,
|
| 7322 |
+
"learning_rate": 1.0465927289205452e-05,
|
| 7323 |
+
"loss": 1.6068,
|
| 7324 |
+
"step": 1045
|
| 7325 |
+
},
|
| 7326 |
+
{
|
| 7327 |
+
"epoch": 0.056505415552494394,
|
| 7328 |
+
"grad_norm": 1.4861119985580444,
|
| 7329 |
+
"learning_rate": 1.0392963630724167e-05,
|
| 7330 |
+
"loss": 1.6911,
|
| 7331 |
+
"step": 1046
|
| 7332 |
+
},
|
| 7333 |
+
{
|
| 7334 |
+
"epoch": 0.05655943602625395,
|
| 7335 |
+
"grad_norm": 1.1797404289245605,
|
| 7336 |
+
"learning_rate": 1.032022568707991e-05,
|
| 7337 |
+
"loss": 1.3533,
|
| 7338 |
+
"step": 1047
|
| 7339 |
+
},
|
| 7340 |
+
{
|
| 7341 |
+
"epoch": 0.0566134565000135,
|
| 7342 |
+
"grad_norm": 1.2491984367370605,
|
| 7343 |
+
"learning_rate": 1.024771387279585e-05,
|
| 7344 |
+
"loss": 1.4556,
|
| 7345 |
+
"step": 1048
|
| 7346 |
+
},
|
| 7347 |
+
{
|
| 7348 |
+
"epoch": 0.05666747697377306,
|
| 7349 |
+
"grad_norm": 1.2857673168182373,
|
| 7350 |
+
"learning_rate": 1.017542860110644e-05,
|
| 7351 |
+
"loss": 1.5037,
|
| 7352 |
+
"step": 1049
|
| 7353 |
+
},
|
| 7354 |
+
{
|
| 7355 |
+
"epoch": 0.05672149744753262,
|
| 7356 |
+
"grad_norm": 1.407544493675232,
|
| 7357 |
+
"learning_rate": 1.0103370283955094e-05,
|
| 7358 |
+
"loss": 1.3902,
|
| 7359 |
+
"step": 1050
|
| 7360 |
+
},
|
| 7361 |
+
{
|
| 7362 |
+
"epoch": 0.05677551792129217,
|
| 7363 |
+
"grad_norm": 3.0389063358306885,
|
| 7364 |
+
"learning_rate": 1.0031539331991857e-05,
|
| 7365 |
+
"loss": 2.0438,
|
| 7366 |
+
"step": 1051
|
| 7367 |
+
},
|
| 7368 |
+
{
|
| 7369 |
+
"epoch": 0.056829538395051726,
|
| 7370 |
+
"grad_norm": 1.0532433986663818,
|
| 7371 |
+
"learning_rate": 9.959936154571076e-06,
|
| 7372 |
+
"loss": 1.6606,
|
| 7373 |
+
"step": 1052
|
| 7374 |
+
},
|
| 7375 |
+
{
|
| 7376 |
+
"epoch": 0.05688355886881128,
|
| 7377 |
+
"grad_norm": 1.2268394231796265,
|
| 7378 |
+
"learning_rate": 9.888561159748993e-06,
|
| 7379 |
+
"loss": 1.987,
|
| 7380 |
+
"step": 1053
|
| 7381 |
+
},
|
| 7382 |
+
{
|
| 7383 |
+
"epoch": 0.056937579342570835,
|
| 7384 |
+
"grad_norm": 1.0923404693603516,
|
| 7385 |
+
"learning_rate": 9.817414754281529e-06,
|
| 7386 |
+
"loss": 1.6947,
|
| 7387 |
+
"step": 1054
|
| 7388 |
+
},
|
| 7389 |
+
{
|
| 7390 |
+
"epoch": 0.05699159981633039,
|
| 7391 |
+
"grad_norm": 1.1017249822616577,
|
| 7392 |
+
"learning_rate": 9.746497343621857e-06,
|
| 7393 |
+
"loss": 1.8617,
|
| 7394 |
+
"step": 1055
|
| 7395 |
+
},
|
| 7396 |
+
{
|
| 7397 |
+
"epoch": 0.05704562029008994,
|
| 7398 |
+
"grad_norm": 1.2731496095657349,
|
| 7399 |
+
"learning_rate": 9.675809331918162e-06,
|
| 7400 |
+
"loss": 1.9759,
|
| 7401 |
+
"step": 1056
|
| 7402 |
+
},
|
| 7403 |
+
{
|
| 7404 |
+
"epoch": 0.0570996407638495,
|
| 7405 |
+
"grad_norm": 1.5008797645568848,
|
| 7406 |
+
"learning_rate": 9.605351122011309e-06,
|
| 7407 |
+
"loss": 1.7447,
|
| 7408 |
+
"step": 1057
|
| 7409 |
+
},
|
| 7410 |
+
{
|
| 7411 |
+
"epoch": 0.05715366123760905,
|
| 7412 |
+
"grad_norm": 1.2653428316116333,
|
| 7413 |
+
"learning_rate": 9.535123115432575e-06,
|
| 7414 |
+
"loss": 1.7309,
|
| 7415 |
+
"step": 1058
|
| 7416 |
+
},
|
| 7417 |
+
{
|
| 7418 |
+
"epoch": 0.057207681711368606,
|
| 7419 |
+
"grad_norm": 1.3103406429290771,
|
| 7420 |
+
"learning_rate": 9.465125712401325e-06,
|
| 7421 |
+
"loss": 1.8808,
|
| 7422 |
+
"step": 1059
|
| 7423 |
+
},
|
| 7424 |
+
{
|
| 7425 |
+
"epoch": 0.05726170218512816,
|
| 7426 |
+
"grad_norm": 1.3540958166122437,
|
| 7427 |
+
"learning_rate": 9.395359311822728e-06,
|
| 7428 |
+
"loss": 2.2229,
|
| 7429 |
+
"step": 1060
|
| 7430 |
+
},
|
| 7431 |
+
{
|
| 7432 |
+
"epoch": 0.05731572265888772,
|
| 7433 |
+
"grad_norm": 1.469577431678772,
|
| 7434 |
+
"learning_rate": 9.325824311285564e-06,
|
| 7435 |
+
"loss": 1.959,
|
| 7436 |
+
"step": 1061
|
| 7437 |
+
},
|
| 7438 |
+
{
|
| 7439 |
+
"epoch": 0.057369743132647276,
|
| 7440 |
+
"grad_norm": 1.458709478378296,
|
| 7441 |
+
"learning_rate": 9.256521107059834e-06,
|
| 7442 |
+
"loss": 2.3391,
|
| 7443 |
+
"step": 1062
|
| 7444 |
+
},
|
| 7445 |
+
{
|
| 7446 |
+
"epoch": 0.05742376360640683,
|
| 7447 |
+
"grad_norm": 1.429618000984192,
|
| 7448 |
+
"learning_rate": 9.187450094094586e-06,
|
| 7449 |
+
"loss": 2.0604,
|
| 7450 |
+
"step": 1063
|
| 7451 |
+
},
|
| 7452 |
+
{
|
| 7453 |
+
"epoch": 0.057477784080166384,
|
| 7454 |
+
"grad_norm": 1.5230368375778198,
|
| 7455 |
+
"learning_rate": 9.118611666015663e-06,
|
| 7456 |
+
"loss": 1.9131,
|
| 7457 |
+
"step": 1064
|
| 7458 |
+
},
|
| 7459 |
+
{
|
| 7460 |
+
"epoch": 0.05753180455392594,
|
| 7461 |
+
"grad_norm": 1.622948408126831,
|
| 7462 |
+
"learning_rate": 9.050006215123419e-06,
|
| 7463 |
+
"loss": 1.9809,
|
| 7464 |
+
"step": 1065
|
| 7465 |
+
},
|
| 7466 |
+
{
|
| 7467 |
+
"epoch": 0.05758582502768549,
|
| 7468 |
+
"grad_norm": 1.599082350730896,
|
| 7469 |
+
"learning_rate": 8.981634132390493e-06,
|
| 7470 |
+
"loss": 1.9437,
|
| 7471 |
+
"step": 1066
|
| 7472 |
+
},
|
| 7473 |
+
{
|
| 7474 |
+
"epoch": 0.05763984550144505,
|
| 7475 |
+
"grad_norm": 1.75623619556427,
|
| 7476 |
+
"learning_rate": 8.913495807459632e-06,
|
| 7477 |
+
"loss": 1.7925,
|
| 7478 |
+
"step": 1067
|
| 7479 |
+
},
|
| 7480 |
+
{
|
| 7481 |
+
"epoch": 0.0576938659752046,
|
| 7482 |
+
"grad_norm": 1.4249552488327026,
|
| 7483 |
+
"learning_rate": 8.845591628641386e-06,
|
| 7484 |
+
"loss": 1.7081,
|
| 7485 |
+
"step": 1068
|
| 7486 |
+
},
|
| 7487 |
+
{
|
| 7488 |
+
"epoch": 0.057747886448964156,
|
| 7489 |
+
"grad_norm": 1.2949086427688599,
|
| 7490 |
+
"learning_rate": 8.777921982911996e-06,
|
| 7491 |
+
"loss": 1.5007,
|
| 7492 |
+
"step": 1069
|
| 7493 |
+
},
|
| 7494 |
+
{
|
| 7495 |
+
"epoch": 0.05780190692272371,
|
| 7496 |
+
"grad_norm": 1.1852211952209473,
|
| 7497 |
+
"learning_rate": 8.710487255911042e-06,
|
| 7498 |
+
"loss": 1.5343,
|
| 7499 |
+
"step": 1070
|
| 7500 |
+
},
|
| 7501 |
+
{
|
| 7502 |
+
"epoch": 0.057855927396483264,
|
| 7503 |
+
"grad_norm": 1.1454377174377441,
|
| 7504 |
+
"learning_rate": 8.643287831939439e-06,
|
| 7505 |
+
"loss": 1.7417,
|
| 7506 |
+
"step": 1071
|
| 7507 |
+
},
|
| 7508 |
+
{
|
| 7509 |
+
"epoch": 0.05790994787024282,
|
| 7510 |
+
"grad_norm": 1.0570250749588013,
|
| 7511 |
+
"learning_rate": 8.576324093957067e-06,
|
| 7512 |
+
"loss": 1.6771,
|
| 7513 |
+
"step": 1072
|
| 7514 |
+
},
|
| 7515 |
+
{
|
| 7516 |
+
"epoch": 0.05796396834400238,
|
| 7517 |
+
"grad_norm": 1.3288506269454956,
|
| 7518 |
+
"learning_rate": 8.509596423580712e-06,
|
| 7519 |
+
"loss": 1.6015,
|
| 7520 |
+
"step": 1073
|
| 7521 |
+
},
|
| 7522 |
+
{
|
| 7523 |
+
"epoch": 0.058017988817761934,
|
| 7524 |
+
"grad_norm": 1.2331101894378662,
|
| 7525 |
+
"learning_rate": 8.443105201081808e-06,
|
| 7526 |
+
"loss": 1.781,
|
| 7527 |
+
"step": 1074
|
| 7528 |
+
},
|
| 7529 |
+
{
|
| 7530 |
+
"epoch": 0.05807200929152149,
|
| 7531 |
+
"grad_norm": 1.1146860122680664,
|
| 7532 |
+
"learning_rate": 8.376850805384362e-06,
|
| 7533 |
+
"loss": 1.5536,
|
| 7534 |
+
"step": 1075
|
| 7535 |
+
},
|
| 7536 |
+
{
|
| 7537 |
+
"epoch": 0.05812602976528104,
|
| 7538 |
+
"grad_norm": 1.1584901809692383,
|
| 7539 |
+
"learning_rate": 8.310833614062651e-06,
|
| 7540 |
+
"loss": 1.7387,
|
| 7541 |
+
"step": 1076
|
| 7542 |
+
},
|
| 7543 |
+
{
|
| 7544 |
+
"epoch": 0.0581800502390406,
|
| 7545 |
+
"grad_norm": 1.4165846109390259,
|
| 7546 |
+
"learning_rate": 8.245054003339247e-06,
|
| 7547 |
+
"loss": 1.7003,
|
| 7548 |
+
"step": 1077
|
| 7549 |
+
},
|
| 7550 |
+
{
|
| 7551 |
+
"epoch": 0.05823407071280015,
|
| 7552 |
+
"grad_norm": 1.2820955514907837,
|
| 7553 |
+
"learning_rate": 8.17951234808272e-06,
|
| 7554 |
+
"loss": 1.4942,
|
| 7555 |
+
"step": 1078
|
| 7556 |
+
},
|
| 7557 |
+
{
|
| 7558 |
+
"epoch": 0.058288091186559705,
|
| 7559 |
+
"grad_norm": 3.8230369091033936,
|
| 7560 |
+
"learning_rate": 8.114209021805614e-06,
|
| 7561 |
+
"loss": 2.1472,
|
| 7562 |
+
"step": 1079
|
| 7563 |
+
},
|
| 7564 |
+
{
|
| 7565 |
+
"epoch": 0.05834211166031926,
|
| 7566 |
+
"grad_norm": 1.2670732736587524,
|
| 7567 |
+
"learning_rate": 8.049144396662239e-06,
|
| 7568 |
+
"loss": 1.699,
|
| 7569 |
+
"step": 1080
|
| 7570 |
+
},
|
| 7571 |
+
{
|
| 7572 |
+
"epoch": 0.058396132134078814,
|
| 7573 |
+
"grad_norm": 1.347354769706726,
|
| 7574 |
+
"learning_rate": 7.984318843446593e-06,
|
| 7575 |
+
"loss": 1.8685,
|
| 7576 |
+
"step": 1081
|
| 7577 |
+
},
|
| 7578 |
+
{
|
| 7579 |
+
"epoch": 0.05845015260783837,
|
| 7580 |
+
"grad_norm": 1.4094970226287842,
|
| 7581 |
+
"learning_rate": 7.919732731590213e-06,
|
| 7582 |
+
"loss": 1.6999,
|
| 7583 |
+
"step": 1082
|
| 7584 |
+
},
|
| 7585 |
+
{
|
| 7586 |
+
"epoch": 0.05850417308159792,
|
| 7587 |
+
"grad_norm": 1.5534085035324097,
|
| 7588 |
+
"learning_rate": 7.85538642916015e-06,
|
| 7589 |
+
"loss": 1.3972,
|
| 7590 |
+
"step": 1083
|
| 7591 |
+
},
|
| 7592 |
+
{
|
| 7593 |
+
"epoch": 0.058558193555357484,
|
| 7594 |
+
"grad_norm": 1.2333054542541504,
|
| 7595 |
+
"learning_rate": 7.791280302856752e-06,
|
| 7596 |
+
"loss": 1.8481,
|
| 7597 |
+
"step": 1084
|
| 7598 |
+
},
|
| 7599 |
+
{
|
| 7600 |
+
"epoch": 0.05861221402911704,
|
| 7601 |
+
"grad_norm": 1.2995198965072632,
|
| 7602 |
+
"learning_rate": 7.727414718011704e-06,
|
| 7603 |
+
"loss": 1.646,
|
| 7604 |
+
"step": 1085
|
| 7605 |
+
},
|
| 7606 |
+
{
|
| 7607 |
+
"epoch": 0.05866623450287659,
|
| 7608 |
+
"grad_norm": 1.2307482957839966,
|
| 7609 |
+
"learning_rate": 7.663790038585793e-06,
|
| 7610 |
+
"loss": 1.451,
|
| 7611 |
+
"step": 1086
|
| 7612 |
+
},
|
| 7613 |
+
{
|
| 7614 |
+
"epoch": 0.058720254976636146,
|
| 7615 |
+
"grad_norm": 1.27884840965271,
|
| 7616 |
+
"learning_rate": 7.600406627167006e-06,
|
| 7617 |
+
"loss": 1.6694,
|
| 7618 |
+
"step": 1087
|
| 7619 |
+
},
|
| 7620 |
+
{
|
| 7621 |
+
"epoch": 0.0587742754503957,
|
| 7622 |
+
"grad_norm": 1.2820953130722046,
|
| 7623 |
+
"learning_rate": 7.537264844968323e-06,
|
| 7624 |
+
"loss": 1.6182,
|
| 7625 |
+
"step": 1088
|
| 7626 |
+
},
|
| 7627 |
+
{
|
| 7628 |
+
"epoch": 0.058828295924155255,
|
| 7629 |
+
"grad_norm": 1.137299656867981,
|
| 7630 |
+
"learning_rate": 7.474365051825749e-06,
|
| 7631 |
+
"loss": 1.5432,
|
| 7632 |
+
"step": 1089
|
| 7633 |
+
},
|
| 7634 |
+
{
|
| 7635 |
+
"epoch": 0.05888231639791481,
|
| 7636 |
+
"grad_norm": 1.2992099523544312,
|
| 7637 |
+
"learning_rate": 7.4117076061961885e-06,
|
| 7638 |
+
"loss": 1.5485,
|
| 7639 |
+
"step": 1090
|
| 7640 |
+
},
|
| 7641 |
+
{
|
| 7642 |
+
"epoch": 0.058936336871674364,
|
| 7643 |
+
"grad_norm": 1.2578413486480713,
|
| 7644 |
+
"learning_rate": 7.349292865155499e-06,
|
| 7645 |
+
"loss": 1.3225,
|
| 7646 |
+
"step": 1091
|
| 7647 |
+
},
|
| 7648 |
+
{
|
| 7649 |
+
"epoch": 0.05899035734543392,
|
| 7650 |
+
"grad_norm": 1.2749775648117065,
|
| 7651 |
+
"learning_rate": 7.287121184396328e-06,
|
| 7652 |
+
"loss": 1.6233,
|
| 7653 |
+
"step": 1092
|
| 7654 |
+
},
|
| 7655 |
+
{
|
| 7656 |
+
"epoch": 0.05904437781919347,
|
| 7657 |
+
"grad_norm": 1.2771323919296265,
|
| 7658 |
+
"learning_rate": 7.225192918226214e-06,
|
| 7659 |
+
"loss": 1.7542,
|
| 7660 |
+
"step": 1093
|
| 7661 |
+
},
|
| 7662 |
+
{
|
| 7663 |
+
"epoch": 0.059098398292953026,
|
| 7664 |
+
"grad_norm": 1.5020413398742676,
|
| 7665 |
+
"learning_rate": 7.1635084195654845e-06,
|
| 7666 |
+
"loss": 1.6397,
|
| 7667 |
+
"step": 1094
|
| 7668 |
+
},
|
| 7669 |
+
{
|
| 7670 |
+
"epoch": 0.05915241876671259,
|
| 7671 |
+
"grad_norm": 1.2363253831863403,
|
| 7672 |
+
"learning_rate": 7.102068039945292e-06,
|
| 7673 |
+
"loss": 1.5622,
|
| 7674 |
+
"step": 1095
|
| 7675 |
+
},
|
| 7676 |
+
{
|
| 7677 |
+
"epoch": 0.05920643924047214,
|
| 7678 |
+
"grad_norm": 1.1676700115203857,
|
| 7679 |
+
"learning_rate": 7.04087212950556e-06,
|
| 7680 |
+
"loss": 1.4626,
|
| 7681 |
+
"step": 1096
|
| 7682 |
+
},
|
| 7683 |
+
{
|
| 7684 |
+
"epoch": 0.059260459714231696,
|
| 7685 |
+
"grad_norm": 1.2555509805679321,
|
| 7686 |
+
"learning_rate": 6.979921036993042e-06,
|
| 7687 |
+
"loss": 1.5168,
|
| 7688 |
+
"step": 1097
|
| 7689 |
+
},
|
| 7690 |
+
{
|
| 7691 |
+
"epoch": 0.05931448018799125,
|
| 7692 |
+
"grad_norm": 1.4620730876922607,
|
| 7693 |
+
"learning_rate": 6.919215109759275e-06,
|
| 7694 |
+
"loss": 1.7078,
|
| 7695 |
+
"step": 1098
|
| 7696 |
+
},
|
| 7697 |
+
{
|
| 7698 |
+
"epoch": 0.059368500661750805,
|
| 7699 |
+
"grad_norm": 1.191582441329956,
|
| 7700 |
+
"learning_rate": 6.858754693758695e-06,
|
| 7701 |
+
"loss": 1.4155,
|
| 7702 |
+
"step": 1099
|
| 7703 |
+
},
|
| 7704 |
+
{
|
| 7705 |
+
"epoch": 0.05942252113551036,
|
| 7706 |
+
"grad_norm": 1.3197084665298462,
|
| 7707 |
+
"learning_rate": 6.798540133546533e-06,
|
| 7708 |
+
"loss": 1.5325,
|
| 7709 |
+
"step": 1100
|
| 7710 |
+
},
|
| 7711 |
+
{
|
| 7712 |
+
"epoch": 0.05947654160926991,
|
| 7713 |
+
"grad_norm": 1.2210067510604858,
|
| 7714 |
+
"learning_rate": 6.738571772276997e-06,
|
| 7715 |
+
"loss": 1.6138,
|
| 7716 |
+
"step": 1101
|
| 7717 |
+
},
|
| 7718 |
+
{
|
| 7719 |
+
"epoch": 0.05953056208302947,
|
| 7720 |
+
"grad_norm": 1.0792077779769897,
|
| 7721 |
+
"learning_rate": 6.67884995170116e-06,
|
| 7722 |
+
"loss": 1.9985,
|
| 7723 |
+
"step": 1102
|
| 7724 |
+
},
|
| 7725 |
+
{
|
| 7726 |
+
"epoch": 0.05958458255678902,
|
| 7727 |
+
"grad_norm": 1.2398253679275513,
|
| 7728 |
+
"learning_rate": 6.619375012165169e-06,
|
| 7729 |
+
"loss": 1.7879,
|
| 7730 |
+
"step": 1103
|
| 7731 |
+
},
|
| 7732 |
+
{
|
| 7733 |
+
"epoch": 0.059638603030548576,
|
| 7734 |
+
"grad_norm": 1.2937052249908447,
|
| 7735 |
+
"learning_rate": 6.5601472926081766e-06,
|
| 7736 |
+
"loss": 1.6415,
|
| 7737 |
+
"step": 1104
|
| 7738 |
+
},
|
| 7739 |
+
{
|
| 7740 |
+
"epoch": 0.05969262350430813,
|
| 7741 |
+
"grad_norm": 1.1194475889205933,
|
| 7742 |
+
"learning_rate": 6.501167130560515e-06,
|
| 7743 |
+
"loss": 1.7831,
|
| 7744 |
+
"step": 1105
|
| 7745 |
+
},
|
| 7746 |
+
{
|
| 7747 |
+
"epoch": 0.059746643978067684,
|
| 7748 |
+
"grad_norm": 1.1798999309539795,
|
| 7749 |
+
"learning_rate": 6.442434862141666e-06,
|
| 7750 |
+
"loss": 1.6951,
|
| 7751 |
+
"step": 1106
|
| 7752 |
+
},
|
| 7753 |
+
{
|
| 7754 |
+
"epoch": 0.059800664451827246,
|
| 7755 |
+
"grad_norm": 1.2753263711929321,
|
| 7756 |
+
"learning_rate": 6.383950822058471e-06,
|
| 7757 |
+
"loss": 2.1072,
|
| 7758 |
+
"step": 1107
|
| 7759 |
+
},
|
| 7760 |
+
{
|
| 7761 |
+
"epoch": 0.0598546849255868,
|
| 7762 |
+
"grad_norm": 1.355417251586914,
|
| 7763 |
+
"learning_rate": 6.325715343603056e-06,
|
| 7764 |
+
"loss": 2.1765,
|
| 7765 |
+
"step": 1108
|
| 7766 |
+
},
|
| 7767 |
+
{
|
| 7768 |
+
"epoch": 0.059908705399346354,
|
| 7769 |
+
"grad_norm": 1.5232635736465454,
|
| 7770 |
+
"learning_rate": 6.267728758651132e-06,
|
| 7771 |
+
"loss": 1.8715,
|
| 7772 |
+
"step": 1109
|
| 7773 |
+
},
|
| 7774 |
+
{
|
| 7775 |
+
"epoch": 0.05996272587310591,
|
| 7776 |
+
"grad_norm": 1.2762973308563232,
|
| 7777 |
+
"learning_rate": 6.209991397659926e-06,
|
| 7778 |
+
"loss": 1.6723,
|
| 7779 |
+
"step": 1110
|
| 7780 |
+
},
|
| 7781 |
+
{
|
| 7782 |
+
"epoch": 0.06001674634686546,
|
| 7783 |
+
"grad_norm": 1.4560863971710205,
|
| 7784 |
+
"learning_rate": 6.152503589666425e-06,
|
| 7785 |
+
"loss": 1.9059,
|
| 7786 |
+
"step": 1111
|
| 7787 |
+
},
|
| 7788 |
+
{
|
| 7789 |
+
"epoch": 0.06007076682062502,
|
| 7790 |
+
"grad_norm": 1.6692922115325928,
|
| 7791 |
+
"learning_rate": 6.095265662285421e-06,
|
| 7792 |
+
"loss": 1.9578,
|
| 7793 |
+
"step": 1112
|
| 7794 |
+
},
|
| 7795 |
+
{
|
| 7796 |
+
"epoch": 0.06012478729438457,
|
| 7797 |
+
"grad_norm": 1.535998821258545,
|
| 7798 |
+
"learning_rate": 6.03827794170767e-06,
|
| 7799 |
+
"loss": 1.9427,
|
| 7800 |
+
"step": 1113
|
| 7801 |
+
},
|
| 7802 |
+
{
|
| 7803 |
+
"epoch": 0.060178807768144126,
|
| 7804 |
+
"grad_norm": 1.8105800151824951,
|
| 7805 |
+
"learning_rate": 5.981540752698034e-06,
|
| 7806 |
+
"loss": 2.0216,
|
| 7807 |
+
"step": 1114
|
| 7808 |
+
},
|
| 7809 |
+
{
|
| 7810 |
+
"epoch": 0.06023282824190368,
|
| 7811 |
+
"grad_norm": 2.117344617843628,
|
| 7812 |
+
"learning_rate": 5.9250544185936675e-06,
|
| 7813 |
+
"loss": 2.1284,
|
| 7814 |
+
"step": 1115
|
| 7815 |
+
},
|
| 7816 |
+
{
|
| 7817 |
+
"epoch": 0.060286848715663234,
|
| 7818 |
+
"grad_norm": 1.9187610149383545,
|
| 7819 |
+
"learning_rate": 5.868819261302088e-06,
|
| 7820 |
+
"loss": 1.8746,
|
| 7821 |
+
"step": 1116
|
| 7822 |
+
},
|
| 7823 |
+
{
|
| 7824 |
+
"epoch": 0.06034086918942279,
|
| 7825 |
+
"grad_norm": 1.2992360591888428,
|
| 7826 |
+
"learning_rate": 5.8128356012994375e-06,
|
| 7827 |
+
"loss": 1.7569,
|
| 7828 |
+
"step": 1117
|
| 7829 |
+
},
|
| 7830 |
+
{
|
| 7831 |
+
"epoch": 0.06039488966318235,
|
| 7832 |
+
"grad_norm": 1.1609463691711426,
|
| 7833 |
+
"learning_rate": 5.757103757628573e-06,
|
| 7834 |
+
"loss": 1.6138,
|
| 7835 |
+
"step": 1118
|
| 7836 |
+
},
|
| 7837 |
+
{
|
| 7838 |
+
"epoch": 0.060448910136941904,
|
| 7839 |
+
"grad_norm": 1.2777591943740845,
|
| 7840 |
+
"learning_rate": 5.701624047897314e-06,
|
| 7841 |
+
"loss": 1.9051,
|
| 7842 |
+
"step": 1119
|
| 7843 |
+
},
|
| 7844 |
+
{
|
| 7845 |
+
"epoch": 0.06050293061070146,
|
| 7846 |
+
"grad_norm": 1.2517619132995605,
|
| 7847 |
+
"learning_rate": 5.646396788276575e-06,
|
| 7848 |
+
"loss": 1.592,
|
| 7849 |
+
"step": 1120
|
| 7850 |
+
},
|
| 7851 |
+
{
|
| 7852 |
+
"epoch": 0.06055695108446101,
|
| 7853 |
+
"grad_norm": 1.1678047180175781,
|
| 7854 |
+
"learning_rate": 5.591422293498633e-06,
|
| 7855 |
+
"loss": 1.6107,
|
| 7856 |
+
"step": 1121
|
| 7857 |
+
},
|
| 7858 |
+
{
|
| 7859 |
+
"epoch": 0.06061097155822057,
|
| 7860 |
+
"grad_norm": 1.3541233539581299,
|
| 7861 |
+
"learning_rate": 5.536700876855255e-06,
|
| 7862 |
+
"loss": 1.6731,
|
| 7863 |
+
"step": 1122
|
| 7864 |
+
},
|
| 7865 |
+
{
|
| 7866 |
+
"epoch": 0.06066499203198012,
|
| 7867 |
+
"grad_norm": 1.3398699760437012,
|
| 7868 |
+
"learning_rate": 5.48223285019599e-06,
|
| 7869 |
+
"loss": 1.7476,
|
| 7870 |
+
"step": 1123
|
| 7871 |
+
},
|
| 7872 |
+
{
|
| 7873 |
+
"epoch": 0.060719012505739675,
|
| 7874 |
+
"grad_norm": 1.458789348602295,
|
| 7875 |
+
"learning_rate": 5.42801852392632e-06,
|
| 7876 |
+
"loss": 1.9192,
|
| 7877 |
+
"step": 1124
|
| 7878 |
+
},
|
| 7879 |
+
{
|
| 7880 |
+
"epoch": 0.06077303297949923,
|
| 7881 |
+
"grad_norm": 1.311495304107666,
|
| 7882 |
+
"learning_rate": 5.374058207005944e-06,
|
| 7883 |
+
"loss": 1.5868,
|
| 7884 |
+
"step": 1125
|
| 7885 |
+
},
|
| 7886 |
+
{
|
| 7887 |
+
"epoch": 0.060827053453258784,
|
| 7888 |
+
"grad_norm": 1.245322346687317,
|
| 7889 |
+
"learning_rate": 5.320352206946982e-06,
|
| 7890 |
+
"loss": 1.6681,
|
| 7891 |
+
"step": 1126
|
| 7892 |
+
},
|
| 7893 |
+
{
|
| 7894 |
+
"epoch": 0.06088107392701834,
|
| 7895 |
+
"grad_norm": 1.0974891185760498,
|
| 7896 |
+
"learning_rate": 5.2669008298122655e-06,
|
| 7897 |
+
"loss": 1.4482,
|
| 7898 |
+
"step": 1127
|
| 7899 |
+
},
|
| 7900 |
+
{
|
| 7901 |
+
"epoch": 0.06093509440077789,
|
| 7902 |
+
"grad_norm": 1.202539086341858,
|
| 7903 |
+
"learning_rate": 5.213704380213546e-06,
|
| 7904 |
+
"loss": 1.6439,
|
| 7905 |
+
"step": 1128
|
| 7906 |
+
},
|
| 7907 |
+
{
|
| 7908 |
+
"epoch": 0.060989114874537446,
|
| 7909 |
+
"grad_norm": 1.1249674558639526,
|
| 7910 |
+
"learning_rate": 5.160763161309767e-06,
|
| 7911 |
+
"loss": 1.5234,
|
| 7912 |
+
"step": 1129
|
| 7913 |
+
},
|
| 7914 |
+
{
|
| 7915 |
+
"epoch": 0.06104313534829701,
|
| 7916 |
+
"grad_norm": 2.8429598808288574,
|
| 7917 |
+
"learning_rate": 5.108077474805384e-06,
|
| 7918 |
+
"loss": 1.7275,
|
| 7919 |
+
"step": 1130
|
| 7920 |
+
},
|
| 7921 |
+
{
|
| 7922 |
+
"epoch": 0.06109715582205656,
|
| 7923 |
+
"grad_norm": 1.1571016311645508,
|
| 7924 |
+
"learning_rate": 5.0556476209485785e-06,
|
| 7925 |
+
"loss": 1.6557,
|
| 7926 |
+
"step": 1131
|
| 7927 |
+
},
|
| 7928 |
+
{
|
| 7929 |
+
"epoch": 0.061151176295816116,
|
| 7930 |
+
"grad_norm": 1.2372342348098755,
|
| 7931 |
+
"learning_rate": 5.0034738985296095e-06,
|
| 7932 |
+
"loss": 1.6383,
|
| 7933 |
+
"step": 1132
|
| 7934 |
+
},
|
| 7935 |
+
{
|
| 7936 |
+
"epoch": 0.06120519676957567,
|
| 7937 |
+
"grad_norm": 1.2769287824630737,
|
| 7938 |
+
"learning_rate": 4.951556604879048e-06,
|
| 7939 |
+
"loss": 1.774,
|
| 7940 |
+
"step": 1133
|
| 7941 |
+
},
|
| 7942 |
+
{
|
| 7943 |
+
"epoch": 0.061259217243335225,
|
| 7944 |
+
"grad_norm": 1.4651716947555542,
|
| 7945 |
+
"learning_rate": 4.899896035866125e-06,
|
| 7946 |
+
"loss": 1.8815,
|
| 7947 |
+
"step": 1134
|
| 7948 |
+
},
|
| 7949 |
+
{
|
| 7950 |
+
"epoch": 0.06131323771709478,
|
| 7951 |
+
"grad_norm": 1.2836899757385254,
|
| 7952 |
+
"learning_rate": 4.8484924858970324e-06,
|
| 7953 |
+
"loss": 1.395,
|
| 7954 |
+
"step": 1135
|
| 7955 |
+
},
|
| 7956 |
+
{
|
| 7957 |
+
"epoch": 0.06136725819085433,
|
| 7958 |
+
"grad_norm": 1.3144505023956299,
|
| 7959 |
+
"learning_rate": 4.7973462479132645e-06,
|
| 7960 |
+
"loss": 1.6538,
|
| 7961 |
+
"step": 1136
|
| 7962 |
+
},
|
| 7963 |
+
{
|
| 7964 |
+
"epoch": 0.06142127866461389,
|
| 7965 |
+
"grad_norm": 1.1569708585739136,
|
| 7966 |
+
"learning_rate": 4.746457613389904e-06,
|
| 7967 |
+
"loss": 1.5828,
|
| 7968 |
+
"step": 1137
|
| 7969 |
+
},
|
| 7970 |
+
{
|
| 7971 |
+
"epoch": 0.06147529913837344,
|
| 7972 |
+
"grad_norm": 1.16856849193573,
|
| 7973 |
+
"learning_rate": 4.695826872334036e-06,
|
| 7974 |
+
"loss": 1.7049,
|
| 7975 |
+
"step": 1138
|
| 7976 |
+
},
|
| 7977 |
+
{
|
| 7978 |
+
"epoch": 0.061529319612132996,
|
| 7979 |
+
"grad_norm": 1.1814018487930298,
|
| 7980 |
+
"learning_rate": 4.645454313282965e-06,
|
| 7981 |
+
"loss": 1.5373,
|
| 7982 |
+
"step": 1139
|
| 7983 |
+
},
|
| 7984 |
+
{
|
| 7985 |
+
"epoch": 0.06158334008589255,
|
| 7986 |
+
"grad_norm": 1.1851643323898315,
|
| 7987 |
+
"learning_rate": 4.595340223302735e-06,
|
| 7988 |
+
"loss": 1.6187,
|
| 7989 |
+
"step": 1140
|
| 7990 |
+
},
|
| 7991 |
+
{
|
| 7992 |
+
"epoch": 0.06163736055965211,
|
| 7993 |
+
"grad_norm": 1.19649076461792,
|
| 7994 |
+
"learning_rate": 4.545484887986368e-06,
|
| 7995 |
+
"loss": 1.5802,
|
| 7996 |
+
"step": 1141
|
| 7997 |
+
},
|
| 7998 |
+
{
|
| 7999 |
+
"epoch": 0.061691381033411666,
|
| 8000 |
+
"grad_norm": 1.5136679410934448,
|
| 8001 |
+
"learning_rate": 4.495888591452302e-06,
|
| 8002 |
+
"loss": 1.815,
|
| 8003 |
+
"step": 1142
|
| 8004 |
+
},
|
| 8005 |
+
{
|
| 8006 |
+
"epoch": 0.06174540150717122,
|
| 8007 |
+
"grad_norm": 1.2284526824951172,
|
| 8008 |
+
"learning_rate": 4.44655161634272e-06,
|
| 8009 |
+
"loss": 1.6512,
|
| 8010 |
+
"step": 1143
|
| 8011 |
+
},
|
| 8012 |
+
{
|
| 8013 |
+
"epoch": 0.061799421980930774,
|
| 8014 |
+
"grad_norm": 1.307127833366394,
|
| 8015 |
+
"learning_rate": 4.39747424382202e-06,
|
| 8016 |
+
"loss": 1.4766,
|
| 8017 |
+
"step": 1144
|
| 8018 |
+
},
|
| 8019 |
+
{
|
| 8020 |
+
"epoch": 0.06185344245469033,
|
| 8021 |
+
"grad_norm": 1.18198823928833,
|
| 8022 |
+
"learning_rate": 4.348656753575092e-06,
|
| 8023 |
+
"loss": 1.3779,
|
| 8024 |
+
"step": 1145
|
| 8025 |
+
},
|
| 8026 |
+
{
|
| 8027 |
+
"epoch": 0.06190746292844988,
|
| 8028 |
+
"grad_norm": 1.2481982707977295,
|
| 8029 |
+
"learning_rate": 4.3000994238058644e-06,
|
| 8030 |
+
"loss": 1.3331,
|
| 8031 |
+
"step": 1146
|
| 8032 |
+
},
|
| 8033 |
+
{
|
| 8034 |
+
"epoch": 0.06196148340220944,
|
| 8035 |
+
"grad_norm": 1.3494194746017456,
|
| 8036 |
+
"learning_rate": 4.251802531235593e-06,
|
| 8037 |
+
"loss": 1.6662,
|
| 8038 |
+
"step": 1147
|
| 8039 |
+
},
|
| 8040 |
+
{
|
| 8041 |
+
"epoch": 0.06201550387596899,
|
| 8042 |
+
"grad_norm": 1.419435739517212,
|
| 8043 |
+
"learning_rate": 4.203766351101385e-06,
|
| 8044 |
+
"loss": 1.6665,
|
| 8045 |
+
"step": 1148
|
| 8046 |
+
},
|
| 8047 |
+
{
|
| 8048 |
+
"epoch": 0.062069524349728546,
|
| 8049 |
+
"grad_norm": 1.3820433616638184,
|
| 8050 |
+
"learning_rate": 4.155991157154554e-06,
|
| 8051 |
+
"loss": 1.457,
|
| 8052 |
+
"step": 1149
|
| 8053 |
+
},
|
| 8054 |
+
{
|
| 8055 |
+
"epoch": 0.0621235448234881,
|
| 8056 |
+
"grad_norm": 1.477196216583252,
|
| 8057 |
+
"learning_rate": 4.108477221659091e-06,
|
| 8058 |
+
"loss": 1.4296,
|
| 8059 |
+
"step": 1150
|
| 8060 |
+
},
|
| 8061 |
+
{
|
| 8062 |
+
"epoch": 0.062177565297247654,
|
| 8063 |
+
"grad_norm": 3.461652994155884,
|
| 8064 |
+
"learning_rate": 4.061224815390119e-06,
|
| 8065 |
+
"loss": 2.1687,
|
| 8066 |
+
"step": 1151
|
| 8067 |
+
},
|
| 8068 |
+
{
|
| 8069 |
+
"epoch": 0.06223158577100721,
|
| 8070 |
+
"grad_norm": 1.1380836963653564,
|
| 8071 |
+
"learning_rate": 4.0142342076323615e-06,
|
| 8072 |
+
"loss": 2.0233,
|
| 8073 |
+
"step": 1152
|
| 8074 |
+
},
|
| 8075 |
+
{
|
| 8076 |
+
"epoch": 0.06228560624476677,
|
| 8077 |
+
"grad_norm": 1.6371618509292603,
|
| 8078 |
+
"learning_rate": 3.967505666178556e-06,
|
| 8079 |
+
"loss": 2.0356,
|
| 8080 |
+
"step": 1153
|
| 8081 |
+
},
|
| 8082 |
+
{
|
| 8083 |
+
"epoch": 0.062339626718526324,
|
| 8084 |
+
"grad_norm": 1.0716702938079834,
|
| 8085 |
+
"learning_rate": 3.921039457328007e-06,
|
| 8086 |
+
"loss": 1.6833,
|
| 8087 |
+
"step": 1154
|
| 8088 |
+
},
|
| 8089 |
+
{
|
| 8090 |
+
"epoch": 0.06239364719228588,
|
| 8091 |
+
"grad_norm": 1.35277259349823,
|
| 8092 |
+
"learning_rate": 3.8748358458849555e-06,
|
| 8093 |
+
"loss": 1.7738,
|
| 8094 |
+
"step": 1155
|
| 8095 |
+
},
|
| 8096 |
+
{
|
| 8097 |
+
"epoch": 0.06244766766604543,
|
| 8098 |
+
"grad_norm": 2.2490251064300537,
|
| 8099 |
+
"learning_rate": 3.828895095157203e-06,
|
| 8100 |
+
"loss": 1.8131,
|
| 8101 |
+
"step": 1156
|
| 8102 |
+
},
|
| 8103 |
+
{
|
| 8104 |
+
"epoch": 0.06250168813980499,
|
| 8105 |
+
"grad_norm": 1.1562142372131348,
|
| 8106 |
+
"learning_rate": 3.783217466954503e-06,
|
| 8107 |
+
"loss": 1.7693,
|
| 8108 |
+
"step": 1157
|
| 8109 |
+
},
|
| 8110 |
+
{
|
| 8111 |
+
"epoch": 0.06255570861356453,
|
| 8112 |
+
"grad_norm": 1.3527991771697998,
|
| 8113 |
+
"learning_rate": 3.737803221587144e-06,
|
| 8114 |
+
"loss": 1.7414,
|
| 8115 |
+
"step": 1158
|
| 8116 |
+
},
|
| 8117 |
+
{
|
| 8118 |
+
"epoch": 0.0626097290873241,
|
| 8119 |
+
"grad_norm": 1.3626788854599,
|
| 8120 |
+
"learning_rate": 3.6926526178644007e-06,
|
| 8121 |
+
"loss": 1.7507,
|
| 8122 |
+
"step": 1159
|
| 8123 |
+
},
|
| 8124 |
+
{
|
| 8125 |
+
"epoch": 0.06266374956108366,
|
| 8126 |
+
"grad_norm": 1.3011306524276733,
|
| 8127 |
+
"learning_rate": 3.647765913093132e-06,
|
| 8128 |
+
"loss": 1.7361,
|
| 8129 |
+
"step": 1160
|
| 8130 |
+
},
|
| 8131 |
+
{
|
| 8132 |
+
"epoch": 0.0627177700348432,
|
| 8133 |
+
"grad_norm": 1.403554081916809,
|
| 8134 |
+
"learning_rate": 3.603143363076217e-06,
|
| 8135 |
+
"loss": 1.9733,
|
| 8136 |
+
"step": 1161
|
| 8137 |
+
},
|
| 8138 |
+
{
|
| 8139 |
+
"epoch": 0.06277179050860276,
|
| 8140 |
+
"grad_norm": 1.296700358390808,
|
| 8141 |
+
"learning_rate": 3.558785222111216e-06,
|
| 8142 |
+
"loss": 1.6777,
|
| 8143 |
+
"step": 1162
|
| 8144 |
+
},
|
| 8145 |
+
{
|
| 8146 |
+
"epoch": 0.06282581098236231,
|
| 8147 |
+
"grad_norm": 1.617156982421875,
|
| 8148 |
+
"learning_rate": 3.5146917429888126e-06,
|
| 8149 |
+
"loss": 2.1363,
|
| 8150 |
+
"step": 1163
|
| 8151 |
+
},
|
| 8152 |
+
{
|
| 8153 |
+
"epoch": 0.06287983145612187,
|
| 8154 |
+
"grad_norm": 1.359641432762146,
|
| 8155 |
+
"learning_rate": 3.4708631769914566e-06,
|
| 8156 |
+
"loss": 1.7016,
|
| 8157 |
+
"step": 1164
|
| 8158 |
+
},
|
| 8159 |
+
{
|
| 8160 |
+
"epoch": 0.06293385192988142,
|
| 8161 |
+
"grad_norm": 1.302148699760437,
|
| 8162 |
+
"learning_rate": 3.427299773891868e-06,
|
| 8163 |
+
"loss": 1.8483,
|
| 8164 |
+
"step": 1165
|
| 8165 |
+
},
|
| 8166 |
+
{
|
| 8167 |
+
"epoch": 0.06298787240364098,
|
| 8168 |
+
"grad_norm": 1.5809893608093262,
|
| 8169 |
+
"learning_rate": 3.3840017819516514e-06,
|
| 8170 |
+
"loss": 1.7038,
|
| 8171 |
+
"step": 1166
|
| 8172 |
+
},
|
| 8173 |
+
{
|
| 8174 |
+
"epoch": 0.06304189287740053,
|
| 8175 |
+
"grad_norm": 1.4229636192321777,
|
| 8176 |
+
"learning_rate": 3.340969447919873e-06,
|
| 8177 |
+
"loss": 1.8537,
|
| 8178 |
+
"step": 1167
|
| 8179 |
+
},
|
| 8180 |
+
{
|
| 8181 |
+
"epoch": 0.06309591335116009,
|
| 8182 |
+
"grad_norm": 1.3419435024261475,
|
| 8183 |
+
"learning_rate": 3.298203017031659e-06,
|
| 8184 |
+
"loss": 1.7454,
|
| 8185 |
+
"step": 1168
|
| 8186 |
+
},
|
| 8187 |
+
{
|
| 8188 |
+
"epoch": 0.06314993382491964,
|
| 8189 |
+
"grad_norm": 1.2680766582489014,
|
| 8190 |
+
"learning_rate": 3.2557027330067658e-06,
|
| 8191 |
+
"loss": 1.7434,
|
| 8192 |
+
"step": 1169
|
| 8193 |
+
},
|
| 8194 |
+
{
|
| 8195 |
+
"epoch": 0.0632039542986792,
|
| 8196 |
+
"grad_norm": 1.2496750354766846,
|
| 8197 |
+
"learning_rate": 3.213468838048267e-06,
|
| 8198 |
+
"loss": 1.5201,
|
| 8199 |
+
"step": 1170
|
| 8200 |
+
},
|
| 8201 |
+
{
|
| 8202 |
+
"epoch": 0.06325797477243876,
|
| 8203 |
+
"grad_norm": 1.1278547048568726,
|
| 8204 |
+
"learning_rate": 3.171501572841057e-06,
|
| 8205 |
+
"loss": 1.5326,
|
| 8206 |
+
"step": 1171
|
| 8207 |
+
},
|
| 8208 |
+
{
|
| 8209 |
+
"epoch": 0.06331199524619831,
|
| 8210 |
+
"grad_norm": 1.2952123880386353,
|
| 8211 |
+
"learning_rate": 3.1298011765506008e-06,
|
| 8212 |
+
"loss": 1.7643,
|
| 8213 |
+
"step": 1172
|
| 8214 |
+
},
|
| 8215 |
+
{
|
| 8216 |
+
"epoch": 0.06336601571995787,
|
| 8217 |
+
"grad_norm": 1.1559102535247803,
|
| 8218 |
+
"learning_rate": 3.0883678868214806e-06,
|
| 8219 |
+
"loss": 1.4957,
|
| 8220 |
+
"step": 1173
|
| 8221 |
+
},
|
| 8222 |
+
{
|
| 8223 |
+
"epoch": 0.06342003619371742,
|
| 8224 |
+
"grad_norm": 1.2650048732757568,
|
| 8225 |
+
"learning_rate": 3.0472019397761064e-06,
|
| 8226 |
+
"loss": 1.6619,
|
| 8227 |
+
"step": 1174
|
| 8228 |
+
},
|
| 8229 |
+
{
|
| 8230 |
+
"epoch": 0.06347405666747698,
|
| 8231 |
+
"grad_norm": 1.296645998954773,
|
| 8232 |
+
"learning_rate": 3.0063035700133026e-06,
|
| 8233 |
+
"loss": 1.6516,
|
| 8234 |
+
"step": 1175
|
| 8235 |
+
},
|
| 8236 |
+
{
|
| 8237 |
+
"epoch": 0.06352807714123652,
|
| 8238 |
+
"grad_norm": 1.3109478950500488,
|
| 8239 |
+
"learning_rate": 2.9656730106070617e-06,
|
| 8240 |
+
"loss": 1.774,
|
| 8241 |
+
"step": 1176
|
| 8242 |
+
},
|
| 8243 |
+
{
|
| 8244 |
+
"epoch": 0.06358209761499609,
|
| 8245 |
+
"grad_norm": 1.2235826253890991,
|
| 8246 |
+
"learning_rate": 2.925310493105099e-06,
|
| 8247 |
+
"loss": 1.7472,
|
| 8248 |
+
"step": 1177
|
| 8249 |
+
},
|
| 8250 |
+
{
|
| 8251 |
+
"epoch": 0.06363611808875563,
|
| 8252 |
+
"grad_norm": 1.3161101341247559,
|
| 8253 |
+
"learning_rate": 2.8852162475276555e-06,
|
| 8254 |
+
"loss": 1.8065,
|
| 8255 |
+
"step": 1178
|
| 8256 |
+
},
|
| 8257 |
+
{
|
| 8258 |
+
"epoch": 0.0636901385625152,
|
| 8259 |
+
"grad_norm": 1.0841140747070312,
|
| 8260 |
+
"learning_rate": 2.84539050236608e-06,
|
| 8261 |
+
"loss": 1.3016,
|
| 8262 |
+
"step": 1179
|
| 8263 |
+
},
|
| 8264 |
+
{
|
| 8265 |
+
"epoch": 0.06374415903627474,
|
| 8266 |
+
"grad_norm": 1.3080739974975586,
|
| 8267 |
+
"learning_rate": 2.8058334845816213e-06,
|
| 8268 |
+
"loss": 1.7162,
|
| 8269 |
+
"step": 1180
|
| 8270 |
+
},
|
| 8271 |
+
{
|
| 8272 |
+
"epoch": 0.0637981795100343,
|
| 8273 |
+
"grad_norm": 1.2737281322479248,
|
| 8274 |
+
"learning_rate": 2.7665454196040664e-06,
|
| 8275 |
+
"loss": 1.3938,
|
| 8276 |
+
"step": 1181
|
| 8277 |
+
},
|
| 8278 |
+
{
|
| 8279 |
+
"epoch": 0.06385219998379386,
|
| 8280 |
+
"grad_norm": 1.3282356262207031,
|
| 8281 |
+
"learning_rate": 2.7275265313304656e-06,
|
| 8282 |
+
"loss": 1.6578,
|
| 8283 |
+
"step": 1182
|
| 8284 |
+
},
|
| 8285 |
+
{
|
| 8286 |
+
"epoch": 0.06390622045755341,
|
| 8287 |
+
"grad_norm": 1.3617740869522095,
|
| 8288 |
+
"learning_rate": 2.6887770421238857e-06,
|
| 8289 |
+
"loss": 1.7983,
|
| 8290 |
+
"step": 1183
|
| 8291 |
+
},
|
| 8292 |
+
{
|
| 8293 |
+
"epoch": 0.06396024093131297,
|
| 8294 |
+
"grad_norm": 1.2881118059158325,
|
| 8295 |
+
"learning_rate": 2.650297172812144e-06,
|
| 8296 |
+
"loss": 1.6528,
|
| 8297 |
+
"step": 1184
|
| 8298 |
+
},
|
| 8299 |
+
{
|
| 8300 |
+
"epoch": 0.06401426140507252,
|
| 8301 |
+
"grad_norm": 1.1544610261917114,
|
| 8302 |
+
"learning_rate": 2.612087142686487e-06,
|
| 8303 |
+
"loss": 1.573,
|
| 8304 |
+
"step": 1185
|
| 8305 |
+
},
|
| 8306 |
+
{
|
| 8307 |
+
"epoch": 0.06406828187883208,
|
| 8308 |
+
"grad_norm": 1.206716775894165,
|
| 8309 |
+
"learning_rate": 2.574147169500435e-06,
|
| 8310 |
+
"loss": 1.5905,
|
| 8311 |
+
"step": 1186
|
| 8312 |
+
},
|
| 8313 |
+
{
|
| 8314 |
+
"epoch": 0.06412230235259163,
|
| 8315 |
+
"grad_norm": 1.2147915363311768,
|
| 8316 |
+
"learning_rate": 2.5364774694684623e-06,
|
| 8317 |
+
"loss": 1.653,
|
| 8318 |
+
"step": 1187
|
| 8319 |
+
},
|
| 8320 |
+
{
|
| 8321 |
+
"epoch": 0.06417632282635119,
|
| 8322 |
+
"grad_norm": 1.1852755546569824,
|
| 8323 |
+
"learning_rate": 2.4990782572647975e-06,
|
| 8324 |
+
"loss": 1.3946,
|
| 8325 |
+
"step": 1188
|
| 8326 |
+
},
|
| 8327 |
+
{
|
| 8328 |
+
"epoch": 0.06423034330011074,
|
| 8329 |
+
"grad_norm": 1.184801459312439,
|
| 8330 |
+
"learning_rate": 2.4619497460222184e-06,
|
| 8331 |
+
"loss": 1.5414,
|
| 8332 |
+
"step": 1189
|
| 8333 |
+
},
|
| 8334 |
+
{
|
| 8335 |
+
"epoch": 0.0642843637738703,
|
| 8336 |
+
"grad_norm": 1.2475887537002563,
|
| 8337 |
+
"learning_rate": 2.4250921473307874e-06,
|
| 8338 |
+
"loss": 1.4186,
|
| 8339 |
+
"step": 1190
|
| 8340 |
+
},
|
| 8341 |
+
{
|
| 8342 |
+
"epoch": 0.06433838424762985,
|
| 8343 |
+
"grad_norm": 1.2456693649291992,
|
| 8344 |
+
"learning_rate": 2.388505671236696e-06,
|
| 8345 |
+
"loss": 1.4534,
|
| 8346 |
+
"step": 1191
|
| 8347 |
+
},
|
| 8348 |
+
{
|
| 8349 |
+
"epoch": 0.0643924047213894,
|
| 8350 |
+
"grad_norm": 1.3935133218765259,
|
| 8351 |
+
"learning_rate": 2.3521905262410273e-06,
|
| 8352 |
+
"loss": 1.9064,
|
| 8353 |
+
"step": 1192
|
| 8354 |
+
},
|
| 8355 |
+
{
|
| 8356 |
+
"epoch": 0.06444642519514897,
|
| 8357 |
+
"grad_norm": 1.1352630853652954,
|
| 8358 |
+
"learning_rate": 2.316146919298623e-06,
|
| 8359 |
+
"loss": 1.5696,
|
| 8360 |
+
"step": 1193
|
| 8361 |
+
},
|
| 8362 |
+
{
|
| 8363 |
+
"epoch": 0.06450044566890852,
|
| 8364 |
+
"grad_norm": 1.3695409297943115,
|
| 8365 |
+
"learning_rate": 2.280375055816819e-06,
|
| 8366 |
+
"loss": 1.4925,
|
| 8367 |
+
"step": 1194
|
| 8368 |
+
},
|
| 8369 |
+
{
|
| 8370 |
+
"epoch": 0.06455446614266808,
|
| 8371 |
+
"grad_norm": 1.2332143783569336,
|
| 8372 |
+
"learning_rate": 2.2448751396543787e-06,
|
| 8373 |
+
"loss": 1.4993,
|
| 8374 |
+
"step": 1195
|
| 8375 |
+
},
|
| 8376 |
+
{
|
| 8377 |
+
"epoch": 0.06460848661642762,
|
| 8378 |
+
"grad_norm": 1.363611102104187,
|
| 8379 |
+
"learning_rate": 2.2096473731202327e-06,
|
| 8380 |
+
"loss": 1.5486,
|
| 8381 |
+
"step": 1196
|
| 8382 |
+
},
|
| 8383 |
+
{
|
| 8384 |
+
"epoch": 0.06466250709018719,
|
| 8385 |
+
"grad_norm": 1.2684926986694336,
|
| 8386 |
+
"learning_rate": 2.1746919569723855e-06,
|
| 8387 |
+
"loss": 1.5419,
|
| 8388 |
+
"step": 1197
|
| 8389 |
+
},
|
| 8390 |
+
{
|
| 8391 |
+
"epoch": 0.06471652756394673,
|
| 8392 |
+
"grad_norm": 1.2291020154953003,
|
| 8393 |
+
"learning_rate": 2.1400090904167504e-06,
|
| 8394 |
+
"loss": 1.5188,
|
| 8395 |
+
"step": 1198
|
| 8396 |
+
},
|
| 8397 |
+
{
|
| 8398 |
+
"epoch": 0.0647705480377063,
|
| 8399 |
+
"grad_norm": 1.6062101125717163,
|
| 8400 |
+
"learning_rate": 2.105598971106043e-06,
|
| 8401 |
+
"loss": 1.5267,
|
| 8402 |
+
"step": 1199
|
| 8403 |
+
},
|
| 8404 |
+
{
|
| 8405 |
+
"epoch": 0.06482456851146584,
|
| 8406 |
+
"grad_norm": 1.6337372064590454,
|
| 8407 |
+
"learning_rate": 2.0714617951385907e-06,
|
| 8408 |
+
"loss": 1.4514,
|
| 8409 |
+
"step": 1200
|
| 8410 |
+
},
|
| 8411 |
+
{
|
| 8412 |
+
"epoch": 0.0648785889852254,
|
| 8413 |
+
"grad_norm": 0.9939882159233093,
|
| 8414 |
+
"learning_rate": 2.0375977570572967e-06,
|
| 8415 |
+
"loss": 1.8241,
|
| 8416 |
+
"step": 1201
|
| 8417 |
+
},
|
| 8418 |
+
{
|
| 8419 |
+
"epoch": 0.06493260945898495,
|
| 8420 |
+
"grad_norm": 1.0961536169052124,
|
| 8421 |
+
"learning_rate": 2.004007049848461e-06,
|
| 8422 |
+
"loss": 2.0016,
|
| 8423 |
+
"step": 1202
|
| 8424 |
+
},
|
| 8425 |
+
{
|
| 8426 |
+
"epoch": 0.06498662993274451,
|
| 8427 |
+
"grad_norm": 1.1619789600372314,
|
| 8428 |
+
"learning_rate": 1.970689864940728e-06,
|
| 8429 |
+
"loss": 1.5946,
|
| 8430 |
+
"step": 1203
|
| 8431 |
+
},
|
| 8432 |
+
{
|
| 8433 |
+
"epoch": 0.06504065040650407,
|
| 8434 |
+
"grad_norm": 1.1330021619796753,
|
| 8435 |
+
"learning_rate": 1.937646392203962e-06,
|
| 8436 |
+
"loss": 1.6162,
|
| 8437 |
+
"step": 1204
|
| 8438 |
+
},
|
| 8439 |
+
{
|
| 8440 |
+
"epoch": 0.06509467088026362,
|
| 8441 |
+
"grad_norm": 1.2222954034805298,
|
| 8442 |
+
"learning_rate": 1.9048768199481982e-06,
|
| 8443 |
+
"loss": 1.9415,
|
| 8444 |
+
"step": 1205
|
| 8445 |
+
},
|
| 8446 |
+
{
|
| 8447 |
+
"epoch": 0.06514869135402318,
|
| 8448 |
+
"grad_norm": 1.2239118814468384,
|
| 8449 |
+
"learning_rate": 1.872381334922535e-06,
|
| 8450 |
+
"loss": 1.9242,
|
| 8451 |
+
"step": 1206
|
| 8452 |
+
},
|
| 8453 |
+
{
|
| 8454 |
+
"epoch": 0.06520271182778273,
|
| 8455 |
+
"grad_norm": 1.2445582151412964,
|
| 8456 |
+
"learning_rate": 1.8401601223141107e-06,
|
| 8457 |
+
"loss": 1.7698,
|
| 8458 |
+
"step": 1207
|
| 8459 |
+
},
|
| 8460 |
+
{
|
| 8461 |
+
"epoch": 0.06525673230154229,
|
| 8462 |
+
"grad_norm": 1.2918168306350708,
|
| 8463 |
+
"learning_rate": 1.8082133657469856e-06,
|
| 8464 |
+
"loss": 1.8023,
|
| 8465 |
+
"step": 1208
|
| 8466 |
+
},
|
| 8467 |
+
{
|
| 8468 |
+
"epoch": 0.06531075277530184,
|
| 8469 |
+
"grad_norm": 1.3816994428634644,
|
| 8470 |
+
"learning_rate": 1.7765412472811771e-06,
|
| 8471 |
+
"loss": 2.0021,
|
| 8472 |
+
"step": 1209
|
| 8473 |
+
},
|
| 8474 |
+
{
|
| 8475 |
+
"epoch": 0.0653647732490614,
|
| 8476 |
+
"grad_norm": 1.3551326990127563,
|
| 8477 |
+
"learning_rate": 1.7451439474115427e-06,
|
| 8478 |
+
"loss": 2.1065,
|
| 8479 |
+
"step": 1210
|
| 8480 |
+
},
|
| 8481 |
+
{
|
| 8482 |
+
"epoch": 0.06541879372282094,
|
| 8483 |
+
"grad_norm": 1.5808122158050537,
|
| 8484 |
+
"learning_rate": 1.7140216450668212e-06,
|
| 8485 |
+
"loss": 2.1777,
|
| 8486 |
+
"step": 1211
|
| 8487 |
+
},
|
| 8488 |
+
{
|
| 8489 |
+
"epoch": 0.0654728141965805,
|
| 8490 |
+
"grad_norm": 1.5340195894241333,
|
| 8491 |
+
"learning_rate": 1.6831745176085545e-06,
|
| 8492 |
+
"loss": 1.9859,
|
| 8493 |
+
"step": 1212
|
| 8494 |
+
},
|
| 8495 |
+
{
|
| 8496 |
+
"epoch": 0.06552683467034005,
|
| 8497 |
+
"grad_norm": 1.5701367855072021,
|
| 8498 |
+
"learning_rate": 1.6526027408301226e-06,
|
| 8499 |
+
"loss": 2.0914,
|
| 8500 |
+
"step": 1213
|
| 8501 |
+
},
|
| 8502 |
+
{
|
| 8503 |
+
"epoch": 0.06558085514409961,
|
| 8504 |
+
"grad_norm": 1.852514624595642,
|
| 8505 |
+
"learning_rate": 1.6223064889556939e-06,
|
| 8506 |
+
"loss": 1.8135,
|
| 8507 |
+
"step": 1214
|
| 8508 |
+
},
|
| 8509 |
+
{
|
| 8510 |
+
"epoch": 0.06563487561785916,
|
| 8511 |
+
"grad_norm": 1.995379090309143,
|
| 8512 |
+
"learning_rate": 1.592285934639287e-06,
|
| 8513 |
+
"loss": 1.9178,
|
| 8514 |
+
"step": 1215
|
| 8515 |
+
},
|
| 8516 |
+
{
|
| 8517 |
+
"epoch": 0.06568889609161872,
|
| 8518 |
+
"grad_norm": 1.4332178831100464,
|
| 8519 |
+
"learning_rate": 1.5625412489637337e-06,
|
| 8520 |
+
"loss": 1.7123,
|
| 8521 |
+
"step": 1216
|
| 8522 |
+
},
|
| 8523 |
+
{
|
| 8524 |
+
"epoch": 0.06574291656537828,
|
| 8525 |
+
"grad_norm": 1.9623318910598755,
|
| 8526 |
+
"learning_rate": 1.5330726014397668e-06,
|
| 8527 |
+
"loss": 2.1475,
|
| 8528 |
+
"step": 1217
|
| 8529 |
+
},
|
| 8530 |
+
{
|
| 8531 |
+
"epoch": 0.06579693703913783,
|
| 8532 |
+
"grad_norm": 1.4409209489822388,
|
| 8533 |
+
"learning_rate": 1.5038801600049835e-06,
|
| 8534 |
+
"loss": 1.9327,
|
| 8535 |
+
"step": 1218
|
| 8536 |
+
},
|
| 8537 |
+
{
|
| 8538 |
+
"epoch": 0.06585095751289739,
|
| 8539 |
+
"grad_norm": 1.303188443183899,
|
| 8540 |
+
"learning_rate": 1.4749640910229346e-06,
|
| 8541 |
+
"loss": 1.7793,
|
| 8542 |
+
"step": 1219
|
| 8543 |
+
},
|
| 8544 |
+
{
|
| 8545 |
+
"epoch": 0.06590497798665694,
|
| 8546 |
+
"grad_norm": 1.2902424335479736,
|
| 8547 |
+
"learning_rate": 1.4463245592821529e-06,
|
| 8548 |
+
"loss": 1.797,
|
| 8549 |
+
"step": 1220
|
| 8550 |
+
},
|
| 8551 |
+
{
|
| 8552 |
+
"epoch": 0.0659589984604165,
|
| 8553 |
+
"grad_norm": 1.5561325550079346,
|
| 8554 |
+
"learning_rate": 1.417961727995254e-06,
|
| 8555 |
+
"loss": 1.7494,
|
| 8556 |
+
"step": 1221
|
| 8557 |
+
},
|
| 8558 |
+
{
|
| 8559 |
+
"epoch": 0.06601301893417605,
|
| 8560 |
+
"grad_norm": 1.2781826257705688,
|
| 8561 |
+
"learning_rate": 1.3898757587979372e-06,
|
| 8562 |
+
"loss": 1.625,
|
| 8563 |
+
"step": 1222
|
| 8564 |
+
},
|
| 8565 |
+
{
|
| 8566 |
+
"epoch": 0.06606703940793561,
|
| 8567 |
+
"grad_norm": 1.0945980548858643,
|
| 8568 |
+
"learning_rate": 1.3620668117481472e-06,
|
| 8569 |
+
"loss": 1.5026,
|
| 8570 |
+
"step": 1223
|
| 8571 |
+
},
|
| 8572 |
+
{
|
| 8573 |
+
"epoch": 0.06612105988169516,
|
| 8574 |
+
"grad_norm": 1.2133638858795166,
|
| 8575 |
+
"learning_rate": 1.3345350453250748e-06,
|
| 8576 |
+
"loss": 1.6782,
|
| 8577 |
+
"step": 1224
|
| 8578 |
+
},
|
| 8579 |
+
{
|
| 8580 |
+
"epoch": 0.06617508035545472,
|
| 8581 |
+
"grad_norm": 1.2804362773895264,
|
| 8582 |
+
"learning_rate": 1.3072806164283358e-06,
|
| 8583 |
+
"loss": 1.7845,
|
| 8584 |
+
"step": 1225
|
| 8585 |
+
},
|
| 8586 |
+
{
|
| 8587 |
+
"epoch": 0.06622910082921427,
|
| 8588 |
+
"grad_norm": 1.236364722251892,
|
| 8589 |
+
"learning_rate": 1.2803036803770153e-06,
|
| 8590 |
+
"loss": 1.7664,
|
| 8591 |
+
"step": 1226
|
| 8592 |
+
},
|
| 8593 |
+
{
|
| 8594 |
+
"epoch": 0.06628312130297383,
|
| 8595 |
+
"grad_norm": 1.270398497581482,
|
| 8596 |
+
"learning_rate": 1.2536043909088191e-06,
|
| 8597 |
+
"loss": 1.716,
|
| 8598 |
+
"step": 1227
|
| 8599 |
+
},
|
| 8600 |
+
{
|
| 8601 |
+
"epoch": 0.06633714177673339,
|
| 8602 |
+
"grad_norm": 1.1642175912857056,
|
| 8603 |
+
"learning_rate": 1.2271829001791802e-06,
|
| 8604 |
+
"loss": 1.732,
|
| 8605 |
+
"step": 1228
|
| 8606 |
+
},
|
| 8607 |
+
{
|
| 8608 |
+
"epoch": 0.06639116225049294,
|
| 8609 |
+
"grad_norm": 1.3211133480072021,
|
| 8610 |
+
"learning_rate": 1.2010393587603974e-06,
|
| 8611 |
+
"loss": 1.61,
|
| 8612 |
+
"step": 1229
|
| 8613 |
+
},
|
| 8614 |
+
{
|
| 8615 |
+
"epoch": 0.0664451827242525,
|
| 8616 |
+
"grad_norm": 1.2467527389526367,
|
| 8617 |
+
"learning_rate": 1.1751739156407649e-06,
|
| 8618 |
+
"loss": 1.8514,
|
| 8619 |
+
"step": 1230
|
| 8620 |
+
},
|
| 8621 |
+
{
|
| 8622 |
+
"epoch": 0.06649920319801204,
|
| 8623 |
+
"grad_norm": 1.280394196510315,
|
| 8624 |
+
"learning_rate": 1.1495867182237608e-06,
|
| 8625 |
+
"loss": 1.689,
|
| 8626 |
+
"step": 1231
|
| 8627 |
+
},
|
| 8628 |
+
{
|
| 8629 |
+
"epoch": 0.0665532236717716,
|
| 8630 |
+
"grad_norm": 1.229986548423767,
|
| 8631 |
+
"learning_rate": 1.1242779123271486e-06,
|
| 8632 |
+
"loss": 1.4778,
|
| 8633 |
+
"step": 1232
|
| 8634 |
+
},
|
| 8635 |
+
{
|
| 8636 |
+
"epoch": 0.06660724414553115,
|
| 8637 |
+
"grad_norm": 1.4359259605407715,
|
| 8638 |
+
"learning_rate": 1.099247642182205e-06,
|
| 8639 |
+
"loss": 1.9047,
|
| 8640 |
+
"step": 1233
|
| 8641 |
+
},
|
| 8642 |
+
{
|
| 8643 |
+
"epoch": 0.06666126461929071,
|
| 8644 |
+
"grad_norm": 1.1409422159194946,
|
| 8645 |
+
"learning_rate": 1.0744960504328604e-06,
|
| 8646 |
+
"loss": 1.4491,
|
| 8647 |
+
"step": 1234
|
| 8648 |
+
},
|
| 8649 |
+
{
|
| 8650 |
+
"epoch": 0.06671528509305026,
|
| 8651 |
+
"grad_norm": 1.273908257484436,
|
| 8652 |
+
"learning_rate": 1.0500232781348928e-06,
|
| 8653 |
+
"loss": 1.4766,
|
| 8654 |
+
"step": 1235
|
| 8655 |
+
},
|
| 8656 |
+
{
|
| 8657 |
+
"epoch": 0.06676930556680982,
|
| 8658 |
+
"grad_norm": 1.3033802509307861,
|
| 8659 |
+
"learning_rate": 1.0258294647551292e-06,
|
| 8660 |
+
"loss": 1.6449,
|
| 8661 |
+
"step": 1236
|
| 8662 |
+
},
|
| 8663 |
+
{
|
| 8664 |
+
"epoch": 0.06682332604056937,
|
| 8665 |
+
"grad_norm": 1.2807811498641968,
|
| 8666 |
+
"learning_rate": 1.0019147481706625e-06,
|
| 8667 |
+
"loss": 1.6588,
|
| 8668 |
+
"step": 1237
|
| 8669 |
+
},
|
| 8670 |
+
{
|
| 8671 |
+
"epoch": 0.06687734651432893,
|
| 8672 |
+
"grad_norm": 1.0725313425064087,
|
| 8673 |
+
"learning_rate": 9.782792646680305e-07,
|
| 8674 |
+
"loss": 1.293,
|
| 8675 |
+
"step": 1238
|
| 8676 |
+
},
|
| 8677 |
+
{
|
| 8678 |
+
"epoch": 0.06693136698808849,
|
| 8679 |
+
"grad_norm": 1.1775662899017334,
|
| 8680 |
+
"learning_rate": 9.549231489424936e-07,
|
| 8681 |
+
"loss": 1.6809,
|
| 8682 |
+
"step": 1239
|
| 8683 |
+
},
|
| 8684 |
+
{
|
| 8685 |
+
"epoch": 0.06698538746184804,
|
| 8686 |
+
"grad_norm": 1.2584788799285889,
|
| 8687 |
+
"learning_rate": 9.318465340971916e-07,
|
| 8688 |
+
"loss": 1.451,
|
| 8689 |
+
"step": 1240
|
| 8690 |
+
},
|
| 8691 |
+
{
|
| 8692 |
+
"epoch": 0.0670394079356076,
|
| 8693 |
+
"grad_norm": 1.1575076580047607,
|
| 8694 |
+
"learning_rate": 9.090495516424713e-07,
|
| 8695 |
+
"loss": 1.2797,
|
| 8696 |
+
"step": 1241
|
| 8697 |
+
},
|
| 8698 |
+
{
|
| 8699 |
+
"epoch": 0.06709342840936715,
|
| 8700 |
+
"grad_norm": 1.241934895515442,
|
| 8701 |
+
"learning_rate": 8.865323314950657e-07,
|
| 8702 |
+
"loss": 1.4462,
|
| 8703 |
+
"step": 1242
|
| 8704 |
+
},
|
| 8705 |
+
{
|
| 8706 |
+
"epoch": 0.06714744888312671,
|
| 8707 |
+
"grad_norm": 1.2096850872039795,
|
| 8708 |
+
"learning_rate": 8.642950019773999e-07,
|
| 8709 |
+
"loss": 1.5011,
|
| 8710 |
+
"step": 1243
|
| 8711 |
+
},
|
| 8712 |
+
{
|
| 8713 |
+
"epoch": 0.06720146935688626,
|
| 8714 |
+
"grad_norm": 1.3328115940093994,
|
| 8715 |
+
"learning_rate": 8.423376898168245e-07,
|
| 8716 |
+
"loss": 1.7193,
|
| 8717 |
+
"step": 1244
|
| 8718 |
+
},
|
| 8719 |
+
{
|
| 8720 |
+
"epoch": 0.06725548983064582,
|
| 8721 |
+
"grad_norm": 1.3805053234100342,
|
| 8722 |
+
"learning_rate": 8.206605201449447e-07,
|
| 8723 |
+
"loss": 1.6029,
|
| 8724 |
+
"step": 1245
|
| 8725 |
+
},
|
| 8726 |
+
{
|
| 8727 |
+
"epoch": 0.06730951030440536,
|
| 8728 |
+
"grad_norm": 1.2903777360916138,
|
| 8729 |
+
"learning_rate": 7.992636164968204e-07,
|
| 8730 |
+
"loss": 1.5612,
|
| 8731 |
+
"step": 1246
|
| 8732 |
+
},
|
| 8733 |
+
{
|
| 8734 |
+
"epoch": 0.06736353077816493,
|
| 8735 |
+
"grad_norm": 1.3115580081939697,
|
| 8736 |
+
"learning_rate": 7.781471008103669e-07,
|
| 8737 |
+
"loss": 1.4125,
|
| 8738 |
+
"step": 1247
|
| 8739 |
+
},
|
| 8740 |
+
{
|
| 8741 |
+
"epoch": 0.06741755125192447,
|
| 8742 |
+
"grad_norm": 1.286320686340332,
|
| 8743 |
+
"learning_rate": 7.573110934255834e-07,
|
| 8744 |
+
"loss": 1.3553,
|
| 8745 |
+
"step": 1248
|
| 8746 |
+
},
|
| 8747 |
+
{
|
| 8748 |
+
"epoch": 0.06747157172568403,
|
| 8749 |
+
"grad_norm": 1.2746272087097168,
|
| 8750 |
+
"learning_rate": 7.36755713083892e-07,
|
| 8751 |
+
"loss": 1.5095,
|
| 8752 |
+
"step": 1249
|
| 8753 |
+
},
|
| 8754 |
+
{
|
| 8755 |
+
"epoch": 0.0675255921994436,
|
| 8756 |
+
"grad_norm": 1.2874985933303833,
|
| 8757 |
+
"learning_rate": 7.164810769274722e-07,
|
| 8758 |
+
"loss": 1.2177,
|
| 8759 |
+
"step": 1250
|
| 8760 |
+
},
|
| 8761 |
+
{
|
| 8762 |
+
"epoch": 0.06757961267320314,
|
| 8763 |
+
"grad_norm": 1.032150387763977,
|
| 8764 |
+
"learning_rate": 6.964873004985717e-07,
|
| 8765 |
+
"loss": 2.2141,
|
| 8766 |
+
"step": 1251
|
| 8767 |
+
},
|
| 8768 |
+
{
|
| 8769 |
+
"epoch": 0.0676336331469627,
|
| 8770 |
+
"grad_norm": 1.0659089088439941,
|
| 8771 |
+
"learning_rate": 6.76774497738869e-07,
|
| 8772 |
+
"loss": 1.8592,
|
| 8773 |
+
"step": 1252
|
| 8774 |
+
},
|
| 8775 |
+
{
|
| 8776 |
+
"epoch": 0.06768765362072225,
|
| 8777 |
+
"grad_norm": 1.06844961643219,
|
| 8778 |
+
"learning_rate": 6.573427809888067e-07,
|
| 8779 |
+
"loss": 1.7687,
|
| 8780 |
+
"step": 1253
|
| 8781 |
+
},
|
| 8782 |
+
{
|
| 8783 |
+
"epoch": 0.06774167409448181,
|
| 8784 |
+
"grad_norm": 1.147287368774414,
|
| 8785 |
+
"learning_rate": 6.381922609869528e-07,
|
| 8786 |
+
"loss": 1.9445,
|
| 8787 |
+
"step": 1254
|
| 8788 |
+
},
|
| 8789 |
+
{
|
| 8790 |
+
"epoch": 0.06779569456824136,
|
| 8791 |
+
"grad_norm": 1.1481311321258545,
|
| 8792 |
+
"learning_rate": 6.193230468693911e-07,
|
| 8793 |
+
"loss": 1.6554,
|
| 8794 |
+
"step": 1255
|
| 8795 |
+
},
|
| 8796 |
+
{
|
| 8797 |
+
"epoch": 0.06784971504200092,
|
| 8798 |
+
"grad_norm": 1.1168692111968994,
|
| 8799 |
+
"learning_rate": 6.007352461690707e-07,
|
| 8800 |
+
"loss": 1.7712,
|
| 8801 |
+
"step": 1256
|
| 8802 |
+
},
|
| 8803 |
+
{
|
| 8804 |
+
"epoch": 0.06790373551576047,
|
| 8805 |
+
"grad_norm": 1.1728993654251099,
|
| 8806 |
+
"learning_rate": 5.824289648152126e-07,
|
| 8807 |
+
"loss": 1.8542,
|
| 8808 |
+
"step": 1257
|
| 8809 |
+
},
|
| 8810 |
+
{
|
| 8811 |
+
"epoch": 0.06795775598952003,
|
| 8812 |
+
"grad_norm": 1.2998985052108765,
|
| 8813 |
+
"learning_rate": 5.644043071326932e-07,
|
| 8814 |
+
"loss": 2.1056,
|
| 8815 |
+
"step": 1258
|
| 8816 |
+
},
|
| 8817 |
+
{
|
| 8818 |
+
"epoch": 0.06801177646327958,
|
| 8819 |
+
"grad_norm": 4.602902412414551,
|
| 8820 |
+
"learning_rate": 5.466613758414674e-07,
|
| 8821 |
+
"loss": 2.2656,
|
| 8822 |
+
"step": 1259
|
| 8823 |
+
},
|
| 8824 |
+
{
|
| 8825 |
+
"epoch": 0.06806579693703914,
|
| 8826 |
+
"grad_norm": 1.5596973896026611,
|
| 8827 |
+
"learning_rate": 5.292002720559519e-07,
|
| 8828 |
+
"loss": 2.167,
|
| 8829 |
+
"step": 1260
|
| 8830 |
+
},
|
| 8831 |
+
{
|
| 8832 |
+
"epoch": 0.06811981741079869,
|
| 8833 |
+
"grad_norm": 1.417231559753418,
|
| 8834 |
+
"learning_rate": 5.120210952844872e-07,
|
| 8835 |
+
"loss": 1.763,
|
| 8836 |
+
"step": 1261
|
| 8837 |
+
},
|
| 8838 |
+
{
|
| 8839 |
+
"epoch": 0.06817383788455825,
|
| 8840 |
+
"grad_norm": 1.2590383291244507,
|
| 8841 |
+
"learning_rate": 4.951239434287491e-07,
|
| 8842 |
+
"loss": 1.8619,
|
| 8843 |
+
"step": 1262
|
| 8844 |
+
},
|
| 8845 |
+
{
|
| 8846 |
+
"epoch": 0.06822785835831781,
|
| 8847 |
+
"grad_norm": 1.4310927391052246,
|
| 8848 |
+
"learning_rate": 4.785089127831766e-07,
|
| 8849 |
+
"loss": 1.9718,
|
| 8850 |
+
"step": 1263
|
| 8851 |
+
},
|
| 8852 |
+
{
|
| 8853 |
+
"epoch": 0.06828187883207736,
|
| 8854 |
+
"grad_norm": 1.9252208471298218,
|
| 8855 |
+
"learning_rate": 4.621760980344725e-07,
|
| 8856 |
+
"loss": 1.7587,
|
| 8857 |
+
"step": 1264
|
| 8858 |
+
},
|
| 8859 |
+
{
|
| 8860 |
+
"epoch": 0.06833589930583692,
|
| 8861 |
+
"grad_norm": 2.3905534744262695,
|
| 8862 |
+
"learning_rate": 4.461255922609986e-07,
|
| 8863 |
+
"loss": 1.8996,
|
| 8864 |
+
"step": 1265
|
| 8865 |
+
},
|
| 8866 |
+
{
|
| 8867 |
+
"epoch": 0.06838991977959646,
|
| 8868 |
+
"grad_norm": 1.226621389389038,
|
| 8869 |
+
"learning_rate": 4.303574869322924e-07,
|
| 8870 |
+
"loss": 1.4957,
|
| 8871 |
+
"step": 1266
|
| 8872 |
+
},
|
| 8873 |
+
{
|
| 8874 |
+
"epoch": 0.06844394025335603,
|
| 8875 |
+
"grad_norm": 1.331422209739685,
|
| 8876 |
+
"learning_rate": 4.1487187190853984e-07,
|
| 8877 |
+
"loss": 1.632,
|
| 8878 |
+
"step": 1267
|
| 8879 |
+
},
|
| 8880 |
+
{
|
| 8881 |
+
"epoch": 0.06849796072711557,
|
| 8882 |
+
"grad_norm": 1.1910052299499512,
|
| 8883 |
+
"learning_rate": 3.996688354400424e-07,
|
| 8884 |
+
"loss": 1.7429,
|
| 8885 |
+
"step": 1268
|
| 8886 |
+
},
|
| 8887 |
+
{
|
| 8888 |
+
"epoch": 0.06855198120087513,
|
| 8889 |
+
"grad_norm": 1.2700560092926025,
|
| 8890 |
+
"learning_rate": 3.8474846416672874e-07,
|
| 8891 |
+
"loss": 1.6712,
|
| 8892 |
+
"step": 1269
|
| 8893 |
+
},
|
| 8894 |
+
{
|
| 8895 |
+
"epoch": 0.06860600167463468,
|
| 8896 |
+
"grad_norm": 1.3321186304092407,
|
| 8897 |
+
"learning_rate": 3.7011084311766586e-07,
|
| 8898 |
+
"loss": 1.9079,
|
| 8899 |
+
"step": 1270
|
| 8900 |
+
},
|
| 8901 |
+
{
|
| 8902 |
+
"epoch": 0.06866002214839424,
|
| 8903 |
+
"grad_norm": 1.314345121383667,
|
| 8904 |
+
"learning_rate": 3.5575605571055994e-07,
|
| 8905 |
+
"loss": 1.8941,
|
| 8906 |
+
"step": 1271
|
| 8907 |
+
},
|
| 8908 |
+
{
|
| 8909 |
+
"epoch": 0.06871404262215379,
|
| 8910 |
+
"grad_norm": 1.096266746520996,
|
| 8911 |
+
"learning_rate": 3.416841837512952e-07,
|
| 8912 |
+
"loss": 1.5468,
|
| 8913 |
+
"step": 1272
|
| 8914 |
+
},
|
| 8915 |
+
{
|
| 8916 |
+
"epoch": 0.06876806309591335,
|
| 8917 |
+
"grad_norm": 1.329594373703003,
|
| 8918 |
+
"learning_rate": 3.278953074334512e-07,
|
| 8919 |
+
"loss": 1.7978,
|
| 8920 |
+
"step": 1273
|
| 8921 |
+
},
|
| 8922 |
+
{
|
| 8923 |
+
"epoch": 0.06882208356967291,
|
| 8924 |
+
"grad_norm": 1.232609510421753,
|
| 8925 |
+
"learning_rate": 3.143895053378698e-07,
|
| 8926 |
+
"loss": 1.6848,
|
| 8927 |
+
"step": 1274
|
| 8928 |
+
},
|
| 8929 |
+
{
|
| 8930 |
+
"epoch": 0.06887610404343246,
|
| 8931 |
+
"grad_norm": 1.0414018630981445,
|
| 8932 |
+
"learning_rate": 3.011668544321833e-07,
|
| 8933 |
+
"loss": 1.2961,
|
| 8934 |
+
"step": 1275
|
| 8935 |
+
},
|
| 8936 |
+
{
|
| 8937 |
+
"epoch": 0.06893012451719202,
|
| 8938 |
+
"grad_norm": 1.2992279529571533,
|
| 8939 |
+
"learning_rate": 2.8822743007039244e-07,
|
| 8940 |
+
"loss": 1.5099,
|
| 8941 |
+
"step": 1276
|
| 8942 |
+
},
|
| 8943 |
+
{
|
| 8944 |
+
"epoch": 0.06898414499095157,
|
| 8945 |
+
"grad_norm": 1.0978848934173584,
|
| 8946 |
+
"learning_rate": 2.75571305992417e-07,
|
| 8947 |
+
"loss": 1.4086,
|
| 8948 |
+
"step": 1277
|
| 8949 |
+
},
|
| 8950 |
+
{
|
| 8951 |
+
"epoch": 0.06903816546471113,
|
| 8952 |
+
"grad_norm": 1.1189380884170532,
|
| 8953 |
+
"learning_rate": 2.63198554323707e-07,
|
| 8954 |
+
"loss": 1.412,
|
| 8955 |
+
"step": 1278
|
| 8956 |
+
},
|
| 8957 |
+
{
|
| 8958 |
+
"epoch": 0.06909218593847068,
|
| 8959 |
+
"grad_norm": 1.2583056688308716,
|
| 8960 |
+
"learning_rate": 2.511092455747932e-07,
|
| 8961 |
+
"loss": 1.6482,
|
| 8962 |
+
"step": 1279
|
| 8963 |
+
},
|
| 8964 |
+
{
|
| 8965 |
+
"epoch": 0.06914620641223024,
|
| 8966 |
+
"grad_norm": 1.3194609880447388,
|
| 8967 |
+
"learning_rate": 2.3930344864093733e-07,
|
| 8968 |
+
"loss": 1.6382,
|
| 8969 |
+
"step": 1280
|
| 8970 |
+
},
|
| 8971 |
+
{
|
| 8972 |
+
"epoch": 0.06920022688598979,
|
| 8973 |
+
"grad_norm": 1.1886677742004395,
|
| 8974 |
+
"learning_rate": 2.2778123080167135e-07,
|
| 8975 |
+
"loss": 1.5974,
|
| 8976 |
+
"step": 1281
|
| 8977 |
+
},
|
| 8978 |
+
{
|
| 8979 |
+
"epoch": 0.06925424735974935,
|
| 8980 |
+
"grad_norm": 1.2668043375015259,
|
| 8981 |
+
"learning_rate": 2.1654265772047543e-07,
|
| 8982 |
+
"loss": 1.6208,
|
| 8983 |
+
"step": 1282
|
| 8984 |
+
},
|
| 8985 |
+
{
|
| 8986 |
+
"epoch": 0.0693082678335089,
|
| 8987 |
+
"grad_norm": 1.2544898986816406,
|
| 8988 |
+
"learning_rate": 2.055877934443673e-07,
|
| 8989 |
+
"loss": 1.7541,
|
| 8990 |
+
"step": 1283
|
| 8991 |
+
},
|
| 8992 |
+
{
|
| 8993 |
+
"epoch": 0.06936228830726845,
|
| 8994 |
+
"grad_norm": 1.1625001430511475,
|
| 8995 |
+
"learning_rate": 1.9491670040355238e-07,
|
| 8996 |
+
"loss": 1.672,
|
| 8997 |
+
"step": 1284
|
| 8998 |
+
},
|
| 8999 |
+
{
|
| 9000 |
+
"epoch": 0.06941630878102802,
|
| 9001 |
+
"grad_norm": 1.293431043624878,
|
| 9002 |
+
"learning_rate": 1.8452943941106859e-07,
|
| 9003 |
+
"loss": 1.5477,
|
| 9004 |
+
"step": 1285
|
| 9005 |
+
},
|
| 9006 |
+
{
|
| 9007 |
+
"epoch": 0.06947032925478756,
|
| 9008 |
+
"grad_norm": 1.4472225904464722,
|
| 9009 |
+
"learning_rate": 1.7442606966242004e-07,
|
| 9010 |
+
"loss": 1.8822,
|
| 9011 |
+
"step": 1286
|
| 9012 |
+
},
|
| 9013 |
+
{
|
| 9014 |
+
"epoch": 0.06952434972854712,
|
| 9015 |
+
"grad_norm": 1.3418630361557007,
|
| 9016 |
+
"learning_rate": 1.6460664873528265e-07,
|
| 9017 |
+
"loss": 1.7605,
|
| 9018 |
+
"step": 1287
|
| 9019 |
+
},
|
| 9020 |
+
{
|
| 9021 |
+
"epoch": 0.06957837020230667,
|
| 9022 |
+
"grad_norm": 1.2076053619384766,
|
| 9023 |
+
"learning_rate": 1.5507123258911572e-07,
|
| 9024 |
+
"loss": 1.6393,
|
| 9025 |
+
"step": 1288
|
| 9026 |
+
},
|
| 9027 |
+
{
|
| 9028 |
+
"epoch": 0.06963239067606623,
|
| 9029 |
+
"grad_norm": 1.2483508586883545,
|
| 9030 |
+
"learning_rate": 1.4581987556490095e-07,
|
| 9031 |
+
"loss": 1.5722,
|
| 9032 |
+
"step": 1289
|
| 9033 |
+
},
|
| 9034 |
+
{
|
| 9035 |
+
"epoch": 0.06968641114982578,
|
| 9036 |
+
"grad_norm": 1.2065938711166382,
|
| 9037 |
+
"learning_rate": 1.3685263038479833e-07,
|
| 9038 |
+
"loss": 1.352,
|
| 9039 |
+
"step": 1290
|
| 9040 |
+
},
|
| 9041 |
+
{
|
| 9042 |
+
"epoch": 0.06974043162358534,
|
| 9043 |
+
"grad_norm": 1.3314545154571533,
|
| 9044 |
+
"learning_rate": 1.2816954815185743e-07,
|
| 9045 |
+
"loss": 1.5897,
|
| 9046 |
+
"step": 1291
|
| 9047 |
+
},
|
| 9048 |
+
{
|
| 9049 |
+
"epoch": 0.06979445209734489,
|
| 9050 |
+
"grad_norm": 1.3603909015655518,
|
| 9051 |
+
"learning_rate": 1.1977067834971766e-07,
|
| 9052 |
+
"loss": 1.6291,
|
| 9053 |
+
"step": 1292
|
| 9054 |
+
},
|
| 9055 |
+
{
|
| 9056 |
+
"epoch": 0.06984847257110445,
|
| 9057 |
+
"grad_norm": 1.4013954401016235,
|
| 9058 |
+
"learning_rate": 1.1165606884234181e-07,
|
| 9059 |
+
"loss": 1.6709,
|
| 9060 |
+
"step": 1293
|
| 9061 |
+
},
|
| 9062 |
+
{
|
| 9063 |
+
"epoch": 0.069902493044864,
|
| 9064 |
+
"grad_norm": 1.2276852130889893,
|
| 9065 |
+
"learning_rate": 1.0382576587372739e-07,
|
| 9066 |
+
"loss": 1.5565,
|
| 9067 |
+
"step": 1294
|
| 9068 |
+
},
|
| 9069 |
+
{
|
| 9070 |
+
"epoch": 0.06995651351862356,
|
| 9071 |
+
"grad_norm": 1.5686982870101929,
|
| 9072 |
+
"learning_rate": 9.62798140676513e-08,
|
| 9073 |
+
"loss": 1.6577,
|
| 9074 |
+
"step": 1295
|
| 9075 |
+
},
|
| 9076 |
+
{
|
| 9077 |
+
"epoch": 0.07001053399238312,
|
| 9078 |
+
"grad_norm": 1.4022105932235718,
|
| 9079 |
+
"learning_rate": 8.901825642741447e-08,
|
| 9080 |
+
"loss": 1.6766,
|
| 9081 |
+
"step": 1296
|
| 9082 |
+
},
|
| 9083 |
+
{
|
| 9084 |
+
"epoch": 0.07006455446614267,
|
| 9085 |
+
"grad_norm": 1.2123117446899414,
|
| 9086 |
+
"learning_rate": 8.204113433559201e-08,
|
| 9087 |
+
"loss": 1.5377,
|
| 9088 |
+
"step": 1297
|
| 9089 |
+
},
|
| 9090 |
+
{
|
| 9091 |
+
"epoch": 0.07011857493990223,
|
| 9092 |
+
"grad_norm": 1.2667269706726074,
|
| 9093 |
+
"learning_rate": 7.53484875538113e-08,
|
| 9094 |
+
"loss": 1.5383,
|
| 9095 |
+
"step": 1298
|
| 9096 |
+
},
|
| 9097 |
+
{
|
| 9098 |
+
"epoch": 0.07017259541366178,
|
| 9099 |
+
"grad_norm": 1.538053274154663,
|
| 9100 |
+
"learning_rate": 6.894035422250756e-08,
|
| 9101 |
+
"loss": 1.4201,
|
| 9102 |
+
"step": 1299
|
| 9103 |
+
},
|
| 9104 |
+
{
|
| 9105 |
+
"epoch": 0.07022661588742134,
|
| 9106 |
+
"grad_norm": 1.3648333549499512,
|
| 9107 |
+
"learning_rate": 6.281677086071303e-08,
|
| 9108 |
+
"loss": 1.2852,
|
| 9109 |
+
"step": 1300
|
| 9110 |
+
},
|
| 9111 |
+
{
|
| 9112 |
+
"epoch": 0.07028063636118088,
|
| 9113 |
+
"grad_norm": 0.9214191436767578,
|
| 9114 |
+
"learning_rate": 5.697777236585711e-08,
|
| 9115 |
+
"loss": 1.5883,
|
| 9116 |
+
"step": 1301
|
| 9117 |
+
},
|
| 9118 |
+
{
|
| 9119 |
+
"epoch": 0.07033465683494045,
|
| 9120 |
+
"grad_norm": 1.044727087020874,
|
| 9121 |
+
"learning_rate": 5.1423392013555395e-08,
|
| 9122 |
+
"loss": 1.8702,
|
| 9123 |
+
"step": 1302
|
| 9124 |
+
},
|
| 9125 |
+
{
|
| 9126 |
+
"epoch": 0.07038867730869999,
|
| 9127 |
+
"grad_norm": 1.012649655342102,
|
| 9128 |
+
"learning_rate": 4.6153661457426503e-08,
|
| 9129 |
+
"loss": 1.609,
|
| 9130 |
+
"step": 1303
|
| 9131 |
+
},
|
| 9132 |
+
{
|
| 9133 |
+
"epoch": 0.07044269778245955,
|
| 9134 |
+
"grad_norm": 1.1581029891967773,
|
| 9135 |
+
"learning_rate": 4.1168610728914425e-08,
|
| 9136 |
+
"loss": 1.8963,
|
| 9137 |
+
"step": 1304
|
| 9138 |
+
},
|
| 9139 |
+
{
|
| 9140 |
+
"epoch": 0.0704967182562191,
|
| 9141 |
+
"grad_norm": 1.151607632637024,
|
| 9142 |
+
"learning_rate": 3.6468268237105366e-08,
|
| 9143 |
+
"loss": 1.8106,
|
| 9144 |
+
"step": 1305
|
| 9145 |
+
},
|
| 9146 |
+
{
|
| 9147 |
+
"epoch": 0.07055073872997866,
|
| 9148 |
+
"grad_norm": 1.3642902374267578,
|
| 9149 |
+
"learning_rate": 3.205266076858893e-08,
|
| 9150 |
+
"loss": 1.8156,
|
| 9151 |
+
"step": 1306
|
| 9152 |
+
},
|
| 9153 |
+
{
|
| 9154 |
+
"epoch": 0.07060475920373821,
|
| 9155 |
+
"grad_norm": 1.3071428537368774,
|
| 9156 |
+
"learning_rate": 2.792181348726941e-08,
|
| 9157 |
+
"loss": 1.9135,
|
| 9158 |
+
"step": 1307
|
| 9159 |
+
},
|
| 9160 |
+
{
|
| 9161 |
+
"epoch": 0.07065877967749777,
|
| 9162 |
+
"grad_norm": 4.877688884735107,
|
| 9163 |
+
"learning_rate": 2.4075749934260317e-08,
|
| 9164 |
+
"loss": 2.1155,
|
| 9165 |
+
"step": 1308
|
| 9166 |
+
},
|
| 9167 |
+
{
|
| 9168 |
+
"epoch": 0.07071280015125733,
|
| 9169 |
+
"grad_norm": 1.3239308595657349,
|
| 9170 |
+
"learning_rate": 2.0514492027728926e-08,
|
| 9171 |
+
"loss": 1.7787,
|
| 9172 |
+
"step": 1309
|
| 9173 |
+
},
|
| 9174 |
+
{
|
| 9175 |
+
"epoch": 0.07076682062501688,
|
| 9176 |
+
"grad_norm": 1.4881937503814697,
|
| 9177 |
+
"learning_rate": 1.7238060062774175e-08,
|
| 9178 |
+
"loss": 2.3092,
|
| 9179 |
+
"step": 1310
|
| 9180 |
+
},
|
| 9181 |
+
{
|
| 9182 |
+
"epoch": 0.07082084109877644,
|
| 9183 |
+
"grad_norm": 1.6437569856643677,
|
| 9184 |
+
"learning_rate": 1.4246472711310078e-08,
|
| 9185 |
+
"loss": 1.9419,
|
| 9186 |
+
"step": 1311
|
| 9187 |
+
},
|
| 9188 |
+
{
|
| 9189 |
+
"epoch": 0.07087486157253599,
|
| 9190 |
+
"grad_norm": 1.3334624767303467,
|
| 9191 |
+
"learning_rate": 1.153974702197136e-08,
|
| 9192 |
+
"loss": 1.7677,
|
| 9193 |
+
"step": 1312
|
| 9194 |
+
},
|
| 9195 |
+
{
|
| 9196 |
+
"epoch": 0.07092888204629555,
|
| 9197 |
+
"grad_norm": 4.003887176513672,
|
| 9198 |
+
"learning_rate": 9.117898419991333e-09,
|
| 9199 |
+
"loss": 1.8225,
|
| 9200 |
+
"step": 1313
|
| 9201 |
+
},
|
| 9202 |
+
{
|
| 9203 |
+
"epoch": 0.0709829025200551,
|
| 9204 |
+
"grad_norm": 2.7670540809631348,
|
| 9205 |
+
"learning_rate": 6.980940707146389e-09,
|
| 9206 |
+
"loss": 2.1558,
|
| 9207 |
+
"step": 1314
|
| 9208 |
+
},
|
| 9209 |
+
{
|
| 9210 |
+
"epoch": 0.07103692299381466,
|
| 9211 |
+
"grad_norm": 1.832947850227356,
|
| 9212 |
+
"learning_rate": 5.128886061656068e-09,
|
| 9213 |
+
"loss": 1.9205,
|
| 9214 |
+
"step": 1315
|
| 9215 |
+
},
|
| 9216 |
+
{
|
| 9217 |
+
"epoch": 0.0710909434675742,
|
| 9218 |
+
"grad_norm": 1.7930538654327393,
|
| 9219 |
+
"learning_rate": 3.5617450381053576e-09,
|
| 9220 |
+
"loss": 2.0155,
|
| 9221 |
+
"step": 1316
|
| 9222 |
+
},
|
| 9223 |
+
{
|
| 9224 |
+
"epoch": 0.07114496394133377,
|
| 9225 |
+
"grad_norm": 1.714447021484375,
|
| 9226 |
+
"learning_rate": 2.279526567411372e-09,
|
| 9227 |
+
"loss": 2.003,
|
| 9228 |
+
"step": 1317
|
| 9229 |
+
},
|
| 9230 |
+
{
|
| 9231 |
+
"epoch": 0.07119898441509331,
|
| 9232 |
+
"grad_norm": 1.2400060892105103,
|
| 9233 |
+
"learning_rate": 1.2822379567567488e-09,
|
| 9234 |
+
"loss": 1.6643,
|
| 9235 |
+
"step": 1318
|
| 9236 |
+
},
|
| 9237 |
+
{
|
| 9238 |
+
"epoch": 0.07125300488885287,
|
| 9239 |
+
"grad_norm": 1.3478233814239502,
|
| 9240 |
+
"learning_rate": 5.698848895396847e-10,
|
| 9241 |
+
"loss": 1.6145,
|
| 9242 |
+
"step": 1319
|
| 9243 |
+
},
|
| 9244 |
+
{
|
| 9245 |
+
"epoch": 0.07130702536261244,
|
| 9246 |
+
"grad_norm": 1.170430064201355,
|
| 9247 |
+
"learning_rate": 1.4247142536838454e-10,
|
| 9248 |
+
"loss": 1.8467,
|
| 9249 |
+
"step": 1320
|
| 9250 |
+
},
|
| 9251 |
+
{
|
| 9252 |
+
"epoch": 0.07136104583637198,
|
| 9253 |
+
"grad_norm": 1.0377249717712402,
|
| 9254 |
+
"learning_rate": 0.0,
|
| 9255 |
+
"loss": 1.4566,
|
| 9256 |
+
"step": 1321
|
| 9257 |
}
|
| 9258 |
],
|
| 9259 |
"logging_steps": 1,
|
| 9268 |
"should_evaluate": false,
|
| 9269 |
"should_log": false,
|
| 9270 |
"should_save": true,
|
| 9271 |
+
"should_training_stop": true
|
| 9272 |
},
|
| 9273 |
"attributes": {}
|
| 9274 |
}
|
| 9275 |
},
|
| 9276 |
+
"total_flos": 6.442801532869018e+16,
|
| 9277 |
"train_batch_size": 4,
|
| 9278 |
"trial_name": null,
|
| 9279 |
"trial_params": null
|
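
The per-step entries above (epoch, grad_norm, learning_rate, loss, step) live as plain JSON in last-checkpoint/trainer_state.json, so the run can be inspected without loading the adapter weights. A minimal sketch, assuming only the checkpoint layout shown in this commit and the standard Trainer state keys (log_history, total_flos, train_batch_size); the summary it prints (mean loss over the last 50 logged steps) is illustrative, not part of the training run:

import json
from statistics import mean

# Path taken from this commit's file listing.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# log_history holds one dict per logged step, as shown in the diff above.
history = state["log_history"]
last = history[-1]

print(f"stopped at step {last['step']} (epoch {last['epoch']:.4f})")
print(f"final learning rate: {last['learning_rate']}")

# Some entries (e.g. eval records) may lack a 'loss' key, so guard for it.
recent = [h["loss"] for h in history[-50:] if "loss" in h]
print(f"mean loss over last {len(recent)} logged steps: {mean(recent):.4f}")

print(f"total_flos: {state['total_flos']}, train_batch_size: {state['train_batch_size']}")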