Fanucci committed
Training in progress, step 750, checkpoint
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +1063 -5
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:25fac366caff47f3aa93b902a05a2db29e00272a8c7b84d3056cc6748adca598
 size 341314644
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f0b30a3c14ac7dc4dae39d2d5246237636a6a753e5435443e23ebc635d2d081d
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6a994b6cec1e66468861e932ae00811a4390034f78f383e9ebce5b2fbf8eafd7
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": NaN,
   "best_model_checkpoint": "miner_id_24/checkpoint-150",
-  "epoch": 0.
+  "epoch": 0.1574307304785894,
   "eval_steps": 150,
-  "global_step":
+  "global_step": 750,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -4247,6 +4247,1064 @@
       "eval_samples_per_second": 6.686,
       "eval_steps_per_second": 3.343,
       "step": 600
+    },
+    {
+      "epoch": 0.12615449202350965,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011780272298947175,
+      "loss": 0.0,
+      "step": 601
+    },
+    {
+      "epoch": 0.12636439966414778,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011757093414507408,
+      "loss": 0.0,
+      "step": 602
+    },
+    {
+      "epoch": 0.1265743073047859,
+      "grad_norm": NaN,
+      "learning_rate": 0.000117339047850476,
+      "loss": 0.0,
+      "step": 603
+    },
+    {
+      "epoch": 0.12678421494542402,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011710706539174268,
+      "loss": 0.0,
+      "step": 604
+    },
+    {
+      "epoch": 0.12699412258606213,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011687498805547257,
+      "loss": 0.0,
+      "step": 605
+    },
+    {
+      "epoch": 0.12720403022670027,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011664281712879033,
+      "loss": 0.0,
+      "step": 606
+    },
+    {
+      "epoch": 0.12741393786733837,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011641055389933977,
+      "loss": 0.0,
+      "step": 607
+    },
+    {
+      "epoch": 0.12762384550797648,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001161781996552765,
+      "loss": 0.0,
+      "step": 608
+    },
+    {
+      "epoch": 0.12783375314861462,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011594575568526094,
+      "loss": 0.0,
+      "step": 609
+    },
+    {
+      "epoch": 0.12804366078925272,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011571322327845116,
+      "loss": 0.0,
+      "step": 610
+    },
+    {
+      "epoch": 0.12825356842989086,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011548060372449571,
+      "loss": 0.0,
+      "step": 611
+    },
+    {
+      "epoch": 0.12846347607052896,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011524789831352649,
+      "loss": 0.0,
+      "step": 612
+    },
+    {
+      "epoch": 0.1286733837111671,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001150151083361515,
+      "loss": 0.0,
+      "step": 613
+    },
+    {
+      "epoch": 0.1288832913518052,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011478223508344783,
+      "loss": 0.0,
+      "step": 614
+    },
+    {
+      "epoch": 0.12909319899244331,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011454927984695438,
+      "loss": 0.0,
+      "step": 615
+    },
+    {
+      "epoch": 0.12930310663308145,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011431624391866476,
+      "loss": 0.0,
+      "step": 616
+    },
+    {
+      "epoch": 0.12951301427371956,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011408312859102009,
+      "loss": 0.0,
+      "step": 617
+    },
+    {
+      "epoch": 0.1297229219143577,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011384993515690183,
+      "loss": 0.0,
+      "step": 618
+    },
+    {
+      "epoch": 0.1299328295549958,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011361666490962468,
+      "loss": 0.0,
+      "step": 619
+    },
+    {
+      "epoch": 0.13014273719563393,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001133833191429293,
+      "loss": 0.0,
+      "step": 620
+    },
+    {
+      "epoch": 0.13035264483627204,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011314989915097523,
+      "loss": 0.0,
+      "step": 621
+    },
+    {
+      "epoch": 0.13056255247691015,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011291640622833362,
+      "loss": 0.0,
+      "step": 622
+    },
+    {
+      "epoch": 0.13077246011754828,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001126828416699802,
+      "loss": 0.0,
+      "step": 623
+    },
+    {
+      "epoch": 0.1309823677581864,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011244920677128786,
+      "loss": 0.0,
+      "step": 624
+    },
+    {
+      "epoch": 0.13119227539882453,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011221550282801971,
+      "loss": 0.0,
+      "step": 625
+    },
+    {
+      "epoch": 0.13140218303946263,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011198173113632177,
+      "loss": 0.0,
+      "step": 626
+    },
+    {
+      "epoch": 0.13161209068010077,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011174789299271577,
+      "loss": 0.0,
+      "step": 627
+    },
+    {
+      "epoch": 0.13182199832073888,
+      "grad_norm": NaN,
+      "learning_rate": 0.000111513989694092,
+      "loss": 0.0,
+      "step": 628
+    },
+    {
+      "epoch": 0.13203190596137698,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011128002253770211,
+      "loss": 0.0,
+      "step": 629
+    },
+    {
+      "epoch": 0.13224181360201512,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011104599282115191,
+      "loss": 0.0,
+      "step": 630
+    },
+    {
+      "epoch": 0.13245172124265323,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011081190184239419,
+      "loss": 0.0,
+      "step": 631
+    },
+    {
+      "epoch": 0.13266162888329136,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001105777508997215,
+      "loss": 0.0,
+      "step": 632
+    },
+    {
+      "epoch": 0.13287153652392947,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001103435412917589,
+      "loss": 0.0,
+      "step": 633
+    },
+    {
+      "epoch": 0.1330814441645676,
+      "grad_norm": NaN,
+      "learning_rate": 0.00011010927431745692,
+      "loss": 0.0,
+      "step": 634
+    },
+    {
+      "epoch": 0.1332913518052057,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001098749512760842,
+      "loss": 0.0,
+      "step": 635
+    },
+    {
+      "epoch": 0.13350125944584382,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001096405734672203,
+      "loss": 0.0,
+      "step": 636
+    },
+    {
+      "epoch": 0.13371116708648195,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010940614219074854,
+      "loss": 0.0,
+      "step": 637
+    },
+    {
+      "epoch": 0.13392107472712006,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001091716587468488,
+      "loss": 0.0,
+      "step": 638
+    },
+    {
+      "epoch": 0.1341309823677582,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010893712443599028,
+      "loss": 0.0,
+      "step": 639
+    },
+    {
+      "epoch": 0.1343408900083963,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001087025405589243,
+      "loss": 0.0,
+      "step": 640
+    },
+    {
+      "epoch": 0.13455079764903444,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010846790841667705,
+      "loss": 0.0,
+      "step": 641
+    },
+    {
+      "epoch": 0.13476070528967254,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010823322931054246,
+      "loss": 0.0,
+      "step": 642
+    },
+    {
+      "epoch": 0.13497061293031065,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001079985045420748,
+      "loss": 0.0,
+      "step": 643
+    },
+    {
+      "epoch": 0.1351805205709488,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001077637354130818,
+      "loss": 0.0,
+      "step": 644
+    },
+    {
+      "epoch": 0.1353904282115869,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010752892322561701,
+      "loss": 0.0,
+      "step": 645
+    },
+    {
+      "epoch": 0.13560033585222503,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010729406928197289,
+      "loss": 0.0,
+      "step": 646
+    },
+    {
+      "epoch": 0.13581024349286314,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010705917488467345,
+      "loss": 0.0,
+      "step": 647
+    },
+    {
+      "epoch": 0.13602015113350127,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001068242413364671,
+      "loss": 0.0,
+      "step": 648
+    },
+    {
+      "epoch": 0.13623005877413938,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010658926994031937,
+      "loss": 0.0,
+      "step": 649
+    },
+    {
+      "epoch": 0.13643996641477749,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010635426199940566,
+      "loss": 0.0,
+      "step": 650
+    },
+    {
+      "epoch": 0.13664987405541562,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010611921881710409,
+      "loss": 0.0,
+      "step": 651
+    },
+    {
+      "epoch": 0.13685978169605373,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010588414169698824,
+      "loss": 0.0,
+      "step": 652
+    },
+    {
+      "epoch": 0.13706968933669186,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010564903194281988,
+      "loss": 0.0,
+      "step": 653
+    },
+    {
+      "epoch": 0.13727959697732997,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010541389085854176,
+      "loss": 0.0,
+      "step": 654
+    },
+    {
+      "epoch": 0.1374895046179681,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010517871974827047,
+      "loss": 0.0,
+      "step": 655
+    },
+    {
+      "epoch": 0.1376994122586062,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010494351991628903,
+      "loss": 0.0,
+      "step": 656
+    },
+    {
+      "epoch": 0.13790931989924432,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010470829266703983,
+      "loss": 0.0,
+      "step": 657
+    },
+    {
+      "epoch": 0.13811922753988246,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010447303930511727,
+      "loss": 0.0,
+      "step": 658
+    },
+    {
+      "epoch": 0.13832913518052056,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010423776113526058,
+      "loss": 0.0,
+      "step": 659
+    },
+    {
+      "epoch": 0.1385390428211587,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010400245946234659,
+      "loss": 0.0,
+      "step": 660
+    },
+    {
+      "epoch": 0.1387489504617968,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001037671355913825,
+      "loss": 0.0,
+      "step": 661
+    },
+    {
+      "epoch": 0.13895885810243494,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010353179082749857,
+      "loss": 0.0,
+      "step": 662
+    },
+    {
+      "epoch": 0.13916876574307305,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010329642647594102,
+      "loss": 0.0,
+      "step": 663
+    },
+    {
+      "epoch": 0.13937867338371115,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001030610438420646,
+      "loss": 0.0,
+      "step": 664
+    },
+    {
+      "epoch": 0.1395885810243493,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001028256442313255,
+      "loss": 0.0,
+      "step": 665
+    },
+    {
+      "epoch": 0.1397984886649874,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010259022894927411,
+      "loss": 0.0,
+      "step": 666
+    },
+    {
+      "epoch": 0.14000839630562553,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010235479930154763,
+      "loss": 0.0,
+      "step": 667
+    },
+    {
+      "epoch": 0.14021830394626364,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001021193565938631,
+      "loss": 0.0,
+      "step": 668
+    },
+    {
+      "epoch": 0.14042821158690177,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001018839021320098,
+      "loss": 0.0,
+      "step": 669
+    },
+    {
+      "epoch": 0.14063811922753988,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010164843722184232,
+      "loss": 0.0,
+      "step": 670
+    },
+    {
+      "epoch": 0.140848026868178,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001014129631692732,
+      "loss": 0.0,
+      "step": 671
+    },
+    {
+      "epoch": 0.14105793450881612,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010117748128026561,
+      "loss": 0.0,
+      "step": 672
+    },
+    {
+      "epoch": 0.14126784214945423,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010094199286082624,
+      "loss": 0.0,
+      "step": 673
+    },
+    {
+      "epoch": 0.14147774979009237,
+      "grad_norm": NaN,
+      "learning_rate": 0.000100706499216998,
+      "loss": 0.0,
+      "step": 674
+    },
+    {
+      "epoch": 0.14168765743073047,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010047100165485272,
+      "loss": 0.0,
+      "step": 675
+    },
+    {
+      "epoch": 0.1418975650713686,
+      "grad_norm": NaN,
+      "learning_rate": 0.00010023550148048404,
+      "loss": 0.0,
+      "step": 676
+    },
+    {
+      "epoch": 0.14210747271200672,
+      "grad_norm": NaN,
+      "learning_rate": 0.0001,
+      "loss": 0.0,
+      "step": 677
+    },
+    {
+      "epoch": 0.14231738035264482,
+      "grad_norm": NaN,
+      "learning_rate": 9.9764498519516e-05,
+      "loss": 0.0,
+      "step": 678
+    },
+    {
+      "epoch": 0.14252728799328296,
+      "grad_norm": NaN,
+      "learning_rate": 9.95289983451473e-05,
+      "loss": 0.0,
+      "step": 679
+    },
+    {
+      "epoch": 0.14273719563392107,
+      "grad_norm": NaN,
+      "learning_rate": 9.929350078300203e-05,
+      "loss": 0.0,
+      "step": 680
+    },
+    {
+      "epoch": 0.1429471032745592,
+      "grad_norm": NaN,
+      "learning_rate": 9.905800713917379e-05,
+      "loss": 0.0,
+      "step": 681
+    },
+    {
+      "epoch": 0.1431570109151973,
+      "grad_norm": NaN,
+      "learning_rate": 9.882251871973441e-05,
+      "loss": 0.0,
+      "step": 682
+    },
+    {
+      "epoch": 0.14336691855583544,
+      "grad_norm": NaN,
+      "learning_rate": 9.858703683072683e-05,
+      "loss": 0.0,
+      "step": 683
+    },
+    {
+      "epoch": 0.14357682619647355,
+      "grad_norm": NaN,
+      "learning_rate": 9.835156277815767e-05,
+      "loss": 0.0,
+      "step": 684
+    },
+    {
+      "epoch": 0.14378673383711166,
+      "grad_norm": NaN,
+      "learning_rate": 9.811609786799021e-05,
+      "loss": 0.0,
+      "step": 685
+    },
+    {
+      "epoch": 0.1439966414777498,
+      "grad_norm": NaN,
+      "learning_rate": 9.788064340613691e-05,
+      "loss": 0.0,
+      "step": 686
+    },
+    {
+      "epoch": 0.1442065491183879,
+      "grad_norm": NaN,
+      "learning_rate": 9.764520069845237e-05,
+      "loss": 0.0,
+      "step": 687
+    },
+    {
+      "epoch": 0.14441645675902604,
+      "grad_norm": NaN,
+      "learning_rate": 9.740977105072591e-05,
+      "loss": 0.0,
+      "step": 688
+    },
+    {
+      "epoch": 0.14462636439966414,
+      "grad_norm": NaN,
+      "learning_rate": 9.71743557686745e-05,
+      "loss": 0.0,
+      "step": 689
+    },
+    {
+      "epoch": 0.14483627204030228,
+      "grad_norm": NaN,
+      "learning_rate": 9.693895615793542e-05,
+      "loss": 0.0,
+      "step": 690
+    },
+    {
+      "epoch": 0.14504617968094038,
+      "grad_norm": NaN,
+      "learning_rate": 9.670357352405899e-05,
+      "loss": 0.0,
+      "step": 691
+    },
+    {
+      "epoch": 0.1452560873215785,
+      "grad_norm": NaN,
+      "learning_rate": 9.646820917250145e-05,
+      "loss": 0.0,
+      "step": 692
+    },
+    {
+      "epoch": 0.14546599496221663,
+      "grad_norm": NaN,
+      "learning_rate": 9.623286440861753e-05,
+      "loss": 0.0,
+      "step": 693
+    },
+    {
+      "epoch": 0.14567590260285473,
+      "grad_norm": NaN,
+      "learning_rate": 9.599754053765344e-05,
+      "loss": 0.0,
+      "step": 694
+    },
+    {
+      "epoch": 0.14588581024349287,
+      "grad_norm": NaN,
+      "learning_rate": 9.576223886473947e-05,
+      "loss": 0.0,
+      "step": 695
+    },
+    {
+      "epoch": 0.14609571788413098,
+      "grad_norm": NaN,
+      "learning_rate": 9.552696069488278e-05,
+      "loss": 0.0,
+      "step": 696
+    },
+    {
+      "epoch": 0.1463056255247691,
+      "grad_norm": NaN,
+      "learning_rate": 9.529170733296021e-05,
+      "loss": 0.0,
+      "step": 697
+    },
+    {
+      "epoch": 0.14651553316540722,
+      "grad_norm": NaN,
+      "learning_rate": 9.505648008371096e-05,
+      "loss": 0.0,
+      "step": 698
+    },
+    {
+      "epoch": 0.14672544080604533,
+      "grad_norm": NaN,
+      "learning_rate": 9.482128025172956e-05,
+      "loss": 0.0,
+      "step": 699
+    },
+    {
+      "epoch": 0.14693534844668346,
+      "grad_norm": NaN,
+      "learning_rate": 9.458610914145826e-05,
+      "loss": 0.0,
+      "step": 700
+    },
+    {
+      "epoch": 0.14714525608732157,
+      "grad_norm": NaN,
+      "learning_rate": 9.435096805718016e-05,
+      "loss": 0.0,
+      "step": 701
+    },
+    {
+      "epoch": 0.1473551637279597,
+      "grad_norm": NaN,
+      "learning_rate": 9.411585830301178e-05,
+      "loss": 0.0,
+      "step": 702
+    },
+    {
+      "epoch": 0.1475650713685978,
+      "grad_norm": NaN,
+      "learning_rate": 9.388078118289593e-05,
+      "loss": 0.0,
+      "step": 703
+    },
+    {
+      "epoch": 0.14777497900923595,
+      "grad_norm": NaN,
+      "learning_rate": 9.364573800059437e-05,
+      "loss": 0.0,
+      "step": 704
+    },
+    {
+      "epoch": 0.14798488664987405,
+      "grad_norm": NaN,
+      "learning_rate": 9.341073005968065e-05,
+      "loss": 0.0,
+      "step": 705
+    },
+    {
+      "epoch": 0.1481947942905122,
+      "grad_norm": NaN,
+      "learning_rate": 9.317575866353292e-05,
+      "loss": 0.0,
+      "step": 706
+    },
+    {
+      "epoch": 0.1484047019311503,
+      "grad_norm": NaN,
+      "learning_rate": 9.294082511532658e-05,
+      "loss": 0.0,
+      "step": 707
+    },
+    {
+      "epoch": 0.1486146095717884,
+      "grad_norm": NaN,
+      "learning_rate": 9.270593071802714e-05,
+      "loss": 0.0,
+      "step": 708
+    },
+    {
+      "epoch": 0.14882451721242654,
+      "grad_norm": NaN,
+      "learning_rate": 9.247107677438302e-05,
+      "loss": 0.0,
+      "step": 709
+    },
+    {
+      "epoch": 0.14903442485306465,
+      "grad_norm": NaN,
+      "learning_rate": 9.223626458691823e-05,
+      "loss": 0.0,
+      "step": 710
+    },
+    {
+      "epoch": 0.14924433249370278,
+      "grad_norm": NaN,
+      "learning_rate": 9.20014954579252e-05,
+      "loss": 0.0,
+      "step": 711
+    },
+    {
+      "epoch": 0.1494542401343409,
+      "grad_norm": NaN,
+      "learning_rate": 9.176677068945755e-05,
+      "loss": 0.0,
+      "step": 712
+    },
+    {
+      "epoch": 0.14966414777497902,
+      "grad_norm": NaN,
+      "learning_rate": 9.153209158332295e-05,
+      "loss": 0.0,
+      "step": 713
+    },
+    {
+      "epoch": 0.14987405541561713,
+      "grad_norm": NaN,
+      "learning_rate": 9.129745944107571e-05,
+      "loss": 0.0,
+      "step": 714
+    },
+    {
+      "epoch": 0.15008396305625524,
+      "grad_norm": NaN,
+      "learning_rate": 9.106287556400973e-05,
+      "loss": 0.0,
+      "step": 715
+    },
+    {
+      "epoch": 0.15029387069689337,
+      "grad_norm": NaN,
+      "learning_rate": 9.082834125315122e-05,
+      "loss": 0.0,
+      "step": 716
+    },
+    {
+      "epoch": 0.15050377833753148,
+      "grad_norm": NaN,
+      "learning_rate": 9.059385780925148e-05,
+      "loss": 0.0,
+      "step": 717
+    },
+    {
+      "epoch": 0.15071368597816961,
+      "grad_norm": NaN,
+      "learning_rate": 9.035942653277972e-05,
+      "loss": 0.0,
+      "step": 718
+    },
+    {
+      "epoch": 0.15092359361880772,
+      "grad_norm": NaN,
+      "learning_rate": 9.012504872391581e-05,
+      "loss": 0.0,
+      "step": 719
+    },
+    {
+      "epoch": 0.15113350125944586,
+      "grad_norm": NaN,
+      "learning_rate": 8.98907256825431e-05,
+      "loss": 0.0,
+      "step": 720
+    },
+    {
+      "epoch": 0.15134340890008396,
+      "grad_norm": NaN,
+      "learning_rate": 8.965645870824112e-05,
+      "loss": 0.0,
+      "step": 721
+    },
+    {
+      "epoch": 0.15155331654072207,
+      "grad_norm": NaN,
+      "learning_rate": 8.942224910027856e-05,
+      "loss": 0.0,
+      "step": 722
+    },
+    {
+      "epoch": 0.1517632241813602,
+      "grad_norm": NaN,
+      "learning_rate": 8.918809815760585e-05,
+      "loss": 0.0,
+      "step": 723
+    },
+    {
+      "epoch": 0.15197313182199831,
+      "grad_norm": NaN,
+      "learning_rate": 8.895400717884814e-05,
+      "loss": 0.0,
+      "step": 724
+    },
+    {
+      "epoch": 0.15218303946263645,
+      "grad_norm": NaN,
+      "learning_rate": 8.87199774622979e-05,
+      "loss": 0.0,
+      "step": 725
+    },
+    {
+      "epoch": 0.15239294710327456,
+      "grad_norm": NaN,
+      "learning_rate": 8.8486010305908e-05,
+      "loss": 0.0,
+      "step": 726
+    },
+    {
+      "epoch": 0.1526028547439127,
+      "grad_norm": NaN,
+      "learning_rate": 8.825210700728425e-05,
+      "loss": 0.0,
+      "step": 727
+    },
+    {
+      "epoch": 0.1528127623845508,
+      "grad_norm": NaN,
+      "learning_rate": 8.801826886367825e-05,
+      "loss": 0.0,
+      "step": 728
+    },
+    {
+      "epoch": 0.1530226700251889,
+      "grad_norm": NaN,
+      "learning_rate": 8.77844971719803e-05,
+      "loss": 0.0,
+      "step": 729
+    },
+    {
+      "epoch": 0.15323257766582704,
+      "grad_norm": NaN,
+      "learning_rate": 8.755079322871215e-05,
+      "loss": 0.0,
+      "step": 730
+    },
+    {
+      "epoch": 0.15344248530646515,
+      "grad_norm": NaN,
+      "learning_rate": 8.731715833001983e-05,
+      "loss": 0.0,
+      "step": 731
+    },
+    {
+      "epoch": 0.15365239294710328,
+      "grad_norm": NaN,
+      "learning_rate": 8.708359377166639e-05,
+      "loss": 0.0,
+      "step": 732
+    },
+    {
+      "epoch": 0.1538623005877414,
+      "grad_norm": NaN,
+      "learning_rate": 8.68501008490248e-05,
+      "loss": 0.0,
+      "step": 733
+    },
+    {
+      "epoch": 0.15407220822837953,
+      "grad_norm": NaN,
+      "learning_rate": 8.661668085707073e-05,
+      "loss": 0.0,
+      "step": 734
+    },
+    {
+      "epoch": 0.15428211586901763,
+      "grad_norm": NaN,
+      "learning_rate": 8.638333509037536e-05,
+      "loss": 0.0,
+      "step": 735
+    },
+    {
+      "epoch": 0.15449202350965574,
+      "grad_norm": NaN,
+      "learning_rate": 8.61500648430982e-05,
+      "loss": 0.0,
+      "step": 736
+    },
+    {
+      "epoch": 0.15470193115029388,
+      "grad_norm": NaN,
+      "learning_rate": 8.591687140897995e-05,
+      "loss": 0.0,
+      "step": 737
+    },
+    {
+      "epoch": 0.15491183879093198,
+      "grad_norm": NaN,
+      "learning_rate": 8.568375608133526e-05,
+      "loss": 0.0,
+      "step": 738
+    },
+    {
+      "epoch": 0.15512174643157012,
+      "grad_norm": NaN,
+      "learning_rate": 8.54507201530456e-05,
+      "loss": 0.0,
+      "step": 739
+    },
+    {
+      "epoch": 0.15533165407220823,
+      "grad_norm": NaN,
+      "learning_rate": 8.521776491655216e-05,
+      "loss": 0.0,
+      "step": 740
+    },
+    {
+      "epoch": 0.15554156171284636,
+      "grad_norm": NaN,
+      "learning_rate": 8.49848916638485e-05,
+      "loss": 0.0,
+      "step": 741
+    },
+    {
+      "epoch": 0.15575146935348447,
+      "grad_norm": NaN,
+      "learning_rate": 8.475210168647352e-05,
+      "loss": 0.0,
+      "step": 742
+    },
+    {
+      "epoch": 0.15596137699412257,
+      "grad_norm": NaN,
+      "learning_rate": 8.45193962755043e-05,
+      "loss": 0.0,
+      "step": 743
+    },
+    {
+      "epoch": 0.1561712846347607,
+      "grad_norm": NaN,
+      "learning_rate": 8.428677672154887e-05,
+      "loss": 0.0,
+      "step": 744
+    },
+    {
+      "epoch": 0.15638119227539882,
+      "grad_norm": NaN,
+      "learning_rate": 8.40542443147391e-05,
+      "loss": 0.0,
+      "step": 745
+    },
+    {
+      "epoch": 0.15659109991603695,
+      "grad_norm": NaN,
+      "learning_rate": 8.382180034472353e-05,
+      "loss": 0.0,
+      "step": 746
+    },
+    {
+      "epoch": 0.15680100755667506,
+      "grad_norm": NaN,
+      "learning_rate": 8.358944610066024e-05,
+      "loss": 0.0,
+      "step": 747
+    },
+    {
+      "epoch": 0.1570109151973132,
+      "grad_norm": NaN,
+      "learning_rate": 8.335718287120968e-05,
+      "loss": 0.0,
+      "step": 748
+    },
+    {
+      "epoch": 0.1572208228379513,
+      "grad_norm": NaN,
+      "learning_rate": 8.312501194452747e-05,
+      "loss": 0.0,
+      "step": 749
+    },
+    {
+      "epoch": 0.1574307304785894,
+      "grad_norm": NaN,
+      "learning_rate": 8.289293460825737e-05,
+      "loss": 0.0,
+      "step": 750
+    },
+    {
+      "epoch": 0.1574307304785894,
+      "eval_loss": NaN,
+      "eval_runtime": 237.5294,
+      "eval_samples_per_second": 6.685,
+      "eval_steps_per_second": 3.343,
+      "step": 750
     }
   ],
   "logging_steps": 1,
@@ -4261,7 +5319,7 @@
         "early_stopping_threshold": 0.0
       },
       "attributes": {
-        "early_stopping_patience_counter":
+        "early_stopping_patience_counter": 4
      }
    },
    "TrainerControl": {
@@ -4270,12 +5328,12 @@
       "should_evaluate": false,
       "should_log": false,
       "should_save": true,
-      "should_training_stop":
+      "should_training_stop": true
     },
     "attributes": {}
   }
 },
-  "total_flos":
+  "total_flos": 5.36624857350144e+17,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null