Training in progress, step 600, checkpoint
last-checkpoint/adapter_config.json
CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
     "q_proj",
-    "k_proj",
-    "down_proj",
-    "gate_proj",
     "v_proj",
-    "
+    "gate_proj",
+    "down_proj",
+    "o_proj",
+    "up_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
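The new adapter_config.json targets every attention projection (q/k/v/o) plus the MLP projections (gate/down/up). A minimal sketch of how such a config is typically declared with PEFT's LoraConfig is below; the rank and scaling values (r, lora_alpha) are placeholders, since this hunk does not show them.

# Sketch only: a PEFT LoraConfig that serializes the target_modules list shown above.
# r and lora_alpha are assumed values, not taken from this diff.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                      # assumed rank
    lora_alpha=32,             # assumed scaling
    target_modules=[
        "q_proj", "v_proj", "gate_proj",
        "down_proj", "o_proj", "up_proj", "k_proj",
    ],
    task_type="CAUSAL_LM",     # matches "task_type" in the config
)
print(sorted(lora_config.target_modules))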
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4b323ad4be24049867110298a5696d7a4d3bc1285b5d9abb776fd44bb62c4e7e
 size 639691872
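The binary files in this commit are stored as Git LFS pointers: each pointer records only the blob's sha256 digest and byte size. A small sketch for checking a locally downloaded file against the pointer above (the local path is hypothetical):

# Sketch: verify a downloaded checkpoint file against its Git LFS pointer.
import hashlib
import os

EXPECTED_OID = "4b323ad4be24049867110298a5696d7a4d3bc1285b5d9abb776fd44bb62c4e7e"  # from the pointer above
EXPECTED_SIZE = 639691872  # bytes, from the pointer above

path = "last-checkpoint/adapter_model.safetensors"  # hypothetical local path

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pointer matches local file")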
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5f743f776850133223224bc723df1ecc1783afc6f39100d552a3269e6ec930b9
 size 1279647314
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a3fb2209e718294a208c6cc709af7b887a1d8670e8f1a4bc2f4f906958b18e41
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:337154a73245c7e602fb3be659a2a2f9a9857c7eb27089f66eef80ad815c4899
 size 1064
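optimizer.pt, rng_state.pth, and scheduler.pt are torch-serialized state that the Trainer writes next to the adapter so a run can resume exactly where it stopped. A hedged sketch of inspecting them (paths assume the checkpoint directory has been downloaded locally; depending on your torch version you may need to pass weights_only=False):

# Sketch: inspect the resume state stored in this checkpoint.
import torch

optim_state = torch.load("last-checkpoint/optimizer.pt", map_location="cpu")
sched_state = torch.load("last-checkpoint/scheduler.pt", map_location="cpu")
rng_state = torch.load("last-checkpoint/rng_state.pth", map_location="cpu")

print(type(optim_state))   # optimizer state dict (param groups + per-parameter state)
print(sched_state)         # LR scheduler state (small dict, ~1 KB on disk here)
print(type(rng_state))     # saved RNG state for reproducible resumption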
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 1.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 1.5770864486694336,
+  "best_model_checkpoint": "miner_id_24/checkpoint-600",
+  "epoch": 0.3236573278041873,
   "eval_steps": 200,
-  "global_step":
+  "global_step": 600,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2831,6 +2831,1414 @@
       "eval_samples_per_second": 2.12,
       "eval_steps_per_second": 2.12,
       "step": 400
| 2834 |
+
},
|
| 2835 |
+
{
|
| 2836 |
+
"epoch": 0.21631098074913185,
|
| 2837 |
+
"grad_norm": 24.1074161529541,
|
| 2838 |
+
"learning_rate": 0.00019850345310169155,
|
| 2839 |
+
"loss": 37.3797,
|
| 2840 |
+
"step": 401
|
| 2841 |
+
},
|
| 2842 |
+
{
|
| 2843 |
+
"epoch": 0.2168504096288055,
|
| 2844 |
+
"grad_norm": 62.604949951171875,
|
| 2845 |
+
"learning_rate": 0.00019849351783439561,
|
| 2846 |
+
"loss": 78.7953,
|
| 2847 |
+
"step": 402
|
| 2848 |
+
},
|
| 2849 |
+
{
|
| 2850 |
+
"epoch": 0.21738983850847915,
|
| 2851 |
+
"grad_norm": 43.36476135253906,
|
| 2852 |
+
"learning_rate": 0.0001984835499473072,
|
| 2853 |
+
"loss": 82.645,
|
| 2854 |
+
"step": 403
|
| 2855 |
+
},
|
| 2856 |
+
{
|
| 2857 |
+
"epoch": 0.2179292673881528,
|
| 2858 |
+
"grad_norm": 52.12046432495117,
|
| 2859 |
+
"learning_rate": 0.0001984735494437275,
|
| 2860 |
+
"loss": 87.0839,
|
| 2861 |
+
"step": 404
|
| 2862 |
+
},
|
| 2863 |
+
{
|
| 2864 |
+
"epoch": 0.21846869626782645,
|
| 2865 |
+
"grad_norm": 34.333431243896484,
|
| 2866 |
+
"learning_rate": 0.00019846351632696863,
|
| 2867 |
+
"loss": 105.6289,
|
| 2868 |
+
"step": 405
|
| 2869 |
+
},
|
| 2870 |
+
{
|
| 2871 |
+
"epoch": 0.2190081251475001,
|
| 2872 |
+
"grad_norm": 41.665771484375,
|
| 2873 |
+
"learning_rate": 0.00019845345060035335,
|
| 2874 |
+
"loss": 112.3874,
|
| 2875 |
+
"step": 406
|
| 2876 |
+
},
|
| 2877 |
+
{
|
| 2878 |
+
"epoch": 0.21954755402717374,
|
| 2879 |
+
"grad_norm": 58.79914093017578,
|
| 2880 |
+
"learning_rate": 0.00019844335226721537,
|
| 2881 |
+
"loss": 114.2657,
|
| 2882 |
+
"step": 407
|
| 2883 |
+
},
|
| 2884 |
+
{
|
| 2885 |
+
"epoch": 0.22008698290684736,
|
| 2886 |
+
"grad_norm": 52.85742950439453,
|
| 2887 |
+
"learning_rate": 0.00019843322133089906,
|
| 2888 |
+
"loss": 98.4778,
|
| 2889 |
+
"step": 408
|
| 2890 |
+
},
|
| 2891 |
+
{
|
| 2892 |
+
"epoch": 0.220626411786521,
|
| 2893 |
+
"grad_norm": 53.792476654052734,
|
| 2894 |
+
"learning_rate": 0.00019842305779475968,
|
| 2895 |
+
"loss": 94.7811,
|
| 2896 |
+
"step": 409
|
| 2897 |
+
},
|
| 2898 |
+
{
|
| 2899 |
+
"epoch": 0.22116584066619466,
|
| 2900 |
+
"grad_norm": 49.56667709350586,
|
| 2901 |
+
"learning_rate": 0.0001984128616621633,
|
| 2902 |
+
"loss": 92.4516,
|
| 2903 |
+
"step": 410
|
| 2904 |
+
},
|
| 2905 |
+
{
|
| 2906 |
+
"epoch": 0.2217052695458683,
|
| 2907 |
+
"grad_norm": 38.96401596069336,
|
| 2908 |
+
"learning_rate": 0.0001984026329364867,
|
| 2909 |
+
"loss": 78.0561,
|
| 2910 |
+
"step": 411
|
| 2911 |
+
},
|
| 2912 |
+
{
|
| 2913 |
+
"epoch": 0.22224469842554195,
|
| 2914 |
+
"grad_norm": 35.649200439453125,
|
| 2915 |
+
"learning_rate": 0.00019839237162111757,
|
| 2916 |
+
"loss": 66.0612,
|
| 2917 |
+
"step": 412
|
| 2918 |
+
},
|
| 2919 |
+
{
|
| 2920 |
+
"epoch": 0.2227841273052156,
|
| 2921 |
+
"grad_norm": 22.54837989807129,
|
| 2922 |
+
"learning_rate": 0.00019838207771945426,
|
| 2923 |
+
"loss": 59.3091,
|
| 2924 |
+
"step": 413
|
| 2925 |
+
},
|
| 2926 |
+
{
|
| 2927 |
+
"epoch": 0.22332355618488925,
|
| 2928 |
+
"grad_norm": 16.843589782714844,
|
| 2929 |
+
"learning_rate": 0.00019837175123490596,
|
| 2930 |
+
"loss": 62.8711,
|
| 2931 |
+
"step": 414
|
| 2932 |
+
},
|
| 2933 |
+
{
|
| 2934 |
+
"epoch": 0.2238629850645629,
|
| 2935 |
+
"grad_norm": 18.909435272216797,
|
| 2936 |
+
"learning_rate": 0.00019836139217089275,
|
| 2937 |
+
"loss": 55.3784,
|
| 2938 |
+
"step": 415
|
| 2939 |
+
},
|
| 2940 |
+
{
|
| 2941 |
+
"epoch": 0.22440241394423655,
|
| 2942 |
+
"grad_norm": 25.120887756347656,
|
| 2943 |
+
"learning_rate": 0.0001983510005308454,
|
| 2944 |
+
"loss": 51.9063,
|
| 2945 |
+
"step": 416
|
| 2946 |
+
},
|
| 2947 |
+
{
|
| 2948 |
+
"epoch": 0.2249418428239102,
|
| 2949 |
+
"grad_norm": 30.78650665283203,
|
| 2950 |
+
"learning_rate": 0.00019834057631820543,
|
| 2951 |
+
"loss": 32.4726,
|
| 2952 |
+
"step": 417
|
| 2953 |
+
},
|
| 2954 |
+
{
|
| 2955 |
+
"epoch": 0.22548127170358384,
|
| 2956 |
+
"grad_norm": 72.46208953857422,
|
| 2957 |
+
"learning_rate": 0.00019833011953642525,
|
| 2958 |
+
"loss": 44.1452,
|
| 2959 |
+
"step": 418
|
| 2960 |
+
},
|
| 2961 |
+
{
|
| 2962 |
+
"epoch": 0.2260207005832575,
|
| 2963 |
+
"grad_norm": 45.94267654418945,
|
| 2964 |
+
"learning_rate": 0.000198319630188968,
|
| 2965 |
+
"loss": 50.9596,
|
| 2966 |
+
"step": 419
|
| 2967 |
+
},
|
| 2968 |
+
{
|
| 2969 |
+
"epoch": 0.2265601294629311,
|
| 2970 |
+
"grad_norm": 47.52016067504883,
|
| 2971 |
+
"learning_rate": 0.00019830910827930764,
|
| 2972 |
+
"loss": 44.8286,
|
| 2973 |
+
"step": 420
|
| 2974 |
+
},
|
| 2975 |
+
{
|
| 2976 |
+
"epoch": 0.22709955834260476,
|
| 2977 |
+
"grad_norm": 40.93891525268555,
|
| 2978 |
+
"learning_rate": 0.00019829855381092886,
|
| 2979 |
+
"loss": 56.7985,
|
| 2980 |
+
"step": 421
|
| 2981 |
+
},
|
| 2982 |
+
{
|
| 2983 |
+
"epoch": 0.2276389872222784,
|
| 2984 |
+
"grad_norm": 36.567108154296875,
|
| 2985 |
+
"learning_rate": 0.0001982879667873272,
|
| 2986 |
+
"loss": 35.7161,
|
| 2987 |
+
"step": 422
|
| 2988 |
+
},
|
| 2989 |
+
{
|
| 2990 |
+
"epoch": 0.22817841610195205,
|
| 2991 |
+
"grad_norm": 31.908977508544922,
|
| 2992 |
+
"learning_rate": 0.0001982773472120089,
|
| 2993 |
+
"loss": 42.8407,
|
| 2994 |
+
"step": 423
|
| 2995 |
+
},
|
| 2996 |
+
{
|
| 2997 |
+
"epoch": 0.2287178449816257,
|
| 2998 |
+
"grad_norm": 37.47427749633789,
|
| 2999 |
+
"learning_rate": 0.00019826669508849108,
|
| 3000 |
+
"loss": 39.5264,
|
| 3001 |
+
"step": 424
|
| 3002 |
+
},
|
| 3003 |
+
{
|
| 3004 |
+
"epoch": 0.22925727386129935,
|
| 3005 |
+
"grad_norm": 43.83090591430664,
|
| 3006 |
+
"learning_rate": 0.00019825601042030156,
|
| 3007 |
+
"loss": 48.5415,
|
| 3008 |
+
"step": 425
|
| 3009 |
+
},
|
| 3010 |
+
{
|
| 3011 |
+
"epoch": 0.229796702740973,
|
| 3012 |
+
"grad_norm": 42.004425048828125,
|
| 3013 |
+
"learning_rate": 0.00019824529321097893,
|
| 3014 |
+
"loss": 39.4127,
|
| 3015 |
+
"step": 426
|
| 3016 |
+
},
|
| 3017 |
+
{
|
| 3018 |
+
"epoch": 0.23033613162064664,
|
| 3019 |
+
"grad_norm": 38.282066345214844,
|
| 3020 |
+
"learning_rate": 0.00019823454346407267,
|
| 3021 |
+
"loss": 40.8499,
|
| 3022 |
+
"step": 427
|
| 3023 |
+
},
|
| 3024 |
+
{
|
| 3025 |
+
"epoch": 0.2308755605003203,
|
| 3026 |
+
"grad_norm": 33.92627716064453,
|
| 3027 |
+
"learning_rate": 0.0001982237611831429,
|
| 3028 |
+
"loss": 35.4472,
|
| 3029 |
+
"step": 428
|
| 3030 |
+
},
|
| 3031 |
+
{
|
| 3032 |
+
"epoch": 0.23141498937999394,
|
| 3033 |
+
"grad_norm": 53.361106872558594,
|
| 3034 |
+
"learning_rate": 0.00019821294637176057,
|
| 3035 |
+
"loss": 43.1921,
|
| 3036 |
+
"step": 429
|
| 3037 |
+
},
|
| 3038 |
+
{
|
| 3039 |
+
"epoch": 0.2319544182596676,
|
| 3040 |
+
"grad_norm": 40.92842102050781,
|
| 3041 |
+
"learning_rate": 0.00019820209903350744,
|
| 3042 |
+
"loss": 36.5019,
|
| 3043 |
+
"step": 430
|
| 3044 |
+
},
|
| 3045 |
+
{
|
| 3046 |
+
"epoch": 0.2324938471393412,
|
| 3047 |
+
"grad_norm": 35.71042251586914,
|
| 3048 |
+
"learning_rate": 0.00019819121917197602,
|
| 3049 |
+
"loss": 36.598,
|
| 3050 |
+
"step": 431
|
| 3051 |
+
},
|
| 3052 |
+
{
|
| 3053 |
+
"epoch": 0.23303327601901486,
|
| 3054 |
+
"grad_norm": 35.10508728027344,
|
| 3055 |
+
"learning_rate": 0.00019818030679076952,
|
| 3056 |
+
"loss": 31.6675,
|
| 3057 |
+
"step": 432
|
| 3058 |
+
},
|
| 3059 |
+
{
|
| 3060 |
+
"epoch": 0.2335727048986885,
|
| 3061 |
+
"grad_norm": 31.885364532470703,
|
| 3062 |
+
"learning_rate": 0.00019816936189350206,
|
| 3063 |
+
"loss": 34.3554,
|
| 3064 |
+
"step": 433
|
| 3065 |
+
},
|
| 3066 |
+
{
|
| 3067 |
+
"epoch": 0.23411213377836215,
|
| 3068 |
+
"grad_norm": 42.998878479003906,
|
| 3069 |
+
"learning_rate": 0.0001981583844837984,
|
| 3070 |
+
"loss": 28.1099,
|
| 3071 |
+
"step": 434
|
| 3072 |
+
},
|
| 3073 |
+
{
|
| 3074 |
+
"epoch": 0.2346515626580358,
|
| 3075 |
+
"grad_norm": 38.70567321777344,
|
| 3076 |
+
"learning_rate": 0.00019814737456529412,
|
| 3077 |
+
"loss": 42.3567,
|
| 3078 |
+
"step": 435
|
| 3079 |
+
},
|
| 3080 |
+
{
|
| 3081 |
+
"epoch": 0.23519099153770945,
|
| 3082 |
+
"grad_norm": 34.43855285644531,
|
| 3083 |
+
"learning_rate": 0.00019813633214163555,
|
| 3084 |
+
"loss": 22.8285,
|
| 3085 |
+
"step": 436
|
| 3086 |
+
},
|
| 3087 |
+
{
|
| 3088 |
+
"epoch": 0.2357304204173831,
|
| 3089 |
+
"grad_norm": 33.38055419921875,
|
| 3090 |
+
"learning_rate": 0.00019812525721647986,
|
| 3091 |
+
"loss": 36.1465,
|
| 3092 |
+
"step": 437
|
| 3093 |
+
},
|
| 3094 |
+
{
|
| 3095 |
+
"epoch": 0.23626984929705674,
|
| 3096 |
+
"grad_norm": 42.98970413208008,
|
| 3097 |
+
"learning_rate": 0.00019811414979349485,
|
| 3098 |
+
"loss": 34.8416,
|
| 3099 |
+
"step": 438
|
| 3100 |
+
},
|
| 3101 |
+
{
|
| 3102 |
+
"epoch": 0.2368092781767304,
|
| 3103 |
+
"grad_norm": 37.12187957763672,
|
| 3104 |
+
"learning_rate": 0.0001981030098763592,
|
| 3105 |
+
"loss": 34.276,
|
| 3106 |
+
"step": 439
|
| 3107 |
+
},
|
| 3108 |
+
{
|
| 3109 |
+
"epoch": 0.23734870705640404,
|
| 3110 |
+
"grad_norm": 44.36403274536133,
|
| 3111 |
+
"learning_rate": 0.00019809183746876232,
|
| 3112 |
+
"loss": 30.3544,
|
| 3113 |
+
"step": 440
|
| 3114 |
+
},
|
| 3115 |
+
{
|
| 3116 |
+
"epoch": 0.2378881359360777,
|
| 3117 |
+
"grad_norm": 46.281654357910156,
|
| 3118 |
+
"learning_rate": 0.00019808063257440432,
|
| 3119 |
+
"loss": 27.8803,
|
| 3120 |
+
"step": 441
|
| 3121 |
+
},
|
| 3122 |
+
{
|
| 3123 |
+
"epoch": 0.23842756481575134,
|
| 3124 |
+
"grad_norm": 49.94664001464844,
|
| 3125 |
+
"learning_rate": 0.00019806939519699613,
|
| 3126 |
+
"loss": 31.0358,
|
| 3127 |
+
"step": 442
|
| 3128 |
+
},
|
| 3129 |
+
{
|
| 3130 |
+
"epoch": 0.23896699369542496,
|
| 3131 |
+
"grad_norm": 42.308616638183594,
|
| 3132 |
+
"learning_rate": 0.0001980581253402595,
|
| 3133 |
+
"loss": 29.4053,
|
| 3134 |
+
"step": 443
|
| 3135 |
+
},
|
| 3136 |
+
{
|
| 3137 |
+
"epoch": 0.2395064225750986,
|
| 3138 |
+
"grad_norm": 51.36742401123047,
|
| 3139 |
+
"learning_rate": 0.00019804682300792674,
|
| 3140 |
+
"loss": 31.0947,
|
| 3141 |
+
"step": 444
|
| 3142 |
+
},
|
| 3143 |
+
{
|
| 3144 |
+
"epoch": 0.24004585145477225,
|
| 3145 |
+
"grad_norm": 40.25013732910156,
|
| 3146 |
+
"learning_rate": 0.00019803548820374113,
|
| 3147 |
+
"loss": 26.6703,
|
| 3148 |
+
"step": 445
|
| 3149 |
+
},
|
| 3150 |
+
{
|
| 3151 |
+
"epoch": 0.2405852803344459,
|
| 3152 |
+
"grad_norm": 53.013710021972656,
|
| 3153 |
+
"learning_rate": 0.00019802412093145657,
|
| 3154 |
+
"loss": 35.5286,
|
| 3155 |
+
"step": 446
|
| 3156 |
+
},
|
| 3157 |
+
{
|
| 3158 |
+
"epoch": 0.24112470921411955,
|
| 3159 |
+
"grad_norm": 41.21833038330078,
|
| 3160 |
+
"learning_rate": 0.00019801272119483775,
|
| 3161 |
+
"loss": 25.3315,
|
| 3162 |
+
"step": 447
|
| 3163 |
+
},
|
| 3164 |
+
{
|
| 3165 |
+
"epoch": 0.2416641380937932,
|
| 3166 |
+
"grad_norm": 61.56970977783203,
|
| 3167 |
+
"learning_rate": 0.00019800128899766017,
|
| 3168 |
+
"loss": 27.589,
|
| 3169 |
+
"step": 448
|
| 3170 |
+
},
|
| 3171 |
+
{
|
| 3172 |
+
"epoch": 0.24220356697346684,
|
| 3173 |
+
"grad_norm": 58.22453308105469,
|
| 3174 |
+
"learning_rate": 0.00019798982434371,
|
| 3175 |
+
"loss": 37.2235,
|
| 3176 |
+
"step": 449
|
| 3177 |
+
},
|
| 3178 |
+
{
|
| 3179 |
+
"epoch": 0.2427429958531405,
|
| 3180 |
+
"grad_norm": 36.04716110229492,
|
| 3181 |
+
"learning_rate": 0.00019797832723678413,
|
| 3182 |
+
"loss": 28.1485,
|
| 3183 |
+
"step": 450
|
| 3184 |
+
},
|
| 3185 |
+
{
|
| 3186 |
+
"epoch": 0.24328242473281414,
|
| 3187 |
+
"grad_norm": 50.804813385009766,
|
| 3188 |
+
"learning_rate": 0.00019796679768069032,
|
| 3189 |
+
"loss": 49.1471,
|
| 3190 |
+
"step": 451
|
| 3191 |
+
},
|
| 3192 |
+
{
|
| 3193 |
+
"epoch": 0.2438218536124878,
|
| 3194 |
+
"grad_norm": 91.2785873413086,
|
| 3195 |
+
"learning_rate": 0.00019795523567924702,
|
| 3196 |
+
"loss": 72.8998,
|
| 3197 |
+
"step": 452
|
| 3198 |
+
},
|
| 3199 |
+
{
|
| 3200 |
+
"epoch": 0.24436128249216144,
|
| 3201 |
+
"grad_norm": 110.37539672851562,
|
| 3202 |
+
"learning_rate": 0.00019794364123628335,
|
| 3203 |
+
"loss": 98.2308,
|
| 3204 |
+
"step": 453
|
| 3205 |
+
},
|
| 3206 |
+
{
|
| 3207 |
+
"epoch": 0.24490071137183506,
|
| 3208 |
+
"grad_norm": 79.3825912475586,
|
| 3209 |
+
"learning_rate": 0.00019793201435563932,
|
| 3210 |
+
"loss": 109.7274,
|
| 3211 |
+
"step": 454
|
| 3212 |
+
},
|
| 3213 |
+
{
|
| 3214 |
+
"epoch": 0.2454401402515087,
|
| 3215 |
+
"grad_norm": 36.62171173095703,
|
| 3216 |
+
"learning_rate": 0.00019792035504116555,
|
| 3217 |
+
"loss": 107.5116,
|
| 3218 |
+
"step": 455
|
| 3219 |
+
},
|
| 3220 |
+
{
|
| 3221 |
+
"epoch": 0.24597956913118235,
|
| 3222 |
+
"grad_norm": 57.664146423339844,
|
| 3223 |
+
"learning_rate": 0.00019790866329672346,
|
| 3224 |
+
"loss": 113.5622,
|
| 3225 |
+
"step": 456
|
| 3226 |
+
},
|
| 3227 |
+
{
|
| 3228 |
+
"epoch": 0.246518998010856,
|
| 3229 |
+
"grad_norm": 57.12027359008789,
|
| 3230 |
+
"learning_rate": 0.00019789693912618524,
|
| 3231 |
+
"loss": 102.4627,
|
| 3232 |
+
"step": 457
|
| 3233 |
+
},
|
| 3234 |
+
{
|
| 3235 |
+
"epoch": 0.24705842689052965,
|
| 3236 |
+
"grad_norm": 67.92241668701172,
|
| 3237 |
+
"learning_rate": 0.00019788518253343376,
|
| 3238 |
+
"loss": 90.2483,
|
| 3239 |
+
"step": 458
|
| 3240 |
+
},
|
| 3241 |
+
{
|
| 3242 |
+
"epoch": 0.2475978557702033,
|
| 3243 |
+
"grad_norm": 63.95331573486328,
|
| 3244 |
+
"learning_rate": 0.00019787339352236264,
|
| 3245 |
+
"loss": 94.7671,
|
| 3246 |
+
"step": 459
|
| 3247 |
+
},
|
| 3248 |
+
{
|
| 3249 |
+
"epoch": 0.24813728464987694,
|
| 3250 |
+
"grad_norm": 55.70960235595703,
|
| 3251 |
+
"learning_rate": 0.00019786157209687627,
|
| 3252 |
+
"loss": 92.1523,
|
| 3253 |
+
"step": 460
|
| 3254 |
+
},
|
| 3255 |
+
{
|
| 3256 |
+
"epoch": 0.2486767135295506,
|
| 3257 |
+
"grad_norm": 44.270233154296875,
|
| 3258 |
+
"learning_rate": 0.00019784971826088973,
|
| 3259 |
+
"loss": 82.3084,
|
| 3260 |
+
"step": 461
|
| 3261 |
+
},
|
| 3262 |
+
{
|
| 3263 |
+
"epoch": 0.24921614240922424,
|
| 3264 |
+
"grad_norm": 35.74955749511719,
|
| 3265 |
+
"learning_rate": 0.0001978378320183289,
|
| 3266 |
+
"loss": 71.401,
|
| 3267 |
+
"step": 462
|
| 3268 |
+
},
|
| 3269 |
+
{
|
| 3270 |
+
"epoch": 0.2497555712888979,
|
| 3271 |
+
"grad_norm": 26.20838165283203,
|
| 3272 |
+
"learning_rate": 0.00019782591337313035,
|
| 3273 |
+
"loss": 68.6018,
|
| 3274 |
+
"step": 463
|
| 3275 |
+
},
|
| 3276 |
+
{
|
| 3277 |
+
"epoch": 0.25029500016857154,
|
| 3278 |
+
"grad_norm": 20.70208740234375,
|
| 3279 |
+
"learning_rate": 0.00019781396232924133,
|
| 3280 |
+
"loss": 62.6257,
|
| 3281 |
+
"step": 464
|
| 3282 |
+
},
|
| 3283 |
+
{
|
| 3284 |
+
"epoch": 0.25083442904824516,
|
| 3285 |
+
"grad_norm": 17.804771423339844,
|
| 3286 |
+
"learning_rate": 0.00019780197889061993,
|
| 3287 |
+
"loss": 54.6564,
|
| 3288 |
+
"step": 465
|
| 3289 |
+
},
|
| 3290 |
+
{
|
| 3291 |
+
"epoch": 0.25137385792791883,
|
| 3292 |
+
"grad_norm": 24.327360153198242,
|
| 3293 |
+
"learning_rate": 0.0001977899630612349,
|
| 3294 |
+
"loss": 50.7451,
|
| 3295 |
+
"step": 466
|
| 3296 |
+
},
|
| 3297 |
+
{
|
| 3298 |
+
"epoch": 0.25191328680759245,
|
| 3299 |
+
"grad_norm": 29.580142974853516,
|
| 3300 |
+
"learning_rate": 0.00019777791484506567,
|
| 3301 |
+
"loss": 34.4045,
|
| 3302 |
+
"step": 467
|
| 3303 |
+
},
|
| 3304 |
+
{
|
| 3305 |
+
"epoch": 0.2524527156872661,
|
| 3306 |
+
"grad_norm": 30.99888801574707,
|
| 3307 |
+
"learning_rate": 0.00019776583424610254,
|
| 3308 |
+
"loss": 41.2975,
|
| 3309 |
+
"step": 468
|
| 3310 |
+
},
|
| 3311 |
+
{
|
| 3312 |
+
"epoch": 0.25299214456693975,
|
| 3313 |
+
"grad_norm": 40.59465408325195,
|
| 3314 |
+
"learning_rate": 0.0001977537212683464,
|
| 3315 |
+
"loss": 56.0607,
|
| 3316 |
+
"step": 469
|
| 3317 |
+
},
|
| 3318 |
+
{
|
| 3319 |
+
"epoch": 0.2535315734466134,
|
| 3320 |
+
"grad_norm": 42.85790252685547,
|
| 3321 |
+
"learning_rate": 0.00019774157591580894,
|
| 3322 |
+
"loss": 40.9168,
|
| 3323 |
+
"step": 470
|
| 3324 |
+
},
|
| 3325 |
+
{
|
| 3326 |
+
"epoch": 0.25407100232628704,
|
| 3327 |
+
"grad_norm": 38.090885162353516,
|
| 3328 |
+
"learning_rate": 0.0001977293981925125,
|
| 3329 |
+
"loss": 49.6262,
|
| 3330 |
+
"step": 471
|
| 3331 |
+
},
|
| 3332 |
+
{
|
| 3333 |
+
"epoch": 0.25461043120596066,
|
| 3334 |
+
"grad_norm": 33.007991790771484,
|
| 3335 |
+
"learning_rate": 0.0001977171881024902,
|
| 3336 |
+
"loss": 44.5241,
|
| 3337 |
+
"step": 472
|
| 3338 |
+
},
|
| 3339 |
+
{
|
| 3340 |
+
"epoch": 0.25514986008563434,
|
| 3341 |
+
"grad_norm": 39.41592025756836,
|
| 3342 |
+
"learning_rate": 0.00019770494564978595,
|
| 3343 |
+
"loss": 38.185,
|
| 3344 |
+
"step": 473
|
| 3345 |
+
},
|
| 3346 |
+
{
|
| 3347 |
+
"epoch": 0.25568928896530796,
|
| 3348 |
+
"grad_norm": 33.008148193359375,
|
| 3349 |
+
"learning_rate": 0.00019769267083845417,
|
| 3350 |
+
"loss": 42.3843,
|
| 3351 |
+
"step": 474
|
| 3352 |
+
},
|
| 3353 |
+
{
|
| 3354 |
+
"epoch": 0.25622871784498163,
|
| 3355 |
+
"grad_norm": 27.917991638183594,
|
| 3356 |
+
"learning_rate": 0.0001976803636725602,
|
| 3357 |
+
"loss": 33.7216,
|
| 3358 |
+
"step": 475
|
| 3359 |
+
},
|
| 3360 |
+
{
|
| 3361 |
+
"epoch": 0.25676814672465526,
|
| 3362 |
+
"grad_norm": 29.870256423950195,
|
| 3363 |
+
"learning_rate": 0.00019766802415617998,
|
| 3364 |
+
"loss": 35.7963,
|
| 3365 |
+
"step": 476
|
| 3366 |
+
},
|
| 3367 |
+
{
|
| 3368 |
+
"epoch": 0.25730757560432893,
|
| 3369 |
+
"grad_norm": 44.98633575439453,
|
| 3370 |
+
"learning_rate": 0.0001976556522934002,
|
| 3371 |
+
"loss": 35.8127,
|
| 3372 |
+
"step": 477
|
| 3373 |
+
},
|
| 3374 |
+
{
|
| 3375 |
+
"epoch": 0.25784700448400255,
|
| 3376 |
+
"grad_norm": 43.03909683227539,
|
| 3377 |
+
"learning_rate": 0.0001976432480883183,
|
| 3378 |
+
"loss": 35.4111,
|
| 3379 |
+
"step": 478
|
| 3380 |
+
},
|
| 3381 |
+
{
|
| 3382 |
+
"epoch": 0.2583864333636762,
|
| 3383 |
+
"grad_norm": 47.32424545288086,
|
| 3384 |
+
"learning_rate": 0.00019763081154504234,
|
| 3385 |
+
"loss": 41.8895,
|
| 3386 |
+
"step": 479
|
| 3387 |
+
},
|
| 3388 |
+
{
|
| 3389 |
+
"epoch": 0.25892586224334985,
|
| 3390 |
+
"grad_norm": 49.7735595703125,
|
| 3391 |
+
"learning_rate": 0.0001976183426676912,
|
| 3392 |
+
"loss": 32.9801,
|
| 3393 |
+
"step": 480
|
| 3394 |
+
},
|
| 3395 |
+
{
|
| 3396 |
+
"epoch": 0.2594652911230235,
|
| 3397 |
+
"grad_norm": 44.57673645019531,
|
| 3398 |
+
"learning_rate": 0.0001976058414603944,
|
| 3399 |
+
"loss": 36.089,
|
| 3400 |
+
"step": 481
|
| 3401 |
+
},
|
| 3402 |
+
{
|
| 3403 |
+
"epoch": 0.26000472000269714,
|
| 3404 |
+
"grad_norm": 36.22349548339844,
|
| 3405 |
+
"learning_rate": 0.00019759330792729212,
|
| 3406 |
+
"loss": 47.0487,
|
| 3407 |
+
"step": 482
|
| 3408 |
+
},
|
| 3409 |
+
{
|
| 3410 |
+
"epoch": 0.26054414888237076,
|
| 3411 |
+
"grad_norm": 38.58706283569336,
|
| 3412 |
+
"learning_rate": 0.00019758074207253535,
|
| 3413 |
+
"loss": 34.3672,
|
| 3414 |
+
"step": 483
|
| 3415 |
+
},
|
| 3416 |
+
{
|
| 3417 |
+
"epoch": 0.26108357776204444,
|
| 3418 |
+
"grad_norm": 40.61176300048828,
|
| 3419 |
+
"learning_rate": 0.00019756814390028575,
|
| 3420 |
+
"loss": 39.7468,
|
| 3421 |
+
"step": 484
|
| 3422 |
+
},
|
| 3423 |
+
{
|
| 3424 |
+
"epoch": 0.26162300664171806,
|
| 3425 |
+
"grad_norm": 29.439836502075195,
|
| 3426 |
+
"learning_rate": 0.00019755551341471566,
|
| 3427 |
+
"loss": 34.1449,
|
| 3428 |
+
"step": 485
|
| 3429 |
+
},
|
| 3430 |
+
{
|
| 3431 |
+
"epoch": 0.26216243552139173,
|
| 3432 |
+
"grad_norm": 35.68241882324219,
|
| 3433 |
+
"learning_rate": 0.00019754285062000815,
|
| 3434 |
+
"loss": 31.6102,
|
| 3435 |
+
"step": 486
|
| 3436 |
+
},
|
| 3437 |
+
{
|
| 3438 |
+
"epoch": 0.26270186440106535,
|
| 3439 |
+
"grad_norm": 44.2021598815918,
|
| 3440 |
+
"learning_rate": 0.000197530155520357,
|
| 3441 |
+
"loss": 31.8889,
|
| 3442 |
+
"step": 487
|
| 3443 |
+
},
|
| 3444 |
+
{
|
| 3445 |
+
"epoch": 0.26324129328073903,
|
| 3446 |
+
"grad_norm": 53.82715606689453,
|
| 3447 |
+
"learning_rate": 0.00019751742811996656,
|
| 3448 |
+
"loss": 31.6853,
|
| 3449 |
+
"step": 488
|
| 3450 |
+
},
|
| 3451 |
+
{
|
| 3452 |
+
"epoch": 0.26378072216041265,
|
| 3453 |
+
"grad_norm": 41.77256774902344,
|
| 3454 |
+
"learning_rate": 0.00019750466842305208,
|
| 3455 |
+
"loss": 39.1939,
|
| 3456 |
+
"step": 489
|
| 3457 |
+
},
|
| 3458 |
+
{
|
| 3459 |
+
"epoch": 0.2643201510400863,
|
| 3460 |
+
"grad_norm": 36.42414093017578,
|
| 3461 |
+
"learning_rate": 0.00019749187643383937,
|
| 3462 |
+
"loss": 26.3978,
|
| 3463 |
+
"step": 490
|
| 3464 |
+
},
|
| 3465 |
+
{
|
| 3466 |
+
"epoch": 0.26485957991975995,
|
| 3467 |
+
"grad_norm": 49.238014221191406,
|
| 3468 |
+
"learning_rate": 0.00019747905215656498,
|
| 3469 |
+
"loss": 33.8181,
|
| 3470 |
+
"step": 491
|
| 3471 |
+
},
|
| 3472 |
+
{
|
| 3473 |
+
"epoch": 0.2653990087994336,
|
| 3474 |
+
"grad_norm": 37.46484375,
|
| 3475 |
+
"learning_rate": 0.00019746619559547619,
|
| 3476 |
+
"loss": 32.0879,
|
| 3477 |
+
"step": 492
|
| 3478 |
+
},
|
| 3479 |
+
{
|
| 3480 |
+
"epoch": 0.26593843767910724,
|
| 3481 |
+
"grad_norm": 29.428075790405273,
|
| 3482 |
+
"learning_rate": 0.00019745330675483084,
|
| 3483 |
+
"loss": 22.5194,
|
| 3484 |
+
"step": 493
|
| 3485 |
+
},
|
| 3486 |
+
{
|
| 3487 |
+
"epoch": 0.2664778665587809,
|
| 3488 |
+
"grad_norm": 42.24260330200195,
|
| 3489 |
+
"learning_rate": 0.00019744038563889764,
|
| 3490 |
+
"loss": 34.5577,
|
| 3491 |
+
"step": 494
|
| 3492 |
+
},
|
| 3493 |
+
{
|
| 3494 |
+
"epoch": 0.26701729543845454,
|
| 3495 |
+
"grad_norm": 43.271976470947266,
|
| 3496 |
+
"learning_rate": 0.00019742743225195582,
|
| 3497 |
+
"loss": 25.107,
|
| 3498 |
+
"step": 495
|
| 3499 |
+
},
|
| 3500 |
+
{
|
| 3501 |
+
"epoch": 0.26755672431812816,
|
| 3502 |
+
"grad_norm": 41.1341667175293,
|
| 3503 |
+
"learning_rate": 0.00019741444659829543,
|
| 3504 |
+
"loss": 24.4596,
|
| 3505 |
+
"step": 496
|
| 3506 |
+
},
|
| 3507 |
+
{
|
| 3508 |
+
"epoch": 0.26809615319780183,
|
| 3509 |
+
"grad_norm": 35.3587760925293,
|
| 3510 |
+
"learning_rate": 0.00019740142868221713,
|
| 3511 |
+
"loss": 21.1434,
|
| 3512 |
+
"step": 497
|
| 3513 |
+
},
|
| 3514 |
+
{
|
| 3515 |
+
"epoch": 0.26863558207747545,
|
| 3516 |
+
"grad_norm": 47.48214340209961,
|
| 3517 |
+
"learning_rate": 0.00019738837850803226,
|
| 3518 |
+
"loss": 23.4752,
|
| 3519 |
+
"step": 498
|
| 3520 |
+
},
|
| 3521 |
+
{
|
| 3522 |
+
"epoch": 0.26917501095714913,
|
| 3523 |
+
"grad_norm": 44.637882232666016,
|
| 3524 |
+
"learning_rate": 0.00019737529608006293,
|
| 3525 |
+
"loss": 21.9525,
|
| 3526 |
+
"step": 499
|
| 3527 |
+
},
|
| 3528 |
+
{
|
| 3529 |
+
"epoch": 0.26971443983682275,
|
| 3530 |
+
"grad_norm": 31.005287170410156,
|
| 3531 |
+
"learning_rate": 0.00019736218140264185,
|
| 3532 |
+
"loss": 19.1622,
|
| 3533 |
+
"step": 500
|
| 3534 |
+
},
|
| 3535 |
+
{
|
| 3536 |
+
"epoch": 0.2702538687164964,
|
| 3537 |
+
"grad_norm": 32.10681915283203,
|
| 3538 |
+
"learning_rate": 0.0001973490344801124,
|
| 3539 |
+
"loss": 44.8021,
|
| 3540 |
+
"step": 501
|
| 3541 |
+
},
|
| 3542 |
+
{
|
| 3543 |
+
"epoch": 0.27079329759617005,
|
| 3544 |
+
"grad_norm": 67.818603515625,
|
| 3545 |
+
"learning_rate": 0.0001973358553168287,
|
| 3546 |
+
"loss": 90.5945,
|
| 3547 |
+
"step": 502
|
| 3548 |
+
},
|
| 3549 |
+
{
|
| 3550 |
+
"epoch": 0.2713327264758437,
|
| 3551 |
+
"grad_norm": 78.30387115478516,
|
| 3552 |
+
"learning_rate": 0.00019732264391715556,
|
| 3553 |
+
"loss": 101.037,
|
| 3554 |
+
"step": 503
|
| 3555 |
+
},
|
| 3556 |
+
{
|
| 3557 |
+
"epoch": 0.27187215535551734,
|
| 3558 |
+
"grad_norm": 92.50519561767578,
|
| 3559 |
+
"learning_rate": 0.00019730940028546835,
|
| 3560 |
+
"loss": 124.3723,
|
| 3561 |
+
"step": 504
|
| 3562 |
+
},
|
| 3563 |
+
{
|
| 3564 |
+
"epoch": 0.272411584235191,
|
| 3565 |
+
"grad_norm": 38.794246673583984,
|
| 3566 |
+
"learning_rate": 0.0001972961244261532,
|
| 3567 |
+
"loss": 105.1317,
|
| 3568 |
+
"step": 505
|
| 3569 |
+
},
|
| 3570 |
+
{
|
| 3571 |
+
"epoch": 0.27295101311486464,
|
| 3572 |
+
"grad_norm": 34.56374740600586,
|
| 3573 |
+
"learning_rate": 0.00019728281634360698,
|
| 3574 |
+
"loss": 101.3536,
|
| 3575 |
+
"step": 506
|
| 3576 |
+
},
|
| 3577 |
+
{
|
| 3578 |
+
"epoch": 0.27349044199453826,
|
| 3579 |
+
"grad_norm": 33.79701614379883,
|
| 3580 |
+
"learning_rate": 0.00019726947604223712,
|
| 3581 |
+
"loss": 105.4946,
|
| 3582 |
+
"step": 507
|
| 3583 |
+
},
|
| 3584 |
+
{
|
| 3585 |
+
"epoch": 0.27402987087421193,
|
| 3586 |
+
"grad_norm": 39.242740631103516,
|
| 3587 |
+
"learning_rate": 0.00019725610352646172,
|
| 3588 |
+
"loss": 82.6645,
|
| 3589 |
+
"step": 508
|
| 3590 |
+
},
|
| 3591 |
+
{
|
| 3592 |
+
"epoch": 0.27456929975388555,
|
| 3593 |
+
"grad_norm": 41.144683837890625,
|
| 3594 |
+
"learning_rate": 0.0001972426988007096,
|
| 3595 |
+
"loss": 99.5104,
|
| 3596 |
+
"step": 509
|
| 3597 |
+
},
|
| 3598 |
+
{
|
| 3599 |
+
"epoch": 0.27510872863355923,
|
| 3600 |
+
"grad_norm": 43.32292175292969,
|
| 3601 |
+
"learning_rate": 0.00019722926186942026,
|
| 3602 |
+
"loss": 90.6068,
|
| 3603 |
+
"step": 510
|
| 3604 |
+
},
|
| 3605 |
+
{
|
| 3606 |
+
"epoch": 0.27564815751323285,
|
| 3607 |
+
"grad_norm": 40.97383117675781,
|
| 3608 |
+
"learning_rate": 0.0001972157927370438,
|
| 3609 |
+
"loss": 71.8933,
|
| 3610 |
+
"step": 511
|
| 3611 |
+
},
|
| 3612 |
+
{
|
| 3613 |
+
"epoch": 0.2761875863929065,
|
| 3614 |
+
"grad_norm": 27.89875602722168,
|
| 3615 |
+
"learning_rate": 0.0001972022914080411,
|
| 3616 |
+
"loss": 66.0499,
|
| 3617 |
+
"step": 512
|
| 3618 |
+
},
|
| 3619 |
+
{
|
| 3620 |
+
"epoch": 0.27672701527258015,
|
| 3621 |
+
"grad_norm": 23.75403594970703,
|
| 3622 |
+
"learning_rate": 0.00019718875788688354,
|
| 3623 |
+
"loss": 59.9798,
|
| 3624 |
+
"step": 513
|
| 3625 |
+
},
|
| 3626 |
+
{
|
| 3627 |
+
"epoch": 0.2772664441522538,
|
| 3628 |
+
"grad_norm": 18.101530075073242,
|
| 3629 |
+
"learning_rate": 0.0001971751921780533,
|
| 3630 |
+
"loss": 55.1379,
|
| 3631 |
+
"step": 514
|
| 3632 |
+
},
|
| 3633 |
+
{
|
| 3634 |
+
"epoch": 0.27780587303192744,
|
| 3635 |
+
"grad_norm": 24.123146057128906,
|
| 3636 |
+
"learning_rate": 0.00019716159428604315,
|
| 3637 |
+
"loss": 51.0036,
|
| 3638 |
+
"step": 515
|
| 3639 |
+
},
|
| 3640 |
+
{
|
| 3641 |
+
"epoch": 0.2783453019116011,
|
| 3642 |
+
"grad_norm": 29.12915802001953,
|
| 3643 |
+
"learning_rate": 0.00019714796421535654,
|
| 3644 |
+
"loss": 35.74,
|
| 3645 |
+
"step": 516
|
| 3646 |
+
},
|
| 3647 |
+
{
|
| 3648 |
+
"epoch": 0.27888473079127474,
|
| 3649 |
+
"grad_norm": 41.40327072143555,
|
| 3650 |
+
"learning_rate": 0.00019713430197050756,
|
| 3651 |
+
"loss": 34.8342,
|
| 3652 |
+
"step": 517
|
| 3653 |
+
},
|
| 3654 |
+
{
|
| 3655 |
+
"epoch": 0.27942415967094836,
|
| 3656 |
+
"grad_norm": 65.70941162109375,
|
| 3657 |
+
"learning_rate": 0.00019712060755602102,
|
| 3658 |
+
"loss": 45.6267,
|
| 3659 |
+
"step": 518
|
| 3660 |
+
},
|
| 3661 |
+
{
|
| 3662 |
+
"epoch": 0.27996358855062203,
|
| 3663 |
+
"grad_norm": 37.733158111572266,
|
| 3664 |
+
"learning_rate": 0.00019710688097643227,
|
| 3665 |
+
"loss": 40.7,
|
| 3666 |
+
"step": 519
|
| 3667 |
+
},
|
| 3668 |
+
{
|
| 3669 |
+
"epoch": 0.28050301743029565,
|
| 3670 |
+
"grad_norm": 39.90540313720703,
|
| 3671 |
+
"learning_rate": 0.0001970931222362874,
|
| 3672 |
+
"loss": 52.105,
|
| 3673 |
+
"step": 520
|
| 3674 |
+
},
|
| 3675 |
+
{
|
| 3676 |
+
"epoch": 0.28104244630996933,
|
| 3677 |
+
"grad_norm": 41.023155212402344,
|
| 3678 |
+
"learning_rate": 0.0001970793313401432,
|
| 3679 |
+
"loss": 47.4019,
|
| 3680 |
+
"step": 521
|
| 3681 |
+
},
|
| 3682 |
+
{
|
| 3683 |
+
"epoch": 0.28158187518964295,
|
| 3684 |
+
"grad_norm": 39.340972900390625,
|
| 3685 |
+
"learning_rate": 0.00019706550829256693,
|
| 3686 |
+
"loss": 36.3784,
|
| 3687 |
+
"step": 522
|
| 3688 |
+
},
|
| 3689 |
+
{
|
| 3690 |
+
"epoch": 0.2821213040693166,
|
| 3691 |
+
"grad_norm": 31.36964988708496,
|
| 3692 |
+
"learning_rate": 0.0001970516530981367,
|
| 3693 |
+
"loss": 32.5883,
|
| 3694 |
+
"step": 523
|
| 3695 |
+
},
|
| 3696 |
+
{
|
| 3697 |
+
"epoch": 0.28266073294899025,
|
| 3698 |
+
"grad_norm": 31.426342010498047,
|
| 3699 |
+
"learning_rate": 0.00019703776576144105,
|
| 3700 |
+
"loss": 37.0281,
|
| 3701 |
+
"step": 524
|
| 3702 |
+
},
|
| 3703 |
+
{
|
| 3704 |
+
"epoch": 0.2832001618286639,
|
| 3705 |
+
"grad_norm": 48.170589447021484,
|
| 3706 |
+
"learning_rate": 0.00019702384628707945,
|
| 3707 |
+
"loss": 50.0541,
|
| 3708 |
+
"step": 525
|
| 3709 |
+
},
|
| 3710 |
+
{
|
| 3711 |
+
"epoch": 0.28373959070833754,
|
| 3712 |
+
"grad_norm": 58.017845153808594,
|
| 3713 |
+
"learning_rate": 0.0001970098946796617,
|
| 3714 |
+
"loss": 35.1185,
|
| 3715 |
+
"step": 526
|
| 3716 |
+
},
|
| 3717 |
+
{
|
| 3718 |
+
"epoch": 0.2842790195880112,
|
| 3719 |
+
"grad_norm": 44.51712417602539,
|
| 3720 |
+
"learning_rate": 0.0001969959109438085,
|
| 3721 |
+
"loss": 30.6861,
|
| 3722 |
+
"step": 527
|
| 3723 |
+
},
|
| 3724 |
+
{
|
| 3725 |
+
"epoch": 0.28481844846768484,
|
| 3726 |
+
"grad_norm": 38.26441955566406,
|
| 3727 |
+
"learning_rate": 0.00019698189508415102,
|
| 3728 |
+
"loss": 42.7979,
|
| 3729 |
+
"step": 528
|
| 3730 |
+
},
|
| 3731 |
+
{
|
| 3732 |
+
"epoch": 0.28535787734735846,
|
| 3733 |
+
"grad_norm": 33.41388702392578,
|
| 3734 |
+
"learning_rate": 0.00019696784710533115,
|
| 3735 |
+
"loss": 31.6934,
|
| 3736 |
+
"step": 529
|
| 3737 |
+
},
|
| 3738 |
+
{
|
| 3739 |
+
"epoch": 0.28589730622703213,
|
| 3740 |
+
"grad_norm": 39.14249038696289,
|
| 3741 |
+
"learning_rate": 0.00019695376701200145,
|
| 3742 |
+
"loss": 31.4034,
|
| 3743 |
+
"step": 530
|
| 3744 |
+
},
|
| 3745 |
+
{
|
| 3746 |
+
"epoch": 0.28643673510670575,
|
| 3747 |
+
"grad_norm": 38.64737319946289,
|
| 3748 |
+
"learning_rate": 0.000196939654808825,
|
| 3749 |
+
"loss": 35.3318,
|
| 3750 |
+
"step": 531
|
| 3751 |
+
},
|
| 3752 |
+
{
|
| 3753 |
+
"epoch": 0.28697616398637943,
|
| 3754 |
+
"grad_norm": 32.65852355957031,
|
| 3755 |
+
"learning_rate": 0.0001969255105004756,
|
| 3756 |
+
"loss": 33.1427,
|
| 3757 |
+
"step": 532
|
| 3758 |
+
},
|
| 3759 |
+
{
|
| 3760 |
+
"epoch": 0.28751559286605305,
|
| 3761 |
+
"grad_norm": 33.65852355957031,
|
| 3762 |
+
"learning_rate": 0.0001969113340916377,
|
| 3763 |
+
"loss": 31.0407,
|
| 3764 |
+
"step": 533
|
| 3765 |
+
},
|
| 3766 |
+
{
|
| 3767 |
+
"epoch": 0.2880550217457267,
|
| 3768 |
+
"grad_norm": 31.496322631835938,
|
| 3769 |
+
"learning_rate": 0.00019689712558700628,
|
| 3770 |
+
"loss": 32.1776,
|
| 3771 |
+
"step": 534
|
| 3772 |
+
},
|
| 3773 |
+
{
|
| 3774 |
+
"epoch": 0.28859445062540034,
|
| 3775 |
+
"grad_norm": 37.255680084228516,
|
| 3776 |
+
"learning_rate": 0.00019688288499128707,
|
| 3777 |
+
"loss": 32.4352,
|
| 3778 |
+
"step": 535
|
| 3779 |
+
},
|
| 3780 |
+
{
|
| 3781 |
+
"epoch": 0.289133879505074,
|
| 3782 |
+
"grad_norm": 35.74131774902344,
|
| 3783 |
+
"learning_rate": 0.00019686861230919635,
|
| 3784 |
+
"loss": 39.0239,
|
| 3785 |
+
"step": 536
|
| 3786 |
+
},
|
| 3787 |
+
{
|
| 3788 |
+
"epoch": 0.28967330838474764,
|
| 3789 |
+
"grad_norm": 62.805694580078125,
|
| 3790 |
+
"learning_rate": 0.00019685430754546107,
|
| 3791 |
+
"loss": 39.168,
|
| 3792 |
+
"step": 537
|
| 3793 |
+
},
|
| 3794 |
+
{
|
| 3795 |
+
"epoch": 0.2902127372644213,
|
| 3796 |
+
"grad_norm": 32.74406814575195,
|
| 3797 |
+
"learning_rate": 0.00019683997070481875,
|
| 3798 |
+
"loss": 27.3064,
|
| 3799 |
+
"step": 538
|
| 3800 |
+
},
|
| 3801 |
+
{
|
| 3802 |
+
"epoch": 0.29075216614409494,
|
| 3803 |
+
"grad_norm": 60.63595199584961,
|
| 3804 |
+
"learning_rate": 0.00019682560179201759,
|
| 3805 |
+
"loss": 37.3217,
|
| 3806 |
+
"step": 539
|
| 3807 |
+
},
|
| 3808 |
+
{
|
| 3809 |
+
"epoch": 0.2912915950237686,
|
| 3810 |
+
"grad_norm": 49.350975036621094,
|
| 3811 |
+
"learning_rate": 0.00019681120081181636,
|
| 3812 |
+
"loss": 32.6254,
|
| 3813 |
+
"step": 540
|
| 3814 |
+
},
|
| 3815 |
+
{
|
| 3816 |
+
"epoch": 0.29183102390344223,
|
| 3817 |
+
"grad_norm": 33.03507614135742,
|
| 3818 |
+
"learning_rate": 0.00019679676776898454,
|
| 3819 |
+
"loss": 23.6142,
|
| 3820 |
+
"step": 541
|
| 3821 |
+
},
|
| 3822 |
+
{
|
| 3823 |
+
"epoch": 0.29237045278311585,
|
| 3824 |
+
"grad_norm": 46.380985260009766,
|
| 3825 |
+
"learning_rate": 0.00019678230266830212,
|
| 3826 |
+
"loss": 26.1048,
|
| 3827 |
+
"step": 542
|
| 3828 |
+
},
|
| 3829 |
+
{
|
| 3830 |
+
"epoch": 0.29290988166278953,
|
| 3831 |
+
"grad_norm": 44.384132385253906,
|
| 3832 |
+
"learning_rate": 0.00019676780551455977,
|
| 3833 |
+
"loss": 19.0745,
|
| 3834 |
+
"step": 543
|
| 3835 |
+
},
|
| 3836 |
+
{
|
| 3837 |
+
"epoch": 0.29344931054246315,
|
| 3838 |
+
"grad_norm": 32.757320404052734,
|
| 3839 |
+
"learning_rate": 0.0001967532763125588,
|
| 3840 |
+
"loss": 33.5921,
|
| 3841 |
+
"step": 544
|
| 3842 |
+
},
|
| 3843 |
+
{
|
| 3844 |
+
"epoch": 0.2939887394221368,
|
| 3845 |
+
"grad_norm": 40.512939453125,
|
| 3846 |
+
"learning_rate": 0.000196738715067111,
|
| 3847 |
+
"loss": 23.9648,
|
| 3848 |
+
"step": 545
|
| 3849 |
+
},
|
| 3850 |
+
{
|
| 3851 |
+
"epoch": 0.29452816830181044,
|
| 3852 |
+
"grad_norm": 36.085330963134766,
|
| 3853 |
+
"learning_rate": 0.00019672412178303898,
|
| 3854 |
+
"loss": 25.8736,
|
| 3855 |
+
"step": 546
|
| 3856 |
+
},
|
| 3857 |
+
{
|
| 3858 |
+
"epoch": 0.2950675971814841,
|
| 3859 |
+
"grad_norm": 39.4991340637207,
|
| 3860 |
+
"learning_rate": 0.00019670949646517576,
|
| 3861 |
+
"loss": 35.8085,
|
| 3862 |
+
"step": 547
|
| 3863 |
+
},
|
| 3864 |
+
{
|
| 3865 |
+
"epoch": 0.29560702606115774,
|
| 3866 |
+
"grad_norm": 56.80205535888672,
|
| 3867 |
+
"learning_rate": 0.0001966948391183651,
|
| 3868 |
+
"loss": 21.2566,
|
| 3869 |
+
"step": 548
|
| 3870 |
+
},
|
| 3871 |
+
{
|
| 3872 |
+
"epoch": 0.2961464549408314,
|
| 3873 |
+
"grad_norm": 51.80792999267578,
|
| 3874 |
+
"learning_rate": 0.00019668014974746133,
|
| 3875 |
+
"loss": 19.3891,
|
| 3876 |
+
"step": 549
|
| 3877 |
+
},
|
| 3878 |
+
{
|
| 3879 |
+
"epoch": 0.29668588382050504,
|
| 3880 |
+
"grad_norm": 40.740726470947266,
|
| 3881 |
+
"learning_rate": 0.00019666542835732937,
|
| 3882 |
+
"loss": 17.442,
|
| 3883 |
+
"step": 550
|
| 3884 |
+
},
|
| 3885 |
+
{
|
| 3886 |
+
"epoch": 0.2972253127001787,
|
| 3887 |
+
"grad_norm": 43.78228759765625,
|
| 3888 |
+
"learning_rate": 0.00019665067495284476,
|
| 3889 |
+
"loss": 53.1444,
|
| 3890 |
+
"step": 551
|
| 3891 |
+
},
|
| 3892 |
+
{
|
| 3893 |
+
"epoch": 0.29776474157985233,
|
| 3894 |
+
"grad_norm": 68.15139770507812,
|
| 3895 |
+
"learning_rate": 0.00019663588953889363,
|
| 3896 |
+
"loss": 83.8455,
|
| 3897 |
+
"step": 552
|
| 3898 |
+
},
|
| 3899 |
+
{
|
| 3900 |
+
"epoch": 0.29830417045952595,
|
| 3901 |
+
"grad_norm": 57.72416305541992,
|
| 3902 |
+
"learning_rate": 0.00019662107212037273,
|
| 3903 |
+
"loss": 91.3314,
|
| 3904 |
+
"step": 553
|
| 3905 |
+
},
|
| 3906 |
+
{
|
| 3907 |
+
"epoch": 0.29884359933919963,
|
| 3908 |
+
"grad_norm": 70.40361785888672,
|
| 3909 |
+
"learning_rate": 0.0001966062227021894,
|
| 3910 |
+
"loss": 115.1381,
|
| 3911 |
+
"step": 554
|
| 3912 |
+
},
|
| 3913 |
+
{
|
| 3914 |
+
"epoch": 0.29938302821887325,
|
| 3915 |
+
"grad_norm": 33.6906623840332,
|
| 3916 |
+
"learning_rate": 0.00019659134128926156,
|
| 3917 |
+
"loss": 96.5649,
|
| 3918 |
+
"step": 555
|
| 3919 |
+
},
|
| 3920 |
+
{
|
| 3921 |
+
"epoch": 0.2999224570985469,
|
| 3922 |
+
"grad_norm": 41.24090576171875,
|
| 3923 |
+
"learning_rate": 0.00019657642788651776,
|
| 3924 |
+
"loss": 104.8012,
|
| 3925 |
+
"step": 556
|
| 3926 |
+
},
|
| 3927 |
+
{
|
| 3928 |
+
"epoch": 0.30046188597822054,
|
| 3929 |
+
"grad_norm": 62.62508773803711,
|
| 3930 |
+
"learning_rate": 0.00019656148249889714,
|
| 3931 |
+
"loss": 89.1584,
|
| 3932 |
+
"step": 557
|
| 3933 |
+
},
|
| 3934 |
+
{
|
| 3935 |
+
"epoch": 0.3010013148578942,
|
| 3936 |
+
"grad_norm": 54.20726013183594,
|
| 3937 |
+
"learning_rate": 0.00019654650513134937,
|
| 3938 |
+
"loss": 102.4601,
|
| 3939 |
+
"step": 558
|
| 3940 |
+
},
|
| 3941 |
+
{
|
| 3942 |
+
"epoch": 0.30154074373756784,
|
| 3943 |
+
"grad_norm": 51.19554138183594,
|
| 3944 |
+
"learning_rate": 0.00019653149578883482,
|
| 3945 |
+
"loss": 94.7273,
|
| 3946 |
+
"step": 559
|
| 3947 |
+
},
|
| 3948 |
+
{
|
| 3949 |
+
"epoch": 0.3020801726172415,
|
| 3950 |
+
"grad_norm": 50.297447204589844,
|
| 3951 |
+
"learning_rate": 0.00019651645447632437,
|
| 3952 |
+
"loss": 85.4999,
|
| 3953 |
+
"step": 560
|
| 3954 |
+
},
|
| 3955 |
+
{
|
| 3956 |
+
"epoch": 0.30261960149691514,
|
| 3957 |
+
"grad_norm": 43.541648864746094,
|
| 3958 |
+
"learning_rate": 0.00019650138119879952,
|
| 3959 |
+
"loss": 84.9936,
|
| 3960 |
+
"step": 561
|
| 3961 |
+
},
|
| 3962 |
+
{
|
| 3963 |
+
"epoch": 0.3031590303765888,
|
| 3964 |
+
"grad_norm": 30.611860275268555,
|
| 3965 |
+
"learning_rate": 0.00019648627596125233,
|
| 3966 |
+
"loss": 68.3871,
|
| 3967 |
+
"step": 562
|
| 3968 |
+
},
|
| 3969 |
+
{
|
| 3970 |
+
"epoch": 0.30369845925626243,
|
| 3971 |
+
"grad_norm": 18.373859405517578,
|
| 3972 |
+
"learning_rate": 0.00019647113876868546,
|
| 3973 |
+
"loss": 64.1806,
|
| 3974 |
+
"step": 563
|
| 3975 |
+
},
|
| 3976 |
+
{
|
| 3977 |
+
"epoch": 0.30423788813593605,
|
| 3978 |
+
"grad_norm": 17.967041015625,
|
| 3979 |
+
"learning_rate": 0.00019645596962611218,
|
| 3980 |
+
"loss": 58.1967,
|
| 3981 |
+
"step": 564
|
| 3982 |
+
},
|
| 3983 |
+
{
|
| 3984 |
+
"epoch": 0.30477731701560973,
|
| 3985 |
+
"grad_norm": 17.57683563232422,
|
| 3986 |
+
"learning_rate": 0.00019644076853855626,
|
| 3987 |
+
"loss": 48.7426,
|
| 3988 |
+
"step": 565
|
| 3989 |
+
},
|
| 3990 |
+
{
|
| 3991 |
+
"epoch": 0.30531674589528335,
|
| 3992 |
+
"grad_norm": 24.4635066986084,
|
| 3993 |
+
"learning_rate": 0.00019642553551105219,
|
| 3994 |
+
"loss": 45.5702,
|
| 3995 |
+
"step": 566
|
| 3996 |
+
},
|
| 3997 |
+
{
|
| 3998 |
+
"epoch": 0.305856174774957,
|
| 3999 |
+
"grad_norm": 44.31038284301758,
|
| 4000 |
+
"learning_rate": 0.0001964102705486449,
|
| 4001 |
+
"loss": 36.4538,
|
| 4002 |
+
"step": 567
|
| 4003 |
+
},
|
| 4004 |
+
{
|
| 4005 |
+
"epoch": 0.30639560365463064,
|
| 4006 |
+
"grad_norm": 45.66762924194336,
|
| 4007 |
+
"learning_rate": 0.00019639497365638993,
|
| 4008 |
+
"loss": 37.6228,
|
| 4009 |
+
"step": 568
|
| 4010 |
+
},
|
| 4011 |
+
{
|
| 4012 |
+
"epoch": 0.3069350325343043,
|
| 4013 |
+
"grad_norm": 45.2806282043457,
|
| 4014 |
+
"learning_rate": 0.00019637964483935346,
|
| 4015 |
+
"loss": 47.7514,
|
| 4016 |
+
"step": 569
|
| 4017 |
+
},
|
| 4018 |
+
{
|
| 4019 |
+
"epoch": 0.30747446141397794,
|
| 4020 |
+
"grad_norm": 44.627296447753906,
|
| 4021 |
+
"learning_rate": 0.00019636428410261218,
|
| 4022 |
+
"loss": 50.5934,
|
| 4023 |
+
"step": 570
|
| 4024 |
+
},
|
| 4025 |
+
{
|
| 4026 |
+
"epoch": 0.3080138902936516,
|
| 4027 |
+
"grad_norm": 39.8631706237793,
|
| 4028 |
+
"learning_rate": 0.00019634889145125336,
|
| 4029 |
+
"loss": 33.2035,
|
| 4030 |
+
"step": 571
|
| 4031 |
+
},
|
| 4032 |
+
{
|
| 4033 |
+
"epoch": 0.30855331917332524,
|
| 4034 |
+
"grad_norm": 43.88326644897461,
|
| 4035 |
+
"learning_rate": 0.00019633346689037486,
|
| 4036 |
+
"loss": 44.4418,
|
| 4037 |
+
"step": 572
|
| 4038 |
+
},
|
| 4039 |
+
{
|
| 4040 |
+
"epoch": 0.3090927480529989,
|
| 4041 |
+
"grad_norm": 31.599515914916992,
|
| 4042 |
+
"learning_rate": 0.0001963180104250851,
|
| 4043 |
+
"loss": 29.8656,
|
| 4044 |
+
"step": 573
|
| 4045 |
+
},
|
| 4046 |
+
{
|
| 4047 |
+
"epoch": 0.30963217693267253,
|
| 4048 |
+
"grad_norm": 29.062061309814453,
|
| 4049 |
+
"learning_rate": 0.00019630252206050307,
|
| 4050 |
+
"loss": 29.4416,
|
| 4051 |
+
"step": 574
|
| 4052 |
+
},
|
| 4053 |
+
{
|
| 4054 |
+
"epoch": 0.31017160581234615,
|
| 4055 |
+
"grad_norm": 35.07856750488281,
|
| 4056 |
+
"learning_rate": 0.00019628700180175833,
|
| 4057 |
+
"loss": 33.663,
|
| 4058 |
+
"step": 575
|
| 4059 |
+
},
|
| 4060 |
+
{
|
| 4061 |
+
"epoch": 0.3107110346920198,
|
| 4062 |
+
"grad_norm": 38.65933609008789,
|
| 4063 |
+
"learning_rate": 0.00019627144965399094,
|
| 4064 |
+
"loss": 43.6982,
|
| 4065 |
+
"step": 576
|
| 4066 |
+
},
|
| 4067 |
+
{
|
| 4068 |
+
"epoch": 0.31125046357169345,
|
| 4069 |
+
"grad_norm": 36.53346252441406,
|
| 4070 |
+
"learning_rate": 0.0001962558656223516,
|
| 4071 |
+
"loss": 41.9741,
|
| 4072 |
+
"step": 577
|
| 4073 |
+
},
|
| 4074 |
+
{
|
| 4075 |
+
"epoch": 0.3117898924513671,
|
| 4076 |
+
"grad_norm": 50.61214065551758,
|
| 4077 |
+
"learning_rate": 0.00019624024971200154,
|
| 4078 |
+
"loss": 31.3103,
|
| 4079 |
+
"step": 578
|
| 4080 |
+
},
|
| 4081 |
+
{
|
| 4082 |
+
"epoch": 0.31232932133104074,
|
| 4083 |
+
"grad_norm": 39.70477294921875,
|
| 4084 |
+
"learning_rate": 0.00019622460192811255,
|
| 4085 |
+
"loss": 40.1001,
|
| 4086 |
+
"step": 579
|
| 4087 |
+
},
|
| 4088 |
+
{
|
| 4089 |
+
"epoch": 0.3128687502107144,
|
| 4090 |
+
"grad_norm": 43.24115753173828,
|
| 4091 |
+
"learning_rate": 0.000196208922275867,
|
| 4092 |
+
"loss": 38.9648,
|
| 4093 |
+
"step": 580
|
| 4094 |
+
},
|
| 4095 |
+
{
|
| 4096 |
+
"epoch": 0.31340817909038804,
|
| 4097 |
+
"grad_norm": 49.614410400390625,
|
| 4098 |
+
"learning_rate": 0.00019619321076045778,
|
| 4099 |
+
"loss": 38.396,
|
| 4100 |
+
"step": 581
|
| 4101 |
+
},
|
| 4102 |
+
{
|
| 4103 |
+
"epoch": 0.3139476079700617,
|
| 4104 |
+
"grad_norm": 38.65335464477539,
|
| 4105 |
+
"learning_rate": 0.0001961774673870883,
|
| 4106 |
+
"loss": 33.8401,
|
| 4107 |
+
"step": 582
|
| 4108 |
+
},
|
| 4109 |
+
{
|
| 4110 |
+
"epoch": 0.31448703684973534,
|
| 4111 |
+
"grad_norm": 36.919837951660156,
|
| 4112 |
+
"learning_rate": 0.00019616169216097262,
|
| 4113 |
+
"loss": 40.8598,
|
| 4114 |
+
"step": 583
|
| 4115 |
+
},
|
| 4116 |
+
{
|
| 4117 |
+
"epoch": 0.315026465729409,
|
| 4118 |
+
"grad_norm": 34.90658187866211,
|
| 4119 |
+
"learning_rate": 0.00019614588508733524,
|
| 4120 |
+
"loss": 26.7875,
|
| 4121 |
+
"step": 584
|
| 4122 |
+
},
|
| 4123 |
+
{
|
| 4124 |
+
"epoch": 0.31556589460908263,
|
| 4125 |
+
"grad_norm": 36.6773796081543,
|
| 4126 |
+
"learning_rate": 0.00019613004617141132,
|
| 4127 |
+
"loss": 38.7512,
|
| 4128 |
+
"step": 585
|
| 4129 |
+
},
|
| 4130 |
+
{
|
| 4131 |
+
"epoch": 0.3161053234887563,
|
| 4132 |
+
"grad_norm": 38.80603790283203,
|
| 4133 |
+
"learning_rate": 0.00019611417541844645,
|
| 4134 |
+
"loss": 22.4567,
|
| 4135 |
+
"step": 586
|
| 4136 |
+
},
|
| 4137 |
+
{
|
| 4138 |
+
"epoch": 0.3166447523684299,
|
| 4139 |
+
"grad_norm": 39.85905838012695,
|
| 4140 |
+
"learning_rate": 0.00019609827283369687,
|
| 4141 |
+
"loss": 34.7722,
|
| 4142 |
+
"step": 587
|
| 4143 |
+
},
|
| 4144 |
+
{
|
| 4145 |
+
"epoch": 0.31718418124810355,
|
| 4146 |
+
"grad_norm": 42.714210510253906,
|
| 4147 |
+
"learning_rate": 0.00019608233842242925,
|
| 4148 |
+
"loss": 29.6514,
|
| 4149 |
+
"step": 588
|
| 4150 |
+
},
|
| 4151 |
+
{
|
| 4152 |
+
"epoch": 0.3177236101277772,
|
| 4153 |
+
"grad_norm": 28.49331283569336,
|
| 4154 |
+
"learning_rate": 0.00019606637218992092,
|
| 4155 |
+
"loss": 32.2811,
|
| 4156 |
+
"step": 589
|
| 4157 |
+
},
|
| 4158 |
+
{
|
| 4159 |
+
"epoch": 0.31826303900745084,
|
| 4160 |
+
"grad_norm": 38.48284912109375,
|
| 4161 |
+
"learning_rate": 0.0001960503741414597,
|
| 4162 |
+
"loss": 19.4347,
|
| 4163 |
+
"step": 590
|
| 4164 |
+
},
|
| 4165 |
+
{
|
| 4166 |
+
"epoch": 0.3188024678871245,
|
| 4167 |
+
"grad_norm": 40.46686553955078,
|
| 4168 |
+
"learning_rate": 0.00019603434428234389,
|
| 4169 |
+
"loss": 36.0755,
|
| 4170 |
+
"step": 591
|
| 4171 |
+
},
|
| 4172 |
+
{
|
| 4173 |
+
"epoch": 0.31934189676679814,
|
| 4174 |
+
"grad_norm": 33.52849578857422,
|
| 4175 |
+
"learning_rate": 0.00019601828261788236,
|
| 4176 |
+
"loss": 23.4967,
|
| 4177 |
+
"step": 592
|
| 4178 |
+
},
|
| 4179 |
+
{
|
| 4180 |
+
"epoch": 0.3198813256464718,
|
| 4181 |
+
"grad_norm": 36.89003372192383,
|
| 4182 |
+
"learning_rate": 0.0001960021891533946,
|
| 4183 |
+
"loss": 17.4822,
|
| 4184 |
+
"step": 593
|
| 4185 |
+
},
|
| 4186 |
+
{
|
| 4187 |
+
"epoch": 0.32042075452614543,
|
| 4188 |
+
"grad_norm": 47.023624420166016,
|
| 4189 |
+
"learning_rate": 0.00019598606389421055,
|
| 4190 |
+
"loss": 26.3533,
|
| 4191 |
+
"step": 594
|
| 4192 |
+
},
|
| 4193 |
+
{
|
| 4194 |
+
"epoch": 0.3209601834058191,
|
| 4195 |
+
"grad_norm": 53.969627380371094,
|
| 4196 |
+
"learning_rate": 0.00019596990684567063,
|
| 4197 |
+
"loss": 36.3338,
|
| 4198 |
+
"step": 595
|
| 4199 |
+
},
|
| 4200 |
+
{
|
| 4201 |
+
"epoch": 0.32149961228549273,
|
| 4202 |
+
"grad_norm": 31.71206283569336,
|
| 4203 |
+
"learning_rate": 0.00019595371801312588,
|
| 4204 |
+
"loss": 23.1099,
|
| 4205 |
+
"step": 596
|
| 4206 |
+
},
|
| 4207 |
+
{
|
| 4208 |
+
"epoch": 0.3220390411651664,
|
| 4209 |
+
"grad_norm": 34.602901458740234,
|
| 4210 |
+
"learning_rate": 0.00019593749740193784,
|
| 4211 |
+
"loss": 20.7281,
|
| 4212 |
+
"step": 597
|
| 4213 |
+
},
|
| 4214 |
+
{
|
| 4215 |
+
"epoch": 0.32257847004484,
|
| 4216 |
+
"grad_norm": 32.23836135864258,
|
| 4217 |
+
"learning_rate": 0.00019592124501747855,
|
| 4218 |
+
"loss": 19.1565,
|
| 4219 |
+
"step": 598
|
| 4220 |
+
},
|
| 4221 |
+
{
|
| 4222 |
+
"epoch": 0.32311789892451365,
|
| 4223 |
+
"grad_norm": 31.762807846069336,
|
| 4224 |
+
"learning_rate": 0.00019590496086513063,
|
| 4225 |
+
"loss": 20.822,
|
| 4226 |
+
"step": 599
|
| 4227 |
+
},
|
| 4228 |
+
{
|
| 4229 |
+
"epoch": 0.3236573278041873,
|
| 4230 |
+
"grad_norm": 38.77958297729492,
|
| 4231 |
+
"learning_rate": 0.00019588864495028712,
|
| 4232 |
+
"loss": 20.7172,
|
| 4233 |
+
"step": 600
|
| 4234 |
+
},
|
| 4235 |
+
{
|
| 4236 |
+
"epoch": 0.3236573278041873,
|
| 4237 |
+
"eval_loss": 1.5770864486694336,
|
| 4238 |
+
"eval_runtime": 140.3936,
|
| 4239 |
+
"eval_samples_per_second": 2.13,
|
| 4240 |
+
"eval_steps_per_second": 2.13,
|
| 4241 |
+
"step": 600
|
     }
   ],
   "logging_steps": 1,
@@ -2859,7 +4267,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 1.0696873835715625e+18,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
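trainer_state.json is the Trainer's running log: this commit advances best_metric, best_model_checkpoint, and global_step to checkpoint-600 and appends per-step records (steps 401-600) plus the step-600 evaluation. In the Trainer's schema those records live under "log_history"; a short sketch of reading them back (the path is hypothetical):

# Sketch: summarize a checkpoint's trainer_state.json.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print("best_metric:", state["best_metric"])              # 1.5770864486694336 at this commit
print("global_step:", state["global_step"])              # 600
print("best checkpoint:", state["best_model_checkpoint"])

# log_history mixes training records ("loss") and eval records ("eval_loss").
train_points = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_points = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]
print("last train losses:", train_points[-3:])
print("eval points:", eval_points[-3:])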
last-checkpoint/training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:60cf1b2e269c89ca03e3babc5e2888ab355b88cc062be4295ed1566822fe04c6
 size 6776
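Together these files form a full LoRA checkpoint: the adapter weights and config, plus the optimizer, scheduler, RNG, and trainer state needed to resume. To use the adapter at this step for inference, it can be attached to its base model; the base model identifier below is a placeholder, since this commit does not record which base model was fine-tuned.

# Sketch: attach the saved LoRA adapter from this checkpoint to its base model.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_MODEL_ID = "BASE_MODEL_ID"  # hypothetical; replace with the actual base model

base = AutoModelForCausalLM.from_pretrained(BASE_MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL_ID)

# PeftModel reads adapter_config.json and adapter_model.safetensors from the directory.
model = PeftModel.from_pretrained(base, "last-checkpoint")
model.eval()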