Run 4. Outer Step 98. Inner Step 12.
Files changed:
- config.json +3 -3
- inner_optimizer.pt +1 -1
- model.safetensors +1 -1
config.json
CHANGED
@@ -268,15 +268,15 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-
-
+    5693754,
+    5693786
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step":
+  "inner_step": 12,
   "inner_steps": 0,
   "last_allreduce_block": 5692898,
   "layer_norm_epsilon": 1e-05,
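The keys touched here (inner_step, block_list, last_allreduce_block) are training-run metadata the checkpoint writes into config.json alongside the usual model hyperparameters. A minimal sketch of inspecting them, assuming the repo id distributed/optimized-gpt2-500m implied by the auto_map entry above; trust_remote_code is needed because the model class (GPTOptim) lives in the repo rather than in transformers:

```python
# Sketch only: the repo id is inferred from the auto_map entry in the
# diff above; everything else is standard transformers usage.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained(
    "distributed/optimized-gpt2-500m", trust_remote_code=True
)

# Extra keys in config.json are exposed as attributes on the config.
print(config.inner_step)            # 12 after this commit
print(config.block_list)            # [5693754, 5693786]
print(config.last_allreduce_block)  # 5692898

model = AutoModelForCausalLM.from_pretrained(
    "distributed/optimized-gpt2-500m", trust_remote_code=True
)
```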
inner_optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3f4cbda41f987a85fe68e246d15130e2ddff24fab0598f32786379067800a9c0
 size 8081782503
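The large binaries are stored through Git LFS, so the diff only shows the three-line pointer file: between commits the size stays the same and only the oid (the SHA-256 of the blob) changes. A minimal sketch of parsing such a pointer (the helper name is ours):

```python
# Minimal sketch: each pointer line is "key value", so split once per line.
def parse_lfs_pointer(text: str) -> dict[str, str]:
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:3f4cbda41f987a85fe68e246d15130e2ddff24fab0598f32786379067800a9c0\n"
    "size 8081782503\n"
)
fields = parse_lfs_pointer(pointer)
print(fields["oid"])   # sha256:3f4c... (hash of the optimizer blob)
print(fields["size"])  # 8081782503 bytes, i.e. ~8.1 GB of optimizer state
```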
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8d02ffe53331b1fde56077c1287a5e0f2408f67a45233bdd2d37caaa352f1977
 size 4040701744
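Because the oid in the pointer is the SHA-256 of the actual file contents, a downloaded checkpoint can be verified against the commit. A hedged sketch (the local path is an assumption):

```python
import hashlib

# Recompute the file hash in 1 MiB chunks and compare to the pointer oid.
def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(1 << 20):
            h.update(chunk)
    return h.hexdigest()

expected = "8d02ffe53331b1fde56077c1287a5e0f2408f67a45233bdd2d37caaa352f1977"
assert sha256_of("model.safetensors") == expected  # path is illustrative
```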