Run 4. Outer Step 13. Inner Step 7.
- config.json +8 -7
- inner_optimizer.pt +1 -1
- model.safetensors +1 -1
config.json CHANGED
@@ -268,19 +268,20 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-
-
-
-
-
-
+    5511580,
+    5511584,
+    5511588,
+    5511592,
+    5511596,
+    5511601,
+    5511605
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step":
+  "inner_step": 7,
   "inner_steps": 0,
   "last_allreduce_block": 5509976,
   "layer_norm_epsilon": 1e-05,
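For quick reference, a minimal sketch of reading back the fields this commit touches, assuming a local checkout of the repo at this revision (the relative path to config.json is the only assumption):

import json

# Load the committed config; the path assumes the checkout root is the working directory.
with open("config.json") as f:
    cfg = json.load(f)

# Fields updated in this commit.
print(cfg["block_list"])   # [5511580, 5511584, 5511588, 5511592, 5511596, 5511601, 5511605]
print(cfg["inner_step"])   # 7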
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d5fe05c601a6614d35b610de45b2cdec2d8773207990a293b73b5c84118f83c5
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d3ce902b6a32662f07847b29252ffa523ab45bec20d40560b995930f2e84932d
 size 4040701744
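Both weight files are stored as Git LFS pointers, so the diff only records the new sha256 oid and size. A hedged sketch of verifying a downloaded file against the digest in the pointer (the local file path is an assumption; the same check applies to inner_optimizer.pt with its own oid):

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream in chunks so the multi-GB checkpoint never has to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# Expected digest copied from the model.safetensors pointer in this commit.
expected = "d3ce902b6a32662f07847b29252ffa523ab45bec20d40560b995930f2e84932d"
assert sha256_of("model.safetensors") == expected, "checksum mismatch"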