thieu86 committed
Commit 32c2953 · verified · 1 Parent(s): cd05de4

Run 4. Outer Step 15. Inner Step 69.

Files changed (3)
  1. config.json +7 -7
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,19 +268,19 @@
   "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5514974,
-    5514978,
-    5514982,
-    5514986,
-    5514989,
-    5514993
+    5514997,
+    5515001,
+    5515005,
+    5515009,
+    5515012,
+    5515016
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 68,
+  "inner_step": 69,
   "inner_steps": 0,
   "last_allreduce_block": 5511564,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1765d3485343a2ba8173364667e7e2959e057d7cd6070c3e7fb012bc8e8215c0
+oid sha256:6d06f7fa949fbe27b2be476ff57b3c08f954c7ed7fc82b408f6b3e2c0bb39a5e
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ea67038305c9db138838bb156049b76c9a95626f0917a305a142f1899dd6abf5
+oid sha256:39f2fcd9424522469a12793e9e16ac188e332ec1b9ffcca6ab3cf5cb6ce75103
 size 4040701744
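Both binary files are stored through Git LFS, so the diffs above only change the three-line pointer files: a spec version, the sha256 of the actual payload, and its byte size. A minimal sketch (plain Python, assuming model.safetensors has already been downloaded into the working directory) of checking a local copy against the new oid recorded above:

import hashlib

# New oid for model.safetensors, copied from the pointer diff above.
EXPECTED = "39f2fcd9424522469a12793e9e16ac188e332ec1b9ffcca6ab3cf5cb6ce75103"

def sha256_of(path, chunk_size=1 << 20):
    # Stream in 1 MiB chunks so the ~4 GB checkpoint never sits in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("model.safetensors") == EXPECTED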