thieu86 committed
Commit b54a78c · verified · 1 Parent(s): 1204746

Run 2. Outer Step 3. Inner Step 0.

Files changed (3):
  1. config.json +2 -8
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -267,19 +267,13 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [
-    5330551,
-    5330556,
-    5330560,
-    5330564,
-    5330568
-  ],
+  "block_list": [],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 131,
+  "inner_step": 0,
   "inner_steps": 0,
   "last_allreduce_block": 5323221,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f730ed94a9145c99e8432df833d6d042ed30096892fef58e5673ef52e12d419c
+oid sha256:c7c29f115224ff3d6030c4329a6b0caa92698ca604f6aa237b0e97db3ae4e03a
 size 8081782026
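
inner_optimizer.pt is a Git LFS pointer; only the sha256 oid changes here, while the ~8 GB payload size stays the same. A hedged sketch of fetching and opening it, assuming it is a plain torch-serialized state dict (not confirmed by this diff) and the same repo id as above:

import torch
from huggingface_hub import hf_hub_download

# Assumed repo id, as above; revision pins this commit.
path = hf_hub_download(
    "distributed/optimized-gpt2-500m",
    filename="inner_optimizer.pt",
    revision="b54a78c",
)

# weights_only=False trusts the checkpoint's pickled contents; the exact layout
# of the saved optimizer state is an assumption here.
state = torch.load(path, map_location="cpu", weights_only=False)
print(type(state))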
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e7229db1b8f7363cd23d5a34588768c54edefd0ddd15a890ec1c998d97ec7086
+oid sha256:f116e2c25f57c39ac087d792698fe4e2b8c09eaed86cad254ee90072a84333f2
 size 4040701744
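
Like the optimizer file, model.safetensors is stored as a Git LFS pointer whose oid is the SHA-256 of the actual file contents. A small sketch for checking a downloaded copy against the new oid above:

import hashlib

def lfs_sha256(path, chunk_size=1 << 20):
    # The Git LFS "oid sha256:..." field is the SHA-256 of the full file contents.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "f116e2c25f57c39ac087d792698fe4e2b8c09eaed86cad254ee90072a84333f2"
assert lfs_sha256("model.safetensors") == expected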