philip324mark committed
Commit 96abdb7 · verified · 1 parent: 4522ac7

Run 4. Outer Step 82. Inner Step 32.

Files changed (3):
  1. config.json +5 -2
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
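To inspect this commit locally, the three changed files can be pinned to this revision with huggingface_hub. A minimal sketch, assuming the repo id distributed/optimized-gpt2-500m (inferred from the auto_map entries in config.json below) and that the short hash 96abdb7 resolves as a revision:

# A minimal sketch (not part of the commit): pin the three changed files to
# this exact revision. Repo id and short-hash resolution are assumptions.
from huggingface_hub import hf_hub_download

REPO_ID = "distributed/optimized-gpt2-500m"  # inferred from auto_map; may differ
REVISION = "96abdb7"                         # this commit

for filename in ["config.json", "inner_optimizer.pt", "model.safetensors"]:
    path = hf_hub_download(REPO_ID, filename, revision=REVISION)
    print(filename, "->", path)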
config.json CHANGED
@@ -267,13 +267,16 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [],
+  "block_list": [
+    5672243,
+    5672274
+  ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 29,
+  "inner_step": 32,
   "inner_steps": 0,
   "last_allreduce_block": 5664450,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d81dff62287eb6f7865b98e735d25915f42eb42381159c75e97987d186cb96be
+oid sha256:1bc545d0184a15e8ab59c6327d38fd86b02f0dcb0e2b466000bb7eed3bedbcd3
 size 8081782503
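Both binary files in this commit are stored as Git LFS pointers: three text lines carrying the spec version, a sha256 oid, and the byte size (the payload lives in LFS, which is why only the oid changes while the size stays constant). A self-contained sketch that parses such a pointer, using the new inner_optimizer.pt pointer verbatim:

# A minimal sketch: parse a Git LFS pointer into version, hash algo, oid, size.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "oid": digest, "size": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:1bc545d0184a15e8ab59c6327d38fd86b02f0dcb0e2b466000bb7eed3bedbcd3
size 8081782503"""
print(parse_lfs_pointer(pointer))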
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7c5b2434a79acb3ca356f7df9c57d708f1416430a4c1b5b2344086a70fc8b05d
+oid sha256:08dfedd2f12a59008ea6cbeb53ac2ac284a3f3ef64f604e8357f87bc512b37c2
 size 4040701744
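The same pointer scheme applies to model.safetensors: a new oid with an identical size, so the weights were rewritten in place. A minimal sketch that checks a locally downloaded copy against the new pointer's oid and size; the local path is a hypothetical placeholder:

# A minimal sketch: verify a downloaded model.safetensors against the pointer.
import hashlib
import os

EXPECTED_OID = "08dfedd2f12a59008ea6cbeb53ac2ac284a3f3ef64f604e8357f87bc512b37c2"
EXPECTED_SIZE = 4040701744
path = "model.safetensors"  # hypothetical local path

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")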