thieu86 committed (verified)
Commit 8b68dd4 · Parent: d2f2490

Run 4. Outer Step 12. Inner Step 36.

Files changed (3):
  1. config.json +7 -7
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,19 +268,19 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5509595,
-    5509599,
-    5509603,
-    5509607,
-    5509611,
-    5509615
+    5509619,
+    5509624,
+    5509628,
+    5509632,
+    5509636,
+    5509640
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 35,
+  "inner_step": 36,
   "inner_steps": 0,
   "last_allreduce_block": 5508496,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:eef3f1aa20bf6e866965de5802f45791e5bf4676da6c50b7fd67e1a4e66e7467
+oid sha256:c25d46663a67d059b4dbdc7d4d99417b3ad70f4a2f6b193692274016a8c3a562
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f338256a2386f54dbd3f42b95ec464d763452fe4ab0eda3e889b20d53439cffd
+oid sha256:83b383a5a4f5a4954ec45ddbbb0b0446548178f55fc405db4614761791eae37e
 size 4040701744
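Both binary files are stored through Git LFS, so the diff touches only their pointer files: the oid line is the SHA-256 digest of the blob and size is its byte count (unchanged here, since the weights and optimizer state are overwritten in place). A minimal sketch, assuming the blobs have been downloaded into the current directory, of checking a file against its pointer:

```python
# Minimal sketch: verify a downloaded LFS blob against its pointer.
# The local path "model.safetensors" is an assumption about where the
# file was saved; the expected values come from the pointer diff above.
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256 so multi-GB blobs fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected_oid = "83b383a5a4f5a4954ec45ddbbb0b0446548178f55fc405db4614761791eae37e"
expected_size = 4040701744

assert os.path.getsize("model.safetensors") == expected_size
assert sha256_of("model.safetensors") == expected_oid
```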