thieu86 committed
Commit 4bcb14f · verified · 1 Parent(s): 58ce164

Run 4. Outer Step 14. Inner Step 44.

Files changed (3):
  1. config.json +7 -7
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,19 +268,19 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5512868,
-    5512872,
-    5512876,
-    5512880,
-    5512884,
-    5512889
+    5512893,
+    5512897,
+    5512901,
+    5512905,
+    5512909,
+    5512913
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 43,
+  "inner_step": 44,
   "inner_steps": 0,
   "last_allreduce_block": 5511564,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:877d1007e42dba47d18a5e612caa1388f23f41f6925fe2cb89b6bf50f6fee39a
+oid sha256:8cea385ef8d7b816a221252d2ef301e7d90fd4b55f38938248a0f86d2b8c8123
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:acb7a3a6361a0b7d9fe6034f02e5ab493cac672364a3f13c1b0a863d0cc53942
+oid sha256:cd23d3ca87488b99f1e885bfbbd1abe0a0953c4d21b796b28b6c8f720e2a9495
 size 4040701744
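
Both weight files are stored as Git LFS pointers, so the oid line above is the SHA-256 of the actual payload. A minimal sketch for checking a downloaded file against its pointer (file name and expected digest taken from this commit's model.safetensors pointer):

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in chunks and return its hex SHA-256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Pointer oid for model.safetensors after this commit (from the diff above).
expected = "cd23d3ca87488b99f1e885bfbbd1abe0a0953c4d21b796b28b6c8f720e2a9495"
assert sha256_of("model.safetensors") == expected
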