Saadaslamkhan committed
Commit c24cbc8 · verified · 1 parent: 91e72dd

Run 4. Outer Step 44. Inner Step 0.

Files changed (3):
  1. config.json +11 -7
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,19 +268,23 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5563355,
-    5563359,
-    5563364,
-    5563368,
-    5563372,
-    5563376
+    5563381,
+    5563385,
+    5563390,
+    5563394,
+    5563399,
+    5563403,
+    5563408,
+    5563412,
+    5563417,
+    5563421
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 82,
+  "inner_step": 0,
   "inner_steps": 0,
   "last_allreduce_block": 5555115,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7466a9682715bcf9b256d9aa756f1c5ec252f01d12878232b7ff0c04ee5863b3
+oid sha256:740410c0957d4237839329c39f9127fc9ec659277437a31a7efe076a65623d9a
 size 8081782026
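Only the Git LFS pointer is versioned here; the 8,081,782,026-byte optimizer state itself lives in LFS storage under the sha256 oid above. A minimal sketch, assuming the file has already been pulled locally (via git lfs pull or hf_hub_download), of checking the downloaded blob against the oid recorded in the pointer:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks and return its hex sha256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid from the new LFS pointer in this commit
expected = "740410c0957d4237839329c39f9127fc9ec659277437a31a7efe076a65623d9a"
assert sha256_of("inner_optimizer.pt") == expected, "LFS blob does not match pointer"
```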
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:04da0f80afc795a586db1d7f38c9737626a805e75039365d75846ec2adcbc2b1
+oid sha256:f1402cfb71c598ec816d63162a8fe647cb903bc699f9a02e8ace9288edcbbf47
 size 4040701744
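The weights file (4,040,701,744 bytes, ~4.04 GB) is likewise an LFS-tracked safetensors checkpoint. A minimal sketch of loading it directly with the safetensors library, assuming it has been downloaded locally; loading through the custom GPTOptim class referenced in auto_map would instead go through AutoModelForCausalLM with trust_remote_code=True.

```python
from safetensors.torch import load_file

# Load the raw tensor dict from the checkpoint updated in this commit.
state_dict = load_file("model.safetensors")
total_params = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total_params:,} parameters")
```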