thieu86 committed
Commit d23e569 · verified · 1 parent: 334c808

Run 4. Outer Step 15. Inner Step 70.

Files changed (3):
  1. config.json +8 -7
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,19 +268,20 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5515061,
-    5515064,
-    5515069,
-    5515072,
-    5515076,
-    5515080
+    5515085,
+    5515088,
+    5515092,
+    5515096,
+    5515100,
+    5515104,
+    5515107
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 69,
+  "inner_step": 70,
   "inner_steps": 0,
   "last_allreduce_block": 5511564,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:562fcc9e24d29f74a06cc54a32f172644879763f30ea226b8e19bc83517d6ec9
+oid sha256:99559d3857e2f40e64d05e9ad7a7f0d2d0b54e713f7d29f24364f5f5313b60b5
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7bc06195c7791d1ba2312608ec690bf764c8a3de320e8ef1689d67dba4829168
+oid sha256:3d8e2368c5b90156472e8b8751f99d84e7322fcfdb05c2c40b99e15da1370c25
 size 4040701744
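Both inner_optimizer.pt and model.safetensors are stored via Git LFS, so the diffs above only swap the pointer's sha256 oid while the recorded byte size stays the same. A minimal sketch (not part of the commit) for checking that a locally downloaded model.safetensors matches the new pointer; the local path is a placeholder:

```python
# Sketch: verify a downloaded file against the oid and size in its LFS pointer.
import hashlib
import os

EXPECTED_OID = "3d8e2368c5b90156472e8b8751f99d84e7322fcfdb05c2c40b99e15da1370c25"  # from the diff above
EXPECTED_SIZE = 4040701744  # bytes, from the pointer's "size" line

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in chunks so the ~4 GB weights are never fully in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "model.safetensors"  # placeholder: path to the locally downloaded file
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch with the LFS pointer"
assert sha256_of(path) == EXPECTED_OID, "sha256 mismatch with the LFS pointer"
print("model.safetensors matches the LFS pointer at this commit")
```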