thieu86 committed
Commit 498857a (verified) · 1 Parent(s): e330d26

Run 4. Outer Step 36. Inner Step 30.

Files changed (3):
  1. config.json (+7 −7)
  2. inner_optimizer.pt (+1 −1)
  3. model.safetensors (+1 −1)
config.json CHANGED
@@ -268,19 +268,19 @@
   "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
  },
  "block_list": [
-  5550282,
-  5550286,
-  5550290,
-  5550294,
-  5550298,
-  5550302
+  5550307,
+  5550311,
+  5550315,
+  5550320,
+  5550324,
+  5550328
  ],
  "block_size": 1024,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
- "inner_step": 29,
+ "inner_step": 30,
  "inner_steps": 0,
  "last_allreduce_block": 5546853,
  "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b83cf6c59a5df862b328813a5e14bd59d52a13cc7bc41d153c2737a3285c7c85
+ oid sha256:d90f1731b2590b0a0a7d7fe8f80646085a7cfe6a389ee9b11ceeb1c7b526f460
  size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1f174c6929130f9608b8517748f9e9b709c8171632b8779f2f33698fd094deb4
+ oid sha256:9f5ccb456f5940106e8b0b7fe0c9a549c0a6f19f1472aa08a0fe3b3d8bdae20e
  size 4040701744
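
Both binaries are stored through Git LFS, so the diff only shows the pointer files: each pointer records the object's sha256 oid and byte size, and here only the oids change while the sizes stay constant, consistent with the same-shaped tensors being re-serialized each inner step. A minimal sketch, assuming the new model.safetensors has already been downloaded to a local path (the path below is a placeholder), of verifying it against the updated pointer:

```python
import hashlib
import os

# Values copied from the updated LFS pointer for model.safetensors above.
EXPECTED_OID = "9f5ccb456f5940106e8b0b7fe0c9a549c0a6f19f1472aa08a0fe3b3d8bdae20e"
EXPECTED_SIZE = 4040701744

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in 1 MiB chunks so a ~4 GB file never sits in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "model.safetensors"  # placeholder: wherever the file was downloaded
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match LFS pointer"
assert sha256_of(path) == EXPECTED_OID, "sha256 does not match LFS pointer"
print("model.safetensors matches this commit's LFS pointer")
```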