thieu86 committed (verified)
Commit ce1eeb8 · 1 Parent(s): 10c1f42

Outer Step 0. Inner Step 118. Batch Size 204

Files changed (3):
  1. config.json +11 -11
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,23 +268,23 @@
   "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
  },
  "block_list": [
-   5140372,
-   5140412,
-   5140455,
-   5140525,
-   5140566,
-   5140665,
-   5140702,
-   5140722,
-   5140766,
-   5140799
+   5140836,
+   5140881,
+   5140905,
+   5140939,
+   5140945,
+   5140951,
+   5140956,
+   5140961,
+   5140974,
+   5140995
  ],
  "block_size": 1024,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
- "inner_step": 112,
+ "inner_step": 118,
  "inner_steps": 0,
  "last_allreduce_block": 5072405,
  "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:76f3c3d028d99eeb57af329a6b0207bc68d4f1aa523ab2474467d6352124d0c5
+oid sha256:32356f227a88517e27964a265660d7a31f4cb60d9b9c6b77ae53a4bb879c4b61
 size 8081781770
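
The optimizer checkpoint is an ~8 GB LFS object; a quick sanity check after downloading it is a plain CPU-side torch.load. This is only a sketch: the layout of the saved object (e.g. whether it is a raw optimizer state_dict) is an assumption, not something this diff documents.

```python
import torch

# Inspect the inner optimizer checkpoint on CPU; the file is LFS-tracked,
# so make sure the ~8 GB blob itself was pulled, not just the pointer above.
# Depending on the torch version, weights_only=False may be required if the
# checkpoint contains non-tensor Python objects (an assumption here).
state = torch.load("inner_optimizer.pt", map_location="cpu")

print(type(state))
if isinstance(state, dict):
    print(list(state.keys())[:10])  # peek at the top-level keys
```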
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:41faa70560f53fd26efd73421fb84a48c7da227f042461865a9da6976a22947e
+oid sha256:2988d7661363e68b6f0b13e3526b87592be492488e4c0f8ea0c07b9b7c282db6
 size 4040701744
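
Both binary files in this commit are Git LFS pointers, so the oid line is simply the SHA-256 of the real blob. A minimal sketch for verifying a downloaded file against the oid recorded above (file name and expected hash are copied from this diff):

```python
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    """Stream the file in 1 MiB chunks and return its hex SHA-256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# New oid for model.safetensors from the LFS pointer in this commit.
expected = "2988d7661363e68b6f0b13e3526b87592be492488e4c0f8ea0c07b9b7c282db6"
actual = sha256_of("model.safetensors")
print("match" if actual == expected else f"mismatch: {actual}")
```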