thieu86 committed (verified)
Commit a7e6286 · 1 Parent(s): 3d06fa4

Outer Step 2. Inner Step 588. Batch Size 348

Files changed (2)
  1. config.json +6 -6
  2. model.safetensors +1 -1
config.json CHANGED
@@ -268,18 +268,18 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5081235,
-    5081244,
-    5081249,
-    5081262,
-    5081266
+    5081284,
+    5081289,
+    5081295,
+    5081300,
+    5081307
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 584,
+  "inner_step": 588,
   "inner_steps": 0,
   "last_allreduce_block": 5072405,
   "layer_norm_epsilon": 1e-05,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:adfdefb82a33b1cb1d17cbdaa987bed7523a9e4b5b59dd069f640f4839c3996d
+oid sha256:383bbce3e60e92191a56027e85f3b050b4886834a159b0a28cf37d673b98eaf3
 size 4040701744
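
For reference, the auto_map entry in config.json points at a custom GPTOptim class shipped inside the repo, so loading this checkpoint requires trust_remote_code=True. Below is a minimal sketch, assuming the repo id "distributed/optimized-gpt2-500m" implied by the auto_map prefix and pinning to this commit via its hash; it is an illustration, not part of the commit itself.

from transformers import AutoModelForCausalLM

# Assumption: repo id taken from the "distributed/optimized-gpt2-500m--..." auto_map prefix.
repo_id = "distributed/optimized-gpt2-500m"

# revision pins the download to this exact commit (a7e6286);
# trust_remote_code is needed because the model class (GPTOptim) lives in the repo.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    revision="a7e6286",
    trust_remote_code=True,
)

# The distributed-training bookkeeping from config.json is exposed on the config object.
print(model.config.inner_step)   # 588 after this commit
print(model.config.block_list)   # [5081284, 5081289, 5081295, 5081300, 5081307]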