crypt0trading committed
Commit 8515d01 (verified) · Parent: c8d9163

Outer Step 8. Inner Step 0. Batch Size 0

Files changed (3):
  1. config.json +5 -16
  2. inner_optimizer.pt +2 -2
  3. model.safetensors +1 -1
config.json CHANGED
@@ -146,7 +146,7 @@
   "225": "NON_PARTICIPATING",
   "226": "NON_PARTICIPATING",
   "227": "NON_PARTICIPATING",
-  "228": "SUCCESS",
+  "228": "NON_PARTICIPATING",
   "229": "NON_PARTICIPATING",
   "23": "NON_PARTICIPATING",
   "230": "NON_PARTICIPATING",
@@ -160,7 +160,7 @@
   "238": "NON_PARTICIPATING",
   "239": "NON_PARTICIPATING",
   "24": "NON_PARTICIPATING",
-  "240": "SUCCESS",
+  "240": "NON_PARTICIPATING",
   "241": "NON_PARTICIPATING",
   "242": "NON_PARTICIPATING",
   "243": "NON_PARTICIPATING",
@@ -267,26 +267,15 @@
   "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
   "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
  },
- "block_list": [
-   5277036,
-   5277041,
-   5277045,
-   5277049,
-   5277053,
-   5277058,
-   5277062,
-   5277066,
-   5277071,
-   5277075
- ],
+ "block_list": [],
  "block_size": 1024,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
- "inner_step": 652,
+ "inner_step": 0,
  "inner_steps": 0,
- "last_allreduce_block": 5272223,
+ "last_allreduce_block": 5283755,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt_optimized",
  "n_embd": 1280,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5c2134b2f6ad916bf2ed8451e7cbd555ecc0684caf6d4189a9416414d53a388a
-size 8081782026
+oid sha256:492d06eeccafd6003662b45a8cb312d2adc5edf92fda9ed4fc27f30b35362c6a
+size 2944
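The pointer's recorded size falls from 8081782026 bytes (~8.1 GB) to 2944 bytes, consistent with the optimizer state being freshly re-initialized alongside the inner-step reset above. A Git LFS pointer carries only an oid (the sha256 of the blob) and its size, so a local download can be checked against it. A small sketch under that assumption, with hypothetical file names:

# Sketch: parse a Git LFS pointer file and verify a downloaded blob against it.
# File paths below are illustrative; the version/oid/size format is the
# standard one visible in the diff above.
import hashlib

def parse_pointer(path):
    # Each pointer line is "key value"; collect them into a dict.
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify(pointer_path, blob_path):
    # Hash the blob in 1 MiB chunks and compare with the recorded oid.
    expected = parse_pointer(pointer_path)["oid"].removeprefix("sha256:")
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected

# verify("inner_optimizer.pt.pointer", "inner_optimizer.pt")  # True if intact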
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ba86dc5655643ab04088a5e1a9b60aadfe8bb2275359501066f74906c19da4a0
+oid sha256:c933a9ed1f5f7c6efadfcb8d1009c01cfbc8fb83615528124ae7a9849a72cf46
 size 4040701744
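model.safetensors keeps exactly the same 4040701744-byte size while its content hash changes: safetensors stores raw tensors with a fixed layout, so updated weight values leave the file size untouched. A minimal sketch for inspecting the new weights, assuming the file has been materialized locally through Git LFS rather than left as a pointer:

# Sketch: open the updated checkpoint and list a few tensors. Tensor names
# depend on the GPTOptim implementation, so we enumerate rather than guess keys.
from safetensors.torch import load_file

state_dict = load_file("model.safetensors")
print(f"{len(state_dict)} tensors")
for name, tensor in list(state_dict.items())[:5]:
    print(name, tuple(tensor.shape), tensor.dtype)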