philivy committed (verified)
Commit 5e01b27 · Parent(s): 9eaac8b

Run 2. Outer Step 9. Inner Step 0.

Files changed (3)
  1. config.json +23 -29
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
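
To pull exactly the artifacts touched by this commit, downloads can be pinned to the commit hash. A minimal sketch using huggingface_hub, assuming the repository id is "distributed/optimized-gpt2-500m" (inferred from the auto_map entries in config.json below; adjust if the repo lives elsewhere):

# Sketch: fetch the three files changed in this commit, pinned to revision 5e01b27.
# REPO_ID is an assumption inferred from auto_map in config.json.
from huggingface_hub import hf_hub_download

REPO_ID = "distributed/optimized-gpt2-500m"   # assumption
REVISION = "5e01b27"                          # commit shown above

for filename in ["config.json", "inner_optimizer.pt", "model.safetensors"]:
    path = hf_hub_download(repo_id=REPO_ID, filename=filename, revision=REVISION)
    print(filename, "->", path)
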
config.json CHANGED
@@ -3,18 +3,18 @@
   "activation_function": "gelu_new",
   "all_reduce_scores": {
     "0": "NON_PARTICIPATING",
-    "1": "NON_PARTICIPATING",
+    "1": "SUCCESS",
     "10": "NON_PARTICIPATING",
     "100": "NON_PARTICIPATING",
-    "101": "NON_PARTICIPATING",
-    "102": "NON_PARTICIPATING",
+    "101": "SUCCESS",
+    "102": "SUCCESS",
     "103": "NON_PARTICIPATING",
     "104": "NON_PARTICIPATING",
     "105": "NON_PARTICIPATING",
     "106": "NON_PARTICIPATING",
     "107": "NON_PARTICIPATING",
     "108": "NON_PARTICIPATING",
-    "109": "NON_PARTICIPATING",
+    "109": "SUCCESS",
     "11": "NON_PARTICIPATING",
     "110": "NON_PARTICIPATING",
     "111": "NON_PARTICIPATING",
@@ -75,12 +75,12 @@
     "161": "NON_PARTICIPATING",
     "162": "NON_PARTICIPATING",
     "163": "NON_PARTICIPATING",
-    "164": "NON_PARTICIPATING",
+    "164": "SUCCESS",
     "165": "NON_PARTICIPATING",
     "166": "NON_PARTICIPATING",
     "167": "NON_PARTICIPATING",
     "168": "NON_PARTICIPATING",
-    "169": "NON_PARTICIPATING",
+    "169": "SUCCESS",
     "17": "NON_PARTICIPATING",
     "170": "NON_PARTICIPATING",
     "171": "NON_PARTICIPATING",
@@ -89,7 +89,7 @@
     "174": "NON_PARTICIPATING",
     "175": "NON_PARTICIPATING",
     "176": "NON_PARTICIPATING",
-    "177": "NON_PARTICIPATING",
+    "177": "SUCCESS",
     "178": "NON_PARTICIPATING",
     "179": "NON_PARTICIPATING",
     "18": "NON_PARTICIPATING",
@@ -107,7 +107,7 @@
     "190": "NON_PARTICIPATING",
     "191": "NON_PARTICIPATING",
     "192": "SUCCESS",
-    "193": "NON_PARTICIPATING",
+    "193": "SUCCESS",
     "194": "NON_PARTICIPATING",
     "195": "NON_PARTICIPATING",
     "196": "NON_PARTICIPATING",
@@ -115,7 +115,7 @@
     "198": "NON_PARTICIPATING",
     "199": "NON_PARTICIPATING",
     "2": "NON_PARTICIPATING",
-    "20": "NON_PARTICIPATING",
+    "20": "SUCCESS",
     "200": "SUCCESS",
     "201": "NON_PARTICIPATING",
     "202": "NON_PARTICIPATING",
@@ -137,16 +137,16 @@
     "217": "NON_PARTICIPATING",
     "218": "NON_PARTICIPATING",
     "219": "NON_PARTICIPATING",
-    "22": "NON_PARTICIPATING",
+    "22": "SUCCESS",
     "220": "NON_PARTICIPATING",
-    "221": "NON_PARTICIPATING",
+    "221": "SUCCESS",
     "222": "NON_PARTICIPATING",
     "223": "NON_PARTICIPATING",
     "224": "NON_PARTICIPATING",
-    "225": "NON_PARTICIPATING",
+    "225": "SUCCESS",
     "226": "NON_PARTICIPATING",
     "227": "NON_PARTICIPATING",
-    "228": "SUCCESS",
+    "228": "NON_PARTICIPATING",
     "229": "NON_PARTICIPATING",
     "23": "SUCCESS",
     "230": "NON_PARTICIPATING",
@@ -184,21 +184,21 @@
     "3": "NON_PARTICIPATING",
     "30": "NON_PARTICIPATING",
     "31": "NON_PARTICIPATING",
-    "32": "NON_PARTICIPATING",
+    "32": "SUCCESS",
     "33": "NON_PARTICIPATING",
     "34": "NON_PARTICIPATING",
-    "35": "NON_PARTICIPATING",
+    "35": "SUCCESS",
     "36": "SUCCESS",
     "37": "NON_PARTICIPATING",
     "38": "NON_PARTICIPATING",
-    "39": "SUCCESS",
+    "39": "NON_PARTICIPATING",
     "4": "NON_PARTICIPATING",
     "40": "NON_PARTICIPATING",
     "41": "NON_PARTICIPATING",
-    "42": "NON_PARTICIPATING",
-    "43": "NON_PARTICIPATING",
+    "42": "SUCCESS",
+    "43": "SUCCESS",
     "44": "NON_PARTICIPATING",
-    "45": "SUCCESS",
+    "45": "NON_PARTICIPATING",
     "46": "NON_PARTICIPATING",
     "47": "NON_PARTICIPATING",
     "48": "NON_PARTICIPATING",
@@ -238,7 +238,7 @@
     "79": "NON_PARTICIPATING",
     "8": "NON_PARTICIPATING",
     "80": "NON_PARTICIPATING",
-    "81": "NON_PARTICIPATING",
+    "81": "SUCCESS",
     "82": "NON_PARTICIPATING",
     "83": "NON_PARTICIPATING",
     "84": "NON_PARTICIPATING",
@@ -267,21 +267,15 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [
-    5338021,
-    5338025,
-    5338029,
-    5338033,
-    5338037
-  ],
+  "block_list": [],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 375,
+  "inner_step": 0,
   "inner_steps": 0,
-  "last_allreduce_block": 5332469,
+  "last_allreduce_block": 5351170,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt_optimized",
   "n_embd": 1280,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b1b1eaeb78c91b7aa0dfce940caf6bc8b1826416d639b815269454f64b29ea66
+oid sha256:57a7560b7d6dcff2f9b986cdf1519151986f63582928104d82fbbcf373d695a7
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3948ef8042932de49b3d5e8ee87fd23f70fb088bda1c43190d1a44c6d2bb8402
+oid sha256:30a3c2a1f583992135596ada96883cba467a485b6ef53f21ba48c2355f414587
 size 4040701744
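
Both weight files are stored with Git LFS, so the diff only shows their pointer files changing: a new sha256 oid with the byte size unchanged. A minimal sketch, assuming the actual objects have been downloaded locally, for checking a downloaded artifact against the oid and size recorded in its pointer:

# Sketch: verify a downloaded LFS object against the oid/size from its pointer file.
# The path is a placeholder; point it at the locally downloaded file.
import hashlib
import os

def sha256_of(path, chunk=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

path = "model.safetensors"             # local copy of the LFS object (assumption)
print("size:", os.path.getsize(path))  # expected 4040701744 for this revision
print("sha256:", sha256_of(path))      # expected to start with 30a3c2a1f583... for this revision
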