1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
{
  "_name_or_path": "distributed/llama-1b",
  "all_reduce_scores": {
    "0": "NOT_ALIVE",
    "1": "SUCCESS",
    "10": "NOT_ALIVE",
    "100": "NOT_ALIVE",
    "101": "NON_PARTICIPATING",
    "102": "NOT_ALIVE",
    "103": "NOT_ALIVE",
    "104": "NOT_ALIVE",
    "105": "NON_PARTICIPATING",
    "106": "NOT_ALIVE",
    "107": "NOT_ALIVE",
    "108": "NON_PARTICIPATING",
    "109": "NOT_ALIVE",
    "11": "NOT_ALIVE",
    "110": "NOT_ALIVE",
    "111": "NON_PARTICIPATING",
    "112": "NON_PARTICIPATING",
    "113": "NOT_ALIVE",
    "114": "NOT_ALIVE",
    "115": "NOT_ALIVE",
    "116": "NOT_ALIVE",
    "117": "NOT_ALIVE",
    "118": "NON_PARTICIPATING",
    "119": "NOT_ALIVE",
    "12": "NOT_ALIVE",
    "120": "NON_PARTICIPATING",
    "121": "NOT_ALIVE",
    "122": "NON_PARTICIPATING",
    "123": "NOT_ALIVE",
    "124": "NON_PARTICIPATING",
    "125": "NOT_ALIVE",
    "126": "NOT_ALIVE",
    "127": "NOT_ALIVE",
    "128": "NOT_ALIVE",
    "129": "NOT_ALIVE",
    "13": "NOT_ALIVE",
    "130": "NOT_ALIVE",
    "131": "NOT_ALIVE",
    "132": "NOT_ALIVE",
    "133": "SUCCESS",
    "134": "NOT_ALIVE",
    "135": "NON_PARTICIPATING",
    "136": "NON_PARTICIPATING",
    "137": "NON_PARTICIPATING",
    "138": "NON_PARTICIPATING",
    "139": "NOT_ALIVE",
    "14": "NOT_ALIVE",
    "140": "SUCCESS",
    "141": "NON_PARTICIPATING",
    "142": "NON_PARTICIPATING",
    "143": "NON_PARTICIPATING",
    "144": "SUCCESS",
    "145": "NOT_ALIVE",
    "146": "NON_PARTICIPATING",
    "147": "NON_PARTICIPATING",
    "148": "SUCCESS",
    "149": "NOT_ALIVE",
    "15": "SUCCESS",
    "150": "NOT_ALIVE",
    "151": "NON_PARTICIPATING",
    "152": "NOT_ALIVE",
    "153": "NOT_ALIVE",
    "154": "NON_PARTICIPATING",
    "155": "NON_PARTICIPATING",
    "156": "NOT_ALIVE",
    "157": "NOT_ALIVE",
    "158": "NON_PARTICIPATING",
    "159": "NOT_ALIVE",
    "16": "NOT_ALIVE",
    "160": "NON_PARTICIPATING",
    "161": "NON_PARTICIPATING",
    "162": "NON_PARTICIPATING",
    "163": "NOT_ALIVE",
    "164": "NOT_ALIVE",
    "165": "NON_PARTICIPATING",
    "166": "NON_PARTICIPATING",
    "167": "NOT_ALIVE",
    "168": "NON_PARTICIPATING",
    "169": "NON_PARTICIPATING",
    "17": "NOT_ALIVE",
    "170": "NON_PARTICIPATING",
    "171": "NON_PARTICIPATING",
    "172": "NON_PARTICIPATING",
    "173": "NON_PARTICIPATING",
    "174": "NON_PARTICIPATING",
    "175": "NON_PARTICIPATING",
    "176": "NOT_ALIVE",
    "177": "NOT_ALIVE",
    "178": "NOT_ALIVE",
    "179": "NOT_ALIVE",
    "18": "SUCCESS",
    "180": "NON_PARTICIPATING",
    "181": "NOT_ALIVE",
    "182": "NON_PARTICIPATING",
    "183": "NOT_ALIVE",
    "184": "NON_PARTICIPATING",
    "185": "NON_PARTICIPATING",
    "186": "NON_PARTICIPATING",
    "187": "SUCCESS",
    "188": "SUCCESS",
    "189": "SUCCESS",
    "19": "NOT_ALIVE",
    "190": "NON_PARTICIPATING",
    "191": "NON_PARTICIPATING",
    "192": "NON_PARTICIPATING",
    "193": "NOT_ALIVE",
    "194": "NOT_ALIVE",
    "195": "NON_PARTICIPATING",
    "196": "NOT_ALIVE",
    "197": "SUCCESS",
    "198": "NON_PARTICIPATING",
    "199": "NOT_ALIVE",
    "2": "NOT_ALIVE",
    "20": "NOT_ALIVE",
    "200": "NON_PARTICIPATING",
    "201": "SUCCESS",
    "202": "NON_PARTICIPATING",
    "203": "NON_PARTICIPATING",
    "204": "NOT_ALIVE",
    "205": "NOT_ALIVE",
    "206": "NOT_ALIVE",
    "207": "NOT_ALIVE",
    "208": "NOT_ALIVE",
    "209": "NON_PARTICIPATING",
    "21": "SUCCESS",
    "210": "NOT_ALIVE",
    "211": "NON_PARTICIPATING",
    "212": "NON_PARTICIPATING",
    "213": "NON_PARTICIPATING",
    "214": "NON_PARTICIPATING",
    "215": "NOT_ALIVE",
    "216": "SUCCESS",
    "217": "SUCCESS",
    "218": "NON_PARTICIPATING",
    "219": "SUCCESS",
    "22": "NOT_ALIVE",
    "220": "NOT_ALIVE",
    "221": "NOT_ALIVE",
    "222": "NON_PARTICIPATING",
    "223": "NOT_ALIVE",
    "224": "NOT_ALIVE",
    "225": "NOT_ALIVE",
    "226": "NON_PARTICIPATING",
    "227": "NON_PARTICIPATING",
    "228": "SUCCESS",
    "229": "NOT_ALIVE",
    "23": "SUCCESS",
    "230": "SUCCESS",
    "231": "NOT_ALIVE",
    "232": "NOT_ALIVE",
    "233": "NON_PARTICIPATING",
    "234": "NOT_ALIVE",
    "235": "NON_PARTICIPATING",
    "236": "NOT_ALIVE",
    "237": "NON_PARTICIPATING",
    "238": "NON_PARTICIPATING",
    "239": "NON_PARTICIPATING",
    "24": "NOT_ALIVE",
    "240": "NOT_ALIVE",
    "241": "NON_PARTICIPATING",
    "242": "SUCCESS",
    "243": "NOT_ALIVE",
    "244": "NON_PARTICIPATING",
    "245": "NON_PARTICIPATING",
    "246": "NON_PARTICIPATING",
    "247": "NOT_ALIVE",
    "248": "NOT_ALIVE",
    "249": "NOT_ALIVE",
    "25": "SUCCESS",
    "250": "NOT_ALIVE",
    "251": "NON_PARTICIPATING",
    "252": "NON_PARTICIPATING",
    "253": "NON_PARTICIPATING",
    "254": "NON_PARTICIPATING",
    "255": "NON_PARTICIPATING",
    "26": "NON_PARTICIPATING",
    "27": "NOT_ALIVE",
    "28": "NON_PARTICIPATING",
    "29": "NOT_ALIVE",
    "3": "SUCCESS",
    "30": "NOT_ALIVE",
    "31": "NOT_ALIVE",
    "32": "FAIL",
    "33": "NOT_ALIVE",
    "34": "NON_PARTICIPATING",
    "35": "NON_PARTICIPATING",
    "36": "SUCCESS",
    "37": "NOT_ALIVE",
    "38": "NON_PARTICIPATING",
    "39": "NOT_ALIVE",
    "4": "NOT_ALIVE",
    "40": "NON_PARTICIPATING",
    "41": "NOT_ALIVE",
    "42": "NOT_ALIVE",
    "43": "NON_PARTICIPATING",
    "44": "NON_PARTICIPATING",
    "45": "NOT_ALIVE",
    "46": "SUCCESS",
    "47": "NOT_ALIVE",
    "48": "NON_PARTICIPATING",
    "49": "NOT_ALIVE",
    "5": "NOT_ALIVE",
    "50": "SUCCESS",
    "51": "NOT_ALIVE",
    "52": "NOT_ALIVE",
    "53": "NON_PARTICIPATING",
    "54": "NOT_ALIVE",
    "55": "NON_PARTICIPATING",
    "56": "NOT_ALIVE",
    "57": "NON_PARTICIPATING",
    "58": "NOT_ALIVE",
    "59": "NON_PARTICIPATING",
    "6": "NOT_ALIVE",
    "60": "NOT_ALIVE",
    "61": "NON_PARTICIPATING",
    "62": "NOT_ALIVE",
    "63": "FAIL",
    "64": "NOT_ALIVE",
    "65": "NON_PARTICIPATING",
    "66": "NON_PARTICIPATING",
    "67": "NON_PARTICIPATING",
    "68": "NOT_ALIVE",
    "69": "NOT_ALIVE",
    "7": "NOT_ALIVE",
    "70": "NOT_ALIVE",
    "71": "NON_PARTICIPATING",
    "72": "NOT_ALIVE",
    "73": "NOT_ALIVE",
    "74": "NOT_ALIVE",
    "75": "NON_PARTICIPATING",
    "76": "NOT_ALIVE",
    "77": "NOT_ALIVE",
    "78": "NON_PARTICIPATING",
    "79": "NOT_ALIVE",
    "8": "NOT_ALIVE",
    "80": "SUCCESS",
    "81": "NON_PARTICIPATING",
    "82": "NOT_ALIVE",
    "83": "FAIL",
    "84": "NOT_ALIVE",
    "85": "NOT_ALIVE",
    "86": "NOT_ALIVE",
    "87": "NOT_ALIVE",
    "88": "NON_PARTICIPATING",
    "89": "NOT_ALIVE",
    "9": "NOT_ALIVE",
    "90": "NOT_ALIVE",
    "91": "NOT_ALIVE",
    "92": "NOT_ALIVE",
    "93": "NON_PARTICIPATING",
    "94": "NON_PARTICIPATING",
    "95": "NOT_ALIVE",
    "96": "NOT_ALIVE",
    "97": "NON_PARTICIPATING",
    "98": "NON_PARTICIPATING",
    "99": "SUCCESS"
  },
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "block_list": [
    6140868,
    6140887
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "inner_step": 104,
  "intermediate_size": 5632,
  "last_allreduce_block": 6140120,
  "max_position_embeddings": 2048,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 22,
  "num_key_value_heads": 4,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.39.3",
  "use_cache": false,
  "vocab_size": 32000
}