张瀚灵 committed
Commit ee7c54e · 1 parent: a748607

init commit

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   "  ": 50286,
+   "   ": 50285,
+   "    ": 50284,
+   "     ": 50283,
+   "      ": 50282,
+   "       ": 50281,
+   "        ": 50280,
+   "         ": 50279,
+   "          ": 50278,
+   "           ": 50277,
+   "            ": 50276,
+   "             ": 50275,
+   "              ": 50274,
+   "               ": 50273,
+   "                ": 50272,
+   "                 ": 50271,
+   "                  ": 50270,
+   "                   ": 50269,
+   "                    ": 50268,
+   "                     ": 50267,
+   "                      ": 50266,
+   "                       ": 50265,
+   "                        ": 50264,
+   "                         ": 50263,
+   "                          ": 50262,
+   "                           ": 50261,
+   "                            ": 50260,
+   "                             ": 50259,
+   "                              ": 50258,
+   "                               ": 50257
+ }
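
These are the CodeGen-style whitespace tokens that Phi-family tokenizers inherit: each run of 2-9 tabs or 2-31 spaces maps to a single id (50257-50294), appended after GPT-2's base vocabulary of 50257 tokens, so indented code tokenizes compactly. (The HTML extraction collapsed the space runs above; they are reconstructed here from the id pattern.) A quick check, assuming the `transformers` library, which this repo does not itself pin, can load the tokenizer files listed in mlc-chat-config.json:

    from transformers import AutoTokenizer

    # Load from a local checkout of this repo; added_tokens.json is picked up
    # automatically alongside vocab.json and merges.txt.
    tok = AutoTokenizer.from_pretrained(".")

    print(tok.encode("\t\t"))  # expected: [50294], one token for a 2-tab run
    print(tok.encode("    "))  # expected: [50284], one token for a 4-space run
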
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
mlc-chat-config.json ADDED
@@ -0,0 +1,330 @@
+ {
+   "model_type": "asvd_phi",
+   "quantization": "q0f16",
+   "model_config": {
+     "vocab_size": 51200,
+     "hidden_size": 2560,
+     "intermediate_size": 10240,
+     "num_hidden_layers": 32,
+     "num_attention_heads": 32,
+     "layer_norm_eps": 1e-05,
+     "position_embedding_base": 10000.0,
+     "partial_rotary_factor": 0.4,
+     "num_key_value_heads": 32,
+     "context_window_size": 2048,
+     "prefill_chunk_size": 2048,
+     "head_dim": 80,
+     "truncation_ranks": {
+       "model.layers.0.mlp.fc1": [
+         1475,
+         163
+       ],
+       "model.layers.0.mlp.fc2": [
+         1290,
+         143
+       ],
+       "model.layers.0.self_attn.dense": [
+         922,
+         102
+       ],
+       "model.layers.0.self_attn.k_proj": [
+         922,
+         102
+       ],
+       "model.layers.0.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.1.mlp.fc1": [
+         1475,
+         163
+       ],
+       "model.layers.1.self_attn.dense": [
+         807,
+         89
+       ],
+       "model.layers.1.self_attn.k_proj": [
+         692,
+         76
+       ],
+       "model.layers.1.self_attn.v_proj": [
+         576,
+         64
+       ],
+       "model.layers.10.self_attn.q_proj": [
+         692,
+         76
+       ],
+       "model.layers.11.self_attn.dense": [
+         461,
+         51
+       ],
+       "model.layers.11.self_attn.v_proj": [
+         692,
+         76
+       ],
+       "model.layers.12.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.12.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.12.self_attn.v_proj": [
+         922,
+         102
+       ],
+       "model.layers.13.self_attn.dense": [
+         807,
+         89
+       ],
+       "model.layers.13.self_attn.q_proj": [
+         576,
+         64
+       ],
+       "model.layers.14.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.14.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.15.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.15.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.16.self_attn.dense": [
+         922,
+         102
+       ],
+       "model.layers.16.self_attn.k_proj": [
+         922,
+         102
+       ],
+       "model.layers.17.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.17.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.18.self_attn.dense": [
+         922,
+         102
+       ],
+       "model.layers.19.self_attn.k_proj": [
+         922,
+         102
+       ],
+       "model.layers.19.self_attn.v_proj": [
+         922,
+         102
+       ],
+       "model.layers.2.mlp.fc1": [
+         1475,
+         163
+       ],
+       "model.layers.2.self_attn.dense": [
+         807,
+         89
+       ],
+       "model.layers.2.self_attn.k_proj": [
+         1037,
+         115
+       ],
+       "model.layers.2.self_attn.q_proj": [
+         461,
+         51
+       ],
+       "model.layers.20.self_attn.dense": [
+         807,
+         89
+       ],
+       "model.layers.20.self_attn.q_proj": [
+         1037,
+         115
+       ],
+       "model.layers.21.self_attn.dense": [
+         692,
+         76
+       ],
+       "model.layers.21.self_attn.q_proj": [
+         1037,
+         115
+       ],
+       "model.layers.22.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.22.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.23.self_attn.dense": [
+         922,
+         102
+       ],
+       "model.layers.23.self_attn.q_proj": [
+         922,
+         102
+       ],
+       "model.layers.23.self_attn.v_proj": [
+         692,
+         76
+       ],
+       "model.layers.24.self_attn.dense": [
+         461,
+         51
+       ],
+       "model.layers.24.self_attn.q_proj": [
+         922,
+         102
+       ],
+       "model.layers.25.self_attn.k_proj": [
+         576,
+         64
+       ],
+       "model.layers.26.self_attn.k_proj": [
+         922,
+         102
+       ],
+       "model.layers.26.self_attn.q_proj": [
+         1037,
+         115
+       ],
+       "model.layers.27.self_attn.dense": [
+         692,
+         76
+       ],
+       "model.layers.27.self_attn.k_proj": [
+         807,
+         89
+       ],
+       "model.layers.28.self_attn.dense": [
+         576,
+         64
+       ],
+       "model.layers.28.self_attn.q_proj": [
+         922,
+         102
+       ],
+       "model.layers.29.self_attn.q_proj": [
+         922,
+         102
+       ],
+       "model.layers.29.self_attn.v_proj": [
+         807,
+         89
+       ],
+       "model.layers.3.self_attn.dense": [
+         922,
+         102
+       ],
+       "model.layers.3.self_attn.k_proj": [
+         1037,
+         115
+       ],
+       "model.layers.30.self_attn.dense": [
+         922,
+         102
+       ],
+       "model.layers.30.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.31.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.31.self_attn.q_proj": [
+         1037,
+         115
+       ],
+       "model.layers.4.self_attn.dense": [
+         807,
+         89
+       ],
+       "model.layers.4.self_attn.k_proj": [
+         1037,
+         115
+       ],
+       "model.layers.4.self_attn.q_proj": [
+         692,
+         76
+       ],
+       "model.layers.5.self_attn.k_proj": [
+         1037,
+         115
+       ],
+       "model.layers.5.self_attn.q_proj": [
+         807,
+         89
+       ],
+       "model.layers.6.self_attn.dense": [
+         807,
+         89
+       ],
+       "model.layers.6.self_attn.q_proj": [
+         922,
+         102
+       ],
+       "model.layers.7.self_attn.q_proj": [
+         692,
+         76
+       ],
+       "model.layers.8.self_attn.dense": [
+         1037,
+         115
+       ],
+       "model.layers.8.self_attn.k_proj": [
+         692,
+         76
+       ],
+       "model.layers.8.self_attn.q_proj": [
+         1037,
+         115
+       ],
+       "model.layers.9.self_attn.dense": [
+         692,
+         76
+       ],
+       "model.layers.9.self_attn.q_proj": [
+         576,
+         64
+       ]
+     },
+     "tensor_parallel_shards": 1
+   },
+   "vocab_size": 51200,
+   "context_window_size": 2048,
+   "sliding_window_size": -1,
+   "prefill_chunk_size": 2048,
+   "attention_sink_size": -1,
+   "tensor_parallel_shards": 1,
+   "max_batch_size": 80,
+   "mean_gen_len": 128,
+   "max_gen_len": 512,
+   "shift_fill_factor": 0.3,
+   "temperature": 1.0,
+   "repetition_penalty": 1.0,
+   "top_p": 0.5,
+   "conv_template": "LM",
+   "pad_token_id": 0,
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "tokenizer_files": [
+     "tokenizer.json",
+     "vocab.json",
+     "merges.txt",
+     "added_tokens.json",
+     "tokenizer_config.json"
+   ],
+   "version": "0.1.0"
+ }
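
The `truncation_ranks` table is what the `asvd_phi` model type (presumably an ASVD, activation-aware SVD, compressed Phi-2) adds over a stock MLC config: each listed linear layer is stored as a low-rank factorization, and layers not listed stay dense. The config does not document the pair format; the second number consistently tracks the first at close to a 1:9 ratio, and the sketch below assumes the first number is the retained rank. Under that assumption, a rank-r factorization of an out x in layer stores r*(out+in) parameters instead of out*in; for `model.layers.0.mlp.fc1` (10240 x 2560) at rank 1475 that is 18,880,000 versus 26,214,400, roughly a 28% reduction. A hedged sketch of the bookkeeping:

    import json

    HIDDEN, INTERMEDIATE = 2560, 10240  # from model_config above

    # Dense (out_features, in_features) shapes of the factorized Phi layers.
    SHAPES = {
        "mlp.fc1": (INTERMEDIATE, HIDDEN),
        "mlp.fc2": (HIDDEN, INTERMEDIATE),
        "self_attn.q_proj": (HIDDEN, HIDDEN),
        "self_attn.k_proj": (HIDDEN, HIDDEN),
        "self_attn.v_proj": (HIDDEN, HIDDEN),
        "self_attn.dense": (HIDDEN, HIDDEN),
    }

    with open("mlc-chat-config.json") as f:
        ranks = json.load(f)["model_config"]["truncation_ranks"]

    dense = factored = 0
    for name, (rank, _) in ranks.items():
        out_f, in_f = SHAPES[name.split(".", 3)[3]]  # drop "model.layers.N."
        dense += out_f * in_f                # full weight matrix
        factored += rank * (out_f + in_f)    # U: out x r, V: r x in (assumed)
    print(f"listed layers: {dense:,} dense params -> {factored:,} factored")
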
ndarray-cache.json ADDED
The diff for this file is too large to render. See raw diff
 
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76772b626f4941182e17194c49c813293a6c930e99f41c4266bbf320331364bc
+ size 262144000
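
Each params_shard_*.bin entry is a Git LFS pointer rather than the weights themselves: three `key value` lines giving the spec version, the SHA-256 of the real payload, and its byte size. (262,144,000 bytes is exactly vocab_size x hidden_size x 2, i.e. a 51200 x 2560 fp16 matrix, so shard 0 plausibly holds the token embeddings.) A minimal parser for the pointer format, with a hypothetical helper name:

    def read_lfs_pointer(path: str) -> dict:
        # A pointer file is plain text: "version ...", "oid sha256:...", "size N".
        fields = {}
        with open(path) as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        return fields

    ptr = read_lfs_pointer("params_shard_0.bin")  # only on a checkout without LFS smudging
    print(ptr["oid"], int(ptr["size"]))
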
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeed8f9939e08ec774f4b20528a5dcae4145adcd9d6d733b7eb878593f42558c
+ size 52428800
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16183de51d729da63f07cedf57d7d38447200758e6937fb475b5bf8aec50994f
+ size 52428800
params_shard_100.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93fb492e0e0ff5b6bfa72fd65b6664291a8d936555449f209daaa2956ab8df1d
+ size 52428800
params_shard_101.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d776d394839d5005822818435f05792a5f809831fc313bd90a3aa5d65007b4d7
+ size 52428800
params_shard_102.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1fa323830a21bc65e1afe9d5cb9b8de9b43f20a5d6bb402bfda45dfa67abfb54
+ size 21611520
params_shard_103.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9df31839a15b95359b0edaf7824d85b862300279655028d7436081ea7ae45dd9
+ size 29501440
params_shard_104.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76a816336979b2eeeac405f248dc84b96758ac26332027fbe5638a9a343097e2
+ size 52428800
params_shard_105.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:370b614a4beecb577d66804d6f567d002a15de7324a0919969379b56b751b5c4
+ size 52428800
params_shard_106.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b84c00a1b1bd7284ce34018fec9ceef136b55ffdf3e6475649a35804718a29b
+ size 26920960
params_shard_107.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e8e2f77488a99d3fe64f4bd723974d20f3446daeeaf001b7558ccbf78c22c19
+ size 23603200
params_shard_108.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3aeccd2080c6b511a8d0680cd8f001d1fa2a41b034e6119709d50d5b330ed12
+ size 52428800
params_shard_109.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a25b7b6656cc7f0a209fa03657592e8261f978b0b306fda4df6e4eb250c555e7
+ size 52428800
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c0b18a42582a54820c88cf8e3043793d7dbb4027f7bb3c22cb382533215ba1c
+ size 52428800
params_shard_110.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3bcd3fb89ff881fb3015441c39abb8c461540ec939a92db1731eecfb02909c58
+ size 26260480
params_shard_111.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e79a45e42f03563c4641ff230f59bd2c0fc12386b53d3030b2b6a841ea5aafc
+ size 20981760
params_shard_112.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:286dd1c2d7d9e153a38cefb894b53902500e015281fb6a0633e279d4472c236d
+ size 52428800
params_shard_113.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b5ed4b65683ccdb7889d33db459e0b4d3730a208c89db1ad8341df6291c845e
+ size 52428800
params_shard_114.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2292240d858315ab0f39e89511d05edcf8d96d516692789a6210a31aee37c417
+ size 32814080
params_shard_115.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5f8e1961ad40c75662eb949f40e402f4779728225c1e241dfb68e01b55b36e2
+ size 52428800
params_shard_116.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e57b7cbef72448dcc4a0c5d0dd6e6601eb59071537314c8485d1f2920e8e27d
+ size 52428800
params_shard_117.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30ade731a0d9a731594b224b362e6e76887a8d04f71cf9997dc5aa40f355d253
+ size 32824320
params_shard_118.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:376906f61bf403d59b67e033ca11cf41eaa51f93a9917e40fa25c653263484a7
+ size 32778240
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fb8aa4a34ebc1a8bb239ab95baf1c9a99c8086e716ced8969d25966ad1dda15
+ size 32819200
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1dda264a34ba2a65adaca8744b812892498818da9e0b67b61039b73084ff9a1
+ size 20981760
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c02ae6e577c9fb237a89852f1caebd1a4e0b2df46c22fb4b647bec411df60002
+ size 52428800
params_shard_15.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a22bc57bf53f9dadea0bf6b81b1bac140afc6e4a17426f5053c5b1311b25518e
+ size 52428800
params_shard_16.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4089bfceefa8c4a62127142bb5df044a0ccc583c815639652b9945e25589007b
+ size 31508480
params_shard_17.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ffde4961a1fa907fe4995b5ac415087b1d65cbb3cef25102841050bb62f0de3
+ size 52428800
params_shard_18.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b7dcfc0a1de379ee30df31eeda004bdec8d05529b388bff4165eabab0043c4cd
+ size 52428800
params_shard_19.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc76c900df2a7729f968e89390b0db71941c6e580f7d1ccbb5be240fca6859bb
+ size 32819200
params_shard_2.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:308d082548ba0bac75d949a779225c5aded8cbd091d4f297e7d9e9beb4f01fc2
+ size 262144000
params_shard_20.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa5d994175a60d5c9de27963076ec3626f31791e75e5f43adbf9b69091e6243f
+ size 52428800
params_shard_21.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46efc839fe87a3cae558933e3046009a90c11da7fc93344b32c58ae5ff0e4556
+ size 52428800
params_shard_22.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b92df04d07dab90fe0181aa3482e87ee4d5f044eec0eedbc1d51d3a1c31c191d
+ size 32814080
params_shard_23.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbee17aef27542ac668cc323759b75c354ae07666b00af7ff23c038870ce9286
+ size 28856320
params_shard_24.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:691c1b56e352707c351e51119be5278bc38c2d8128fdf4ab2abd7537a7a07c60
+ size 52428800
params_shard_25.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c36c7522722420bff4c38abcedb95c124b2e6f626f182154809ded56d224901f
+ size 52428800
params_shard_26.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:056b49043ccc291a16eba0506b5bacf9e4f6e7c64786fd4d58ceccb95f79f15a
+ size 24949760
params_shard_27.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f806ca92e1cdec9a383caf6e0f1f9d3f08210c12ef1ad4dcb2ef51368ba9195a
+ size 22292480
params_shard_28.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:061983a4beaf49e6d215c216c6b413ac2171b43790363c5ec7d9b3699b1a2bc3
+ size 52428800
params_shard_29.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3a365a5a5fe47a56fb8ff1f2c777fc867ec171f7046087a971893a0ae3186bf
+ size 52428800
params_shard_3.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:497b234ee8017c00becca5c6871b4fb2ad199a67b554697da8962f6a45fb1ade
+ size 30366720
params_shard_30.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27059127c20b97f45c9b4f6f60ad2496a427b13dc93b11c30cadbb5232db6a17
+ size 24949760
params_shard_31.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6edb4bf3b3be0ff1c41f98f89ebaa26272d7a3d30f7bb9c2987ba49717c1760a
+ size 22292480
params_shard_32.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef556acd0cfbf9564f5c50139682b82eeb2040a5a54aed1034383b6c01b719bf
+ size 52428800