bdubayah committed on
Commit
5b81455
·
verified ·
1 Parent(s): c9fd570

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. hf_quant_config.json +232 -49
  2. model-00002-of-000163.safetensors +2 -2
  3. model-00003-of-000163.safetensors +2 -2
  4. model-00004-of-000163.safetensors +2 -2
  5. model-00005-of-000163.safetensors +2 -2
  6. model-00006-of-000163.safetensors +2 -2
  7. model-00007-of-000163.safetensors +2 -2
  8. model-00008-of-000163.safetensors +2 -2
  9. model-00009-of-000163.safetensors +2 -2
  10. model-00010-of-000163.safetensors +2 -2
  11. model-00011-of-000163.safetensors +2 -2
  12. model-00012-of-000163.safetensors +2 -2
  13. model-00013-of-000163.safetensors +2 -2
  14. model-00014-of-000163.safetensors +2 -2
  15. model-00015-of-000163.safetensors +2 -2
  16. model-00016-of-000163.safetensors +2 -2
  17. model-00017-of-000163.safetensors +2 -2
  18. model-00018-of-000163.safetensors +2 -2
  19. model-00019-of-000163.safetensors +2 -2
  20. model-00020-of-000163.safetensors +2 -2
  21. model-00021-of-000163.safetensors +2 -2
  22. model-00022-of-000163.safetensors +2 -2
  23. model-00023-of-000163.safetensors +2 -2
  24. model-00024-of-000163.safetensors +2 -2
  25. model-00025-of-000163.safetensors +2 -2
  26. model-00026-of-000163.safetensors +2 -2
  27. model-00027-of-000163.safetensors +2 -2
  28. model-00028-of-000163.safetensors +2 -2
  29. model-00030-of-000163.safetensors +2 -2
  30. model-00031-of-000163.safetensors +2 -2
  31. model-00032-of-000163.safetensors +2 -2
  32. model-00033-of-000163.safetensors +2 -2
  33. model-00034-of-000163.safetensors +2 -2
  34. model-00035-of-000163.safetensors +2 -2
  35. model-00036-of-000163.safetensors +2 -2
  36. model-00037-of-000163.safetensors +2 -2
  37. model-00038-of-000163.safetensors +2 -2
  38. model-00039-of-000163.safetensors +2 -2
  39. model-00041-of-000163.safetensors +2 -2
  40. model-00042-of-000163.safetensors +2 -2
  41. model-00043-of-000163.safetensors +2 -2
  42. model-00044-of-000163.safetensors +2 -2
  43. model-00045-of-000163.safetensors +2 -2
  44. model-00046-of-000163.safetensors +2 -2
  45. model-00047-of-000163.safetensors +2 -2
  46. model-00048-of-000163.safetensors +2 -2
  47. model-00049-of-000163.safetensors +2 -2
  48. model-00050-of-000163.safetensors +2 -2
  49. model-00052-of-000163.safetensors +2 -2
  50. model-00054-of-000163.safetensors +2 -2
hf_quant_config.json CHANGED
@@ -1,75 +1,258 @@
1
  {
2
  "producer": {
3
  "name": "modelopt",
4
- "version": "0.29.0"
5
  },
6
  "quantization": {
7
  "quant_algo": "NVFP4",
8
  "kv_cache_quant_algo": null,
9
  "group_size": 16,
10
  "exclude_modules": [
11
- "lm_head",
12
  "model.layers.0.self_attn*",
13
- "model.layers.1.self_attn*",
14
- "model.layers.10.self_attn*",
15
- "model.layers.11.self_attn*",
16
- "model.layers.12.self_attn*",
 
 
 
 
 
 
17
  "model.layers.13.self_attn*",
18
- "model.layers.14.self_attn*",
19
- "model.layers.15.self_attn*",
20
- "model.layers.16.self_attn*",
21
- "model.layers.17.self_attn*",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  "model.layers.18.self_attn*",
23
- "model.layers.19.self_attn*",
 
 
 
 
 
 
 
24
  "model.layers.2.self_attn*",
25
- "model.layers.20.self_attn*",
26
- "model.layers.21.self_attn*",
27
- "model.layers.22.self_attn*",
 
 
 
 
28
  "model.layers.23.self_attn*",
29
- "model.layers.24.self_attn*",
30
- "model.layers.25.self_attn*",
31
- "model.layers.26.self_attn*",
32
- "model.layers.27.self_attn*",
33
- "model.layers.28.self_attn*",
34
- "model.layers.29.self_attn*",
35
- "model.layers.3.self_attn*",
36
- "model.layers.30.self_attn*",
37
- "model.layers.31.self_attn*",
38
- "model.layers.32.self_attn*",
39
- "model.layers.33.self_attn*",
40
- "model.layers.34.self_attn*",
41
  "model.layers.35.self_attn*",
42
- "model.layers.36.self_attn*",
43
- "model.layers.37.self_attn*",
 
 
 
 
 
 
 
 
 
 
44
  "model.layers.38.self_attn*",
45
- "model.layers.39.self_attn*",
46
- "model.layers.4.self_attn*",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  "model.layers.40.self_attn*",
48
- "model.layers.41.self_attn*",
49
  "model.layers.42.self_attn*",
50
- "model.layers.43.self_attn*",
 
51
  "model.layers.44.self_attn*",
52
- "model.layers.45.self_attn*",
53
- "model.layers.46.self_attn*",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  "model.layers.47.self_attn*",
55
- "model.layers.48.self_attn*",
56
- "model.layers.49.self_attn*",
57
- "model.layers.5.self_attn*",
58
- "model.layers.50.self_attn*",
59
- "model.layers.51.self_attn*",
60
- "model.layers.52.self_attn*",
61
  "model.layers.53.self_attn*",
62
- "model.layers.54.self_attn*",
63
- "model.layers.55.self_attn*",
64
  "model.layers.56.self_attn*",
65
- "model.layers.57.self_attn*",
66
- "model.layers.58.self_attn*",
67
- "model.layers.59.self_attn*",
68
- "model.layers.6.self_attn*",
69
- "model.layers.60.self_attn*",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  "model.layers.7.self_attn*",
71
- "model.layers.8.self_attn*",
72
- "model.layers.9.self_attn*"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
  ]
74
  }
75
  }
 
1
  {
2
  "producer": {
3
  "name": "modelopt",
4
+ "version": "0.25.0"
5
  },
6
  "quantization": {
7
  "quant_algo": "NVFP4",
8
  "kv_cache_quant_algo": null,
9
  "group_size": 16,
10
  "exclude_modules": [
 
11
  "model.layers.0.self_attn*",
12
+ "model.layers.36.post_attention_layernorm",
13
+ "model.layers.20.self_attn*",
14
+ "model.layers.27.post_attention_layernorm",
15
+ "model.layers.40.input_layernorm",
16
+ "model.layers.27.mlp.gate",
17
+ "model.layers.53.mlp.gate",
18
+ "model.layers.3.post_attention_layernorm",
19
+ "model.layers.39.mlp.gate",
20
+ "model.layers.59.input_layernorm",
21
+ "model.layers.60.self_attn*",
22
  "model.layers.13.self_attn*",
23
+ "model.layers.19.input_layernorm",
24
+ "model.embed_tokens",
25
+ "model.layers.59.mlp.gate",
26
+ "model.layers.52.post_attention_layernorm",
27
+ "model.layers.1.post_attention_layernorm",
28
+ "model.layers.11.self_attn*",
29
+ "model.layers.4.mlp.gate",
30
+ "model.layers.42.post_attention_layernorm",
31
+ "model.layers.33.post_attention_layernorm",
32
+ "model.layers.7.mlp.gate",
33
+ "model.layers.16.mlp.gate",
34
+ "model.layers.29.post_attention_layernorm",
35
+ "model.layers.9.self_attn*",
36
+ "model.layers.5.self_attn*",
37
+ "model.layers.32.mlp.gate",
38
+ "model.layers.6.input_layernorm",
39
+ "model.layers.4.post_attention_layernorm",
40
+ "model.layers.37.mlp.gate",
41
+ "model.layers.8.self_attn*",
42
+ "model.layers.6.post_attention_layernorm",
43
+ "model.layers.2.input_layernorm",
44
+ "model.layers.8.input_layernorm",
45
+ "model.layers.42.input_layernorm",
46
+ "model.norm",
47
+ "model.layers.55.post_attention_layernorm",
48
  "model.layers.18.self_attn*",
49
+ "model.layers.32.post_attention_layernorm",
50
+ "model.layers.20.input_layernorm",
51
+ "model.layers.56.mlp.gate",
52
+ "model.layers.36.input_layernorm",
53
+ "model.layers.17.input_layernorm",
54
+ "model.layers.10.mlp.gate",
55
+ "model.layers.34.post_attention_layernorm",
56
+ "model.layers.49.input_layernorm",
57
  "model.layers.2.self_attn*",
58
+ "model.layers.52.self_attn*",
59
+ "model.layers.15.input_layernorm",
60
+ "model.layers.20.mlp.gate",
61
+ "model.layers.22.post_attention_layernorm",
62
+ "model.layers.45.mlp.gate",
63
+ "model.layers.12.post_attention_layernorm",
64
+ "model.layers.44.input_layernorm",
65
  "model.layers.23.self_attn*",
66
+ "model.layers.1.input_layernorm",
67
+ "model.layers.44.post_attention_layernorm",
68
+ "model.layers.41.post_attention_layernorm",
69
+ "model.layers.50.input_layernorm",
70
+ "model.layers.45.self_attn*",
71
+ "model.layers.19.mlp.gate",
72
+ "model.layers.23.post_attention_layernorm",
73
+ "model.layers.59.post_attention_layernorm",
74
+ "model.layers.46.mlp.gate",
75
+ "model.layers.58.self_attn*",
76
+ "model.layers.3.input_layernorm",
77
+ "model.layers.39.post_attention_layernorm",
78
  "model.layers.35.self_attn*",
79
+ "model.layers.48.input_layernorm",
80
+ "model.layers.34.mlp.gate",
81
+ "model.layers.53.input_layernorm",
82
+ "model.layers.49.post_attention_layernorm",
83
+ "model.layers.32.self_attn*",
84
+ "model.layers.14.mlp.gate",
85
+ "model.layers.56.input_layernorm",
86
+ "model.layers.21.input_layernorm",
87
+ "model.layers.55.self_attn*",
88
+ "model.layers.13.post_attention_layernorm",
89
+ "model.layers.29.input_layernorm",
90
+ "model.layers.51.input_layernorm",
91
  "model.layers.38.self_attn*",
92
+ "model.layers.7.input_layernorm",
93
+ "model.layers.9.mlp.gate",
94
+ "model.layers.34.self_attn*",
95
+ "model.layers.40.post_attention_layernorm",
96
+ "model.layers.46.self_attn*",
97
+ "model.layers.47.input_layernorm",
98
+ "model.layers.48.post_attention_layernorm",
99
+ "model.layers.18.input_layernorm",
100
+ "model.layers.0.input_layernorm",
101
+ "model.layers.30.self_attn*",
102
+ "model.layers.38.input_layernorm",
103
+ "model.layers.12.self_attn*",
104
+ "model.layers.61*",
105
+ "model.layers.60.mlp.gate",
106
+ "model.layers.14.self_attn*",
107
+ "model.layers.17.mlp.gate",
108
  "model.layers.40.self_attn*",
 
109
  "model.layers.42.self_attn*",
110
+ "model.layers.16.input_layernorm",
111
+ "model.layers.52.input_layernorm",
112
  "model.layers.44.self_attn*",
113
+ "model.layers.24.post_attention_layernorm",
114
+ "model.layers.29.mlp.gate",
115
+ "model.layers.28.post_attention_layernorm",
116
+ "model.layers.35.input_layernorm",
117
+ "model.layers.39.input_layernorm",
118
+ "model.layers.8.mlp.gate",
119
+ "model.layers.6.self_attn*",
120
+ "model.layers.56.post_attention_layernorm",
121
+ "model.layers.40.mlp.gate",
122
+ "model.layers.31.input_layernorm",
123
+ "model.layers.26.self_attn*",
124
+ "model.layers.34.input_layernorm",
125
+ "model.layers.31.mlp.gate",
126
+ "model.layers.28.input_layernorm",
127
+ "model.layers.4.input_layernorm",
128
+ "model.layers.24.self_attn*",
129
+ "model.layers.13.input_layernorm",
130
+ "model.layers.33.mlp.gate",
131
+ "model.layers.4.self_attn*",
132
+ "model.layers.51.mlp.gate",
133
+ "model.layers.57.mlp.gate",
134
+ "model.layers.55.input_layernorm",
135
+ "model.layers.57.self_attn*",
136
+ "model.layers.58.input_layernorm",
137
+ "model.layers.8.post_attention_layernorm",
138
+ "model.layers.10.post_attention_layernorm",
139
+ "model.layers.32.input_layernorm",
140
+ "model.layers.3.mlp.gate",
141
+ "model.layers.17.self_attn*",
142
+ "model.layers.59.self_attn*",
143
+ "model.layers.45.post_attention_layernorm",
144
+ "model.layers.41.input_layernorm",
145
+ "model.layers.0.post_attention_layernorm",
146
+ "model.layers.26.input_layernorm",
147
+ "model.layers.43.input_layernorm",
148
+ "model.layers.54.input_layernorm",
149
+ "model.layers.26.mlp.gate",
150
+ "model.layers.38.post_attention_layernorm",
151
+ "model.layers.55.mlp.gate",
152
+ "model.layers.24.input_layernorm",
153
+ "model.layers.52.mlp.gate",
154
+ "model.layers.37.self_attn*",
155
+ "model.layers.9.post_attention_layernorm",
156
+ "model.layers.5.post_attention_layernorm",
157
+ "model.layers.19.self_attn*",
158
+ "model.layers.37.post_attention_layernorm",
159
+ "model.layers.19.post_attention_layernorm",
160
+ "model.layers.25.post_attention_layernorm",
161
+ "model.layers.29.self_attn*",
162
+ "model.layers.23.input_layernorm",
163
  "model.layers.47.self_attn*",
 
 
 
 
 
 
164
  "model.layers.53.self_attn*",
165
+ "model.layers.36.mlp.gate",
166
+ "model.layers.43.post_attention_layernorm",
167
  "model.layers.56.self_attn*",
168
+ "model.layers.21.post_attention_layernorm",
169
+ "model.layers.22.self_attn*",
170
+ "model.layers.24.mlp.gate",
171
+ "model.layers.16.post_attention_layernorm",
172
+ "model.layers.14.post_attention_layernorm",
173
+ "model.layers.15.self_attn*",
174
+ "model.layers.10.input_layernorm",
175
+ "model.layers.25.mlp.gate",
176
+ "lm_head",
177
+ "model.layers.28.mlp.gate",
178
+ "model.layers.23.mlp.gate",
179
+ "model.layers.18.mlp.gate",
180
+ "model.layers.46.post_attention_layernorm",
181
+ "model.layers.35.mlp.gate",
182
+ "model.layers.41.mlp.gate",
183
+ "model.layers.54.mlp.gate",
184
+ "model.layers.26.post_attention_layernorm",
185
+ "model.layers.44.mlp.gate",
186
+ "model.layers.43.self_attn*",
187
+ "model.layers.41.self_attn*",
188
+ "model.layers.28.self_attn*",
189
+ "model.layers.51.self_attn*",
190
+ "model.layers.43.mlp.gate",
191
+ "model.layers.18.post_attention_layernorm",
192
+ "model.layers.33.self_attn*",
193
+ "model.layers.54.post_attention_layernorm",
194
+ "model.layers.5.mlp.gate",
195
+ "model.layers.47.post_attention_layernorm",
196
+ "model.layers.50.self_attn*",
197
+ "model.layers.42.mlp.gate",
198
+ "model.layers.31.self_attn*",
199
  "model.layers.7.self_attn*",
200
+ "model.layers.25.input_layernorm",
201
+ "model.layers.21.self_attn*",
202
+ "model.layers.60.post_attention_layernorm",
203
+ "model.layers.30.mlp.gate",
204
+ "model.layers.39.self_attn*",
205
+ "model.layers.21.mlp.gate",
206
+ "model.layers.2.post_attention_layernorm",
207
+ "model.layers.48.mlp.gate",
208
+ "model.layers.6.mlp.gate",
209
+ "model.layers.15.post_attention_layernorm",
210
+ "model.layers.20.post_attention_layernorm",
211
+ "model.layers.57.input_layernorm",
212
+ "model.layers.11.post_attention_layernorm",
213
+ "model.layers.37.input_layernorm",
214
+ "model.layers.57.post_attention_layernorm",
215
+ "model.layers.48.self_attn*",
216
+ "model.layers.33.input_layernorm",
217
+ "model.layers.15.mlp.gate",
218
+ "model.layers.25.self_attn*",
219
+ "model.layers.16.self_attn*",
220
+ "model.layers.50.mlp.gate",
221
+ "model.layers.31.post_attention_layernorm",
222
+ "model.layers.3.self_attn*",
223
+ "model.layers.49.mlp.gate",
224
+ "model.layers.5.input_layernorm",
225
+ "model.layers.1.self_attn*",
226
+ "model.layers.58.post_attention_layernorm",
227
+ "model.layers.53.post_attention_layernorm",
228
+ "model.layers.10.self_attn*",
229
+ "model.layers.30.post_attention_layernorm",
230
+ "model.layers.38.mlp.gate",
231
+ "model.layers.47.mlp.gate",
232
+ "model.layers.27.input_layernorm",
233
+ "model.layers.36.self_attn*",
234
+ "model.layers.58.mlp.gate",
235
+ "model.layers.54.self_attn*",
236
+ "model.layers.46.input_layernorm",
237
+ "model.layers.30.input_layernorm",
238
+ "model.layers.13.mlp.gate",
239
+ "model.layers.7.post_attention_layernorm",
240
+ "model.layers.49.self_attn*",
241
+ "model.layers.14.input_layernorm",
242
+ "model.layers.45.input_layernorm",
243
+ "model.layers.11.input_layernorm",
244
+ "model.layers.17.post_attention_layernorm",
245
+ "model.layers.50.post_attention_layernorm",
246
+ "model.layers.35.post_attention_layernorm",
247
+ "model.layers.12.input_layernorm",
248
+ "model.layers.22.mlp.gate",
249
+ "model.layers.22.input_layernorm",
250
+ "model.layers.27.self_attn*",
251
+ "model.layers.11.mlp.gate",
252
+ "model.layers.60.input_layernorm",
253
+ "model.layers.51.post_attention_layernorm",
254
+ "model.layers.9.input_layernorm",
255
+ "model.layers.12.mlp.gate"
256
  ]
257
  }
258
  }
model-00002-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c5d1bcb509a4ce0a703a8e07286cf9d17f011bc75516bb9bf66eecbff96f498e
3
- size 2419596008
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3f9e4f9cf7d1c6a573bcbe674ee11aa9b90ab9685b36f80be26f635c48d418f8
3
+ size 2419597576
model-00003-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:01d9a787411729684af7902d47dc78912e9e8dbc077527681735a90fd132d36f
3
- size 2419596824
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0777a86a47fc418c1ef322c33691386522d333b3b404df3b61ec239b45cd1a91
3
+ size 2419598384
model-00004-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:787fa3d30882520d6f158af5c5a7065d8deb5f7ff5d6c0836c959a6847bf1c03
3
- size 2690157656
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5db1b7a614cea16547de4789ef5e612a3cb66ff7c732b53d2d1ef3ae7680bd10
3
+ size 2690159256
model-00005-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:eb38ce129fdff3a4dcb6b00fd1439a8bfbf3ed35ebe92ba1d0bbec14d7f32885
3
- size 2419596384
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f7ce4dca046a710094a1ea2ab2dddd293219c6298e813a9e3b1226466f627211
3
+ size 2419597944
model-00006-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:17c25d69b98a85b684f5b9ade89761078cd7d0670bb4765784f003b86b82d156
3
- size 2727777304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:53d66986b8a982abe9f1aa790c3b0e4e83747c4dbeb35a6c4f385345b3ff42b2
3
+ size 2727778936
model-00007-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9d3972ce05df1042d327f82bc6f5866c4a2995c09c6aca9a4e7c6a1783ff7c9b
3
- size 2423266840
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42c7f813aa615623be982858bbe3ec8dbf9a7280090c235a96e33f7f0b82a3b1
3
+ size 2423268408
model-00008-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:86277400e52dc1c5481f0c70646adb2ac59d3c34d13e196d66e383f00c6fd9fa
3
- size 2419596792
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0758339adcb747843bb85f51b06395e5e9cfb776c50e44576b4741e51b9d6419
3
+ size 2419598352
model-00009-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:93a00d4ce0002c4967623b843347d175bdb8fa049bbc3d00a8291375a30d3d56
3
- size 2690158040
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dc5630931ef9a4cf3e73affd90373775acc1f796b5228fa6bbcfabb0f0d4bba2
3
+ size 2690159640
model-00010-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:20dc1ade1e85faa7664b29456dcb0b20d0a884439fdcba06cf1b5a3e78d5e207
3
- size 2419596000
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48e0b921a9968378fb33551f81266d1316181633565793f36006eccb1445321d
3
+ size 2419597560
model-00011-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b5368924cd2b65ae48a0e27c2e7af2c709a277a6bcd889ff369f6805282e364b
3
- size 2419596824
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6a7d4cc1dcfcd4c391d19feee0e767479dfcc2cf7d841eadff338fad62848a4e
3
+ size 2419598384
model-00012-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2d0a678458837a2849c330e940ad1876d25c22ec6ad195188b1073a326056b0f
3
- size 743249264
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:010f867ab5fde9614a2409f2eeb2803426c9caa3c78b236bd1124b2d6e8cdda6
3
+ size 743249736
model-00013-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:89b542c62b7f740ea213139d345d3ae764d38b0dba562e6f515e0bf430a6e6cd
3
- size 2690128424
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:57b8f8ed92e2e6d06e1a2428704436f06337fa3338369bb78394f0f5bc381648
3
+ size 2690130024
model-00014-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cf169fc12c95701d9b0f924a83e58b3c366175fda5ead9e833bbeca1104074b4
3
- size 2419596728
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a2ef91e934a0112f3a32a8dbd5aa4fbe8468a56f7f198c26cb794122313da6cc
3
+ size 2419598296
model-00015-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5a7bc83bace663c712697ab281fab5ae89d56e57e9474b34b0a4fe6274be23be
3
- size 2690158096
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9dca1f2430074eae6fb1dd0716f63b5d7705f15c0af4083c5c0b59026f921111
3
+ size 2690159704
model-00016-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4acbeac4e6797022c30eeb90bc11572e05672988104df8cf8d863b166c75ed0f
3
- size 2419595936
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:00fb35e15f0d2c68260cd8e27965bf25d24e32b9fac32b7b3e7c377153a3f1cc
3
+ size 2419597504
model-00017-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cd617538d3769d0d9f4432998bcaea719bf7f2c77e8a02d5b94fc8bee809c79a
3
- size 2419596824
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:80d0328d8b8a63041392d0ef0c0df430ba36e855a1cccb87151afec187f6afd2
3
+ size 2419598384
model-00018-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a508ef0cfa9c8fc9c3995ad2f92312920d0db7ddec2de6f7a36c7bc52496f631
3
- size 2690157720
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a25feae8f7eb9444c015dbf51831db5dacae8d5ea60531eac8b85602396193b5
3
+ size 2690159320
model-00019-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:982007cda68b2564716a933612bd4e84bc182dd792fd00de96169d6a19bdb84b
3
- size 2419596320
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c82d5c41f363eab38c00a64e68a34454d854bcc6fb301bf7db70b62f850534ae
3
+ size 2419597880
model-00020-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1fe89bb2e412c7d324f476bc05f32dc0d2472d7594b134fe5187e8d6ca4eea3a
3
- size 2419596832
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3cc7f87e8cb02d6702914a4ce8114c52c9e70ee126bdd6cca3c6a4420eaec390
3
+ size 2419598392
model-00021-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:34768aee903ec00ba779596a6303d5d8dcc3e6a70edb33d55f4a9733930085a9
3
- size 2690158440
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:11fb987fb55e0ff1627d153d5af3118c38e18ac2932d22ff97d78d8c4072bf76
3
+ size 2690160048
model-00022-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:db9ee881a2eda67249bf04fe1c00f4a46289725070cb1da217d8ac995f5e2ddc
3
- size 2419597880
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5a013c6389434089051e5d2bc2c1c40de7531688c6b17aec9aff0fd8b874535f
3
+ size 2419599440
model-00023-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8750319707affbb5847a354485324f263e9273c69c6a0937c61719fbb04e27cc
3
- size 2690159256
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a39d6160bbb09858042f69e70bb1cab8c743a07bd550203d0e00737a0b8f42a7
3
+ size 2690160856
model-00024-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fa967ff8f5bba05b77dfacab3b56eb831abfa827ad8894493d219a87c78d9afe
3
- size 2419597088
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:73df2f5bebb1ce86db70c25a2f2617e7990c8aeedcab3cbec484b12d9f0b1500
3
+ size 2419598648
model-00025-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e7c7a140e9fa2d84015d5b48762bcc1f9b6cbac0bb36a7eaba32a96b596237c8
3
- size 2419597992
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:411d592a824c00ab3e5e9b736fd59306adde5f514b0a2a7ec5391c2092034c23
3
+ size 2419599560
model-00026-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e8c191f41090d86675ca4af706eeac2717ad7509d5646ad6c0184c824325be97
3
- size 2690158880
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:007f3856ea57072f23f8f8f8565943a9cfee5140dbdb13f8712ca5dab6c6d6ec
3
+ size 2690160480
model-00027-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:386f2e4cb50c1849b576eb7219bfc0961ce0a6b68581a3b2703874c552b5c693
3
- size 2419597456
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b234dbac0de4b6cea395f75a99c54f6c76a32ea0b33669b1a0efb350e8970749
3
+ size 2419599024
model-00028-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ac087c24ff7ddf2e61b978fc69e9efc06b661b44bb71a8aab3d592a3a40c8fb8
3
- size 2419597992
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b3006ff83a31f8dbfdb1def7dc844789d4daf304d47e29c688260d989983c0f0
3
+ size 2419599560
model-00030-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:08a2262f87535da1ef6396319861e1967d3b8eb2d7279f60fc6d9d1db4543896
3
- size 2419597840
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cf660ac1533cdb4ea708b9b5d7b609e7fdf13a764aea8239d8346155cb87183c
3
+ size 2419599400
model-00031-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cb7d93e5f1ab88a3a3b863f9f7651591eb81b5cc91e6a080e998665bd9026477
3
- size 2690159296
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fd19b8722a5e40b141df029747d3950e979e5ee06a86a77ff4326faa21d24470
3
+ size 2690160896
model-00032-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d7fa97d7a73d959403633da4a6217e3d7d9bd43057c4dff4dbac8b18c05c0c47
3
- size 2419597048
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:14ce2e1da90ac5d8e5f8316842132ac6702cc9a8972ec71df27ea239b5faff0d
3
+ size 2419598608
model-00033-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e9ddef59ee08d7639759407744973038bb4b2eacf3bbfe2920629fcd30cb5a9c
3
- size 2419598000
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f11078901a18b533f66de70b0280d2a72608cdafdbb09757f096f30c84e2b884
3
+ size 2419599568
model-00034-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:169f972d86955c8cb23469eb206797038b41cbbcae0c0d8cd3888a2955fed038
3
- size 982731896
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22c20fd5d6cb21d184ad2d00d6cf84cb0b0927ca887add24cf2724bc90cf48fd
3
+ size 982732544
model-00035-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b56fb8f6e4c403f93cd2749d56e75988cee7a3606b81bd52d0a2a6fbe0bc64ed
3
- size 2690129552
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e71f471603dd91a82b34380c7a4e7b9634e6d2cb0c8f6f1d8883d626955c81c
3
+ size 2690131152
model-00036-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:663537c4f8edb63e309c8e8664cceb80c17fffb370cb79ec1a10140b242af8f1
3
- size 2419597904
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7d33a83782379f3df984a05b501a080e6c2a741f86687bc9464d08604a4dc10b
3
+ size 2419599464
model-00037-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:aa66c35c84b8e5e32e04dfaba736a47cc55035bb18c656f402adecb91139a3ae
3
- size 2690159232
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:334fa0afe658018f0a5e1cfc3e98f74f8ad5dd9a7d9ee1540baa321e0f083e49
3
+ size 2690160832
model-00038-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:360d38f9e48b7200c9525cdb4742a07eb89bcb67f84d27414337905394d8b86a
3
- size 2419597112
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:49adc1a1d251ade1651eb086d2232bdf2cc1f32a7bcc1383b06a5d287f874c53
3
+ size 2419598672
model-00039-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3aff94316d3b280687201fced1a8aefe8257ffa9f35d668cd8f17d59aab8816a
3
- size 2419597992
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f08a165203e08759671ffb1dd4dc87c7370bfd107d10695290682e21a506c10a
3
+ size 2419599560
model-00041-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:479c7878ad1a39f51d912da9602865f42f38851fdf7dcedd0a5fa26097a35f41
3
- size 2419597488
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2f37d92e9075477298a7b71c59fe07fa029cb3f7c96ae81cefd7f6be16c27844
3
+ size 2419599056
model-00042-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e4f67b832165455164c0ef5c8b4c4ab1e7239ef81c305fe56f65f57e45531a9f
3
- size 2419598000
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b165463eb9228bd4ffb447c84573379eefbcc2b0e5204573ffca0b5e776c25c8
3
+ size 2419599568
model-00043-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2582049b2ff24855ac82b9bba52020781c863ff3f96da12c432803e28ea0e917
3
- size 2690158464
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:97eaf724d8f96ecde70fb2eca8d8dbf9e53c1ebdfffee9aafb480e28b88860f5
3
+ size 2690160064
model-00044-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b1927be919682684ae0439ba6b066964856365e29bb4fbdf23dc077b2184652c
3
- size 2419597880
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7491f0f4d8e7a3e6665948a35dd2128217c150b523895f86bf60a94241f4f313
3
+ size 2419599440
model-00045-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:be95db3b6c815d854909b37d8fab583cd3c4eb12d8860c379100a3f24ba34923
3
- size 2690159256
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ba88e4782ef59a18dfb9ca9c7c4d29089c44ec1fb30c23e55b87af659257313
3
+ size 2690160856
model-00046-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e8f06f231797a19079091eb5dd50f267d99be062fd2f14ad39b2002ee9022a0d
3
- size 2419597088
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0051cccbd464c9fe19b84815e0423711db3f152f0fa8e4b392eaa0d759e13efe
3
+ size 2419598648
model-00047-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6c4565f986bdf13c07b8714d2f20fa8b0f0efa8e87cd190c5c8a0930c8f252ff
3
- size 2419597992
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0c76b1f12c98ef0384f73154591ca89a4cddda189b2255cfd35b2ef29cf74efe
3
+ size 2419599560
model-00048-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:68c2f398b1c275ccd702c692787c8834238e6e7ee168cf561717f8b07b9a655d
3
- size 2690158880
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:903d022d70b86587888df25451f134c8cec26090c6bb774ee49d6556e327be71
3
+ size 2690160480
model-00049-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:27ba1b2bcb575f5dfeb50f6cded516827816be5b71d60ccc48907ecd42f5e4ce
3
- size 2419597456
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:21435491477f6becf27718727bb36535d65f1526407f21210d578396d3ab2c30
3
+ size 2419599024
model-00050-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6b3b52bae9273672b55eb6efb0cd4a37da61703ccadcc99ab7d5174f66a3d06d
3
- size 2419597992
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d8dbef09c158d4eade51514a6bbdcd32534a84d0a9ee9e8cf1ed496e0bc5e14
3
+ size 2419599560
model-00052-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5815794064fe6a4ec045f1ad691fdcd832f8b0992e0bf785f0394b1319ee7fbd
3
- size 2419597840
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:52956343b66a122f14170f74df2ba9ac6692ef5a8253e6b067c551fcdcc60328
3
+ size 2419599400
model-00054-of-000163.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e57c86a6c8cb686f8f3406893c85ca38a40e8ce436a06e74b417e39aea76ae38
3
- size 2419597048
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7d2b0158ae5b7df33093a9ead7e3075ef5ac24fa7efc937152c88dbccbd01aff
3
+ size 2419598608