TAnGx1411 committed
Commit 8d2b607 · verified · 1 parent: 9738b7a

Upload model trained with Unsloth 2x faster

Files changed (2)
  1. adapter_config.json +239 -0
  2. adapter_model.safetensors +3 -0
adapter_config.json ADDED
@@ -0,0 +1,239 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": {
+     "base_model_class": "GptOssForCausalLM",
+     "parent_library": "transformers.models.gpt_oss.modeling_gpt_oss",
+     "unsloth_fixed": true
+   },
+   "base_model_name_or_path": "unsloth/gpt-oss-20b-unsloth-bnb-4bit",
+   "bias": "none",
+   "corda_config": null,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 16,
+   "lora_bias": false,
+   "lora_dropout": 0,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "qalora_group_size": 16,
+   "r": 8,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "down_proj",
+     "up_proj",
+     "gate_proj",
+     "q_proj",
+     "k_proj",
+     "v_proj",
+     "o_proj"
+   ],
+   "target_parameters": [
+     "model.layers.7.mlp.experts.gate_up_projs.0.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.1.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.2.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.3.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.4.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.5.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.6.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.7.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.8.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.9.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.10.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.11.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.12.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.13.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.14.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.15.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.16.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.17.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.18.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.19.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.20.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.21.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.22.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.23.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.24.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.25.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.26.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.27.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.28.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.29.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.30.weight",
+     "model.layers.7.mlp.experts.gate_up_projs.31.weight",
+     "model.layers.7.mlp.experts.down_projs.0.weight",
+     "model.layers.7.mlp.experts.down_projs.1.weight",
+     "model.layers.7.mlp.experts.down_projs.2.weight",
+     "model.layers.7.mlp.experts.down_projs.3.weight",
+     "model.layers.7.mlp.experts.down_projs.4.weight",
+     "model.layers.7.mlp.experts.down_projs.5.weight",
+     "model.layers.7.mlp.experts.down_projs.6.weight",
+     "model.layers.7.mlp.experts.down_projs.7.weight",
+     "model.layers.7.mlp.experts.down_projs.8.weight",
+     "model.layers.7.mlp.experts.down_projs.9.weight",
+     "model.layers.7.mlp.experts.down_projs.10.weight",
+     "model.layers.7.mlp.experts.down_projs.11.weight",
+     "model.layers.7.mlp.experts.down_projs.12.weight",
+     "model.layers.7.mlp.experts.down_projs.13.weight",
+     "model.layers.7.mlp.experts.down_projs.14.weight",
+     "model.layers.7.mlp.experts.down_projs.15.weight",
+     "model.layers.7.mlp.experts.down_projs.16.weight",
+     "model.layers.7.mlp.experts.down_projs.17.weight",
+     "model.layers.7.mlp.experts.down_projs.18.weight",
+     "model.layers.7.mlp.experts.down_projs.19.weight",
+     "model.layers.7.mlp.experts.down_projs.20.weight",
+     "model.layers.7.mlp.experts.down_projs.21.weight",
+     "model.layers.7.mlp.experts.down_projs.22.weight",
+     "model.layers.7.mlp.experts.down_projs.23.weight",
+     "model.layers.7.mlp.experts.down_projs.24.weight",
+     "model.layers.7.mlp.experts.down_projs.25.weight",
+     "model.layers.7.mlp.experts.down_projs.26.weight",
+     "model.layers.7.mlp.experts.down_projs.27.weight",
+     "model.layers.7.mlp.experts.down_projs.28.weight",
+     "model.layers.7.mlp.experts.down_projs.29.weight",
+     "model.layers.7.mlp.experts.down_projs.30.weight",
+     "model.layers.7.mlp.experts.down_projs.31.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.0.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.1.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.2.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.3.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.4.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.5.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.6.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.7.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.8.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.9.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.10.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.11.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.12.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.13.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.14.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.15.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.16.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.17.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.18.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.19.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.20.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.21.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.22.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.23.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.24.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.25.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.26.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.27.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.28.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.29.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.30.weight",
+     "model.layers.15.mlp.experts.gate_up_projs.31.weight",
+     "model.layers.15.mlp.experts.down_projs.0.weight",
+     "model.layers.15.mlp.experts.down_projs.1.weight",
+     "model.layers.15.mlp.experts.down_projs.2.weight",
+     "model.layers.15.mlp.experts.down_projs.3.weight",
+     "model.layers.15.mlp.experts.down_projs.4.weight",
+     "model.layers.15.mlp.experts.down_projs.5.weight",
+     "model.layers.15.mlp.experts.down_projs.6.weight",
+     "model.layers.15.mlp.experts.down_projs.7.weight",
+     "model.layers.15.mlp.experts.down_projs.8.weight",
+     "model.layers.15.mlp.experts.down_projs.9.weight",
+     "model.layers.15.mlp.experts.down_projs.10.weight",
+     "model.layers.15.mlp.experts.down_projs.11.weight",
+     "model.layers.15.mlp.experts.down_projs.12.weight",
+     "model.layers.15.mlp.experts.down_projs.13.weight",
+     "model.layers.15.mlp.experts.down_projs.14.weight",
+     "model.layers.15.mlp.experts.down_projs.15.weight",
+     "model.layers.15.mlp.experts.down_projs.16.weight",
+     "model.layers.15.mlp.experts.down_projs.17.weight",
+     "model.layers.15.mlp.experts.down_projs.18.weight",
+     "model.layers.15.mlp.experts.down_projs.19.weight",
+     "model.layers.15.mlp.experts.down_projs.20.weight",
+     "model.layers.15.mlp.experts.down_projs.21.weight",
+     "model.layers.15.mlp.experts.down_projs.22.weight",
+     "model.layers.15.mlp.experts.down_projs.23.weight",
+     "model.layers.15.mlp.experts.down_projs.24.weight",
+     "model.layers.15.mlp.experts.down_projs.25.weight",
+     "model.layers.15.mlp.experts.down_projs.26.weight",
+     "model.layers.15.mlp.experts.down_projs.27.weight",
+     "model.layers.15.mlp.experts.down_projs.28.weight",
+     "model.layers.15.mlp.experts.down_projs.29.weight",
+     "model.layers.15.mlp.experts.down_projs.30.weight",
+     "model.layers.15.mlp.experts.down_projs.31.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.0.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.1.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.2.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.3.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.4.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.5.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.6.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.7.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.8.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.9.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.10.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.11.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.12.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.13.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.14.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.15.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.16.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.17.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.18.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.19.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.20.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.21.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.22.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.23.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.24.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.25.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.26.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.27.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.28.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.29.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.30.weight",
+     "model.layers.23.mlp.experts.gate_up_projs.31.weight",
+     "model.layers.23.mlp.experts.down_projs.0.weight",
+     "model.layers.23.mlp.experts.down_projs.1.weight",
+     "model.layers.23.mlp.experts.down_projs.2.weight",
+     "model.layers.23.mlp.experts.down_projs.3.weight",
+     "model.layers.23.mlp.experts.down_projs.4.weight",
+     "model.layers.23.mlp.experts.down_projs.5.weight",
+     "model.layers.23.mlp.experts.down_projs.6.weight",
+     "model.layers.23.mlp.experts.down_projs.7.weight",
+     "model.layers.23.mlp.experts.down_projs.8.weight",
+     "model.layers.23.mlp.experts.down_projs.9.weight",
+     "model.layers.23.mlp.experts.down_projs.10.weight",
+     "model.layers.23.mlp.experts.down_projs.11.weight",
+     "model.layers.23.mlp.experts.down_projs.12.weight",
+     "model.layers.23.mlp.experts.down_projs.13.weight",
+     "model.layers.23.mlp.experts.down_projs.14.weight",
+     "model.layers.23.mlp.experts.down_projs.15.weight",
+     "model.layers.23.mlp.experts.down_projs.16.weight",
+     "model.layers.23.mlp.experts.down_projs.17.weight",
+     "model.layers.23.mlp.experts.down_projs.18.weight",
+     "model.layers.23.mlp.experts.down_projs.19.weight",
+     "model.layers.23.mlp.experts.down_projs.20.weight",
+     "model.layers.23.mlp.experts.down_projs.21.weight",
+     "model.layers.23.mlp.experts.down_projs.22.weight",
+     "model.layers.23.mlp.experts.down_projs.23.weight",
+     "model.layers.23.mlp.experts.down_projs.24.weight",
+     "model.layers.23.mlp.experts.down_projs.25.weight",
+     "model.layers.23.mlp.experts.down_projs.26.weight",
+     "model.layers.23.mlp.experts.down_projs.27.weight",
+     "model.layers.23.mlp.experts.down_projs.28.weight",
+     "model.layers.23.mlp.experts.down_projs.29.weight",
+     "model.layers.23.mlp.experts.down_projs.30.weight",
+     "model.layers.23.mlp.experts.down_projs.31.weight"
+   ],
+   "task_type": "CAUSAL_LM",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
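
The config above is a standard PEFT LoRA adapter description: rank-8 LoRA (r=8, lora_alpha=16, no dropout) on the attention and MLP projections named in target_modules, plus, via target_parameters, the per-expert MoE weights (gate_up_projs and down_projs for all 32 experts) in layers 7, 15, and 23. Below is a minimal loading sketch, not part of this commit; the adapter repository id is a placeholder, and targeting bare parameters this way requires a recent PEFT release.

```python
# Minimal loading sketch (not part of this commit).
# "TAnGx1411/<adapter-repo>" is a placeholder -- substitute the actual repo id.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "unsloth/gpt-oss-20b-unsloth-bnb-4bit"  # base_model_name_or_path above

# The base checkpoint is pre-quantized with bitsandbytes 4-bit, so loading it
# requires the bitsandbytes package and a CUDA GPU.
base = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(base_id)

# PeftModel reads adapter_config.json to decide which modules and parameters
# to wrap with LoRA, then loads the weights from adapter_model.safetensors.
model = PeftModel.from_pretrained(base, "TAnGx1411/<adapter-repo>")
```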
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:933eec8e27bf9d74bb461787ca3774c896323becddbf7e3a4e15c654e1335d7a
+ size 60242672
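
adapter_model.safetensors is stored with Git LFS, so the commit adds only a pointer file recording the blob's SHA-256 and byte size rather than the weights themselves. A minimal verification sketch (not part of this commit), assuming the actual file has been downloaded locally under the same name:

```python
# Recompute the SHA-256 of a downloaded adapter_model.safetensors and compare
# it to the oid/size recorded in the LFS pointer above.
import hashlib
from pathlib import Path

EXPECTED_OID = "933eec8e27bf9d74bb461787ca3774c896323becddbf7e3a4e15c654e1335d7a"
EXPECTED_SIZE = 60242672  # bytes, per the pointer's "size" line

def matches_pointer(path: str) -> bool:
    data = Path(path).read_bytes()  # ~60 MB, fine to read in one pass
    return (len(data) == EXPECTED_SIZE
            and hashlib.sha256(data).hexdigest() == EXPECTED_OID)

print(matches_pointer("adapter_model.safetensors"))
```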