Upload DistillationWrapper

Files changed:
- config.json (+2 -2)
- model.safetensors (+2 -2)
config.json
CHANGED
@@ -7,7 +7,7 @@
   "classifier_dropout": null,
   "dtype": "float32",
   "embedding_size": 128,
-  "expert_intermediate_size":
+  "expert_intermediate_size": 4096,
   "group_depth": 4,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
@@ -23,7 +23,7 @@
   "num_attention_heads": 16,
   "num_expert_modules": 2,
   "num_experts": 8,
-  "num_hidden_layers":
+  "num_hidden_layers": 16,
   "pad_token_id": 0,
   "router_jitter_noise": 0.01,
   "top_k": 2,
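For reference, a minimal sketch of sanity-checking the fields this commit touches after cloning or downloading the repo. The file path is a placeholder for a local copy of config.json; this is not part of the commit itself.

import json

# Load the updated config (path is a placeholder; point it at your local checkout).
with open("config.json") as f:
    cfg = json.load(f)

# Fields changed in this commit.
print(cfg["expert_intermediate_size"])   # expected: 4096
print(cfg["num_hidden_layers"])          # expected: 16

# MoE-related settings already present in the config.
print(cfg["num_experts"], cfg["top_k"])  # expected: 8 2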
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:bf0ed338d33f8abaa0aa811e0431472b1cd047436ace1d906450dcf72b400f14
+size 304988160
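model.safetensors is stored via Git LFS, so the diff above changes only the pointer file: the new checkpoint is identified by its SHA-256 and a size of 304,988,160 bytes. With the config's "dtype": "float32" (4 bytes per parameter) that works out to roughly 76M parameters, ignoring the small safetensors header. A minimal sketch for verifying a downloaded copy against the new pointer (the local path is an assumption):

import hashlib
import os

# Verify a downloaded model.safetensors against the LFS pointer above.
# The path is a placeholder; point it at your local copy.
path = "model.safetensors"

expected_oid = "bf0ed338d33f8abaa0aa811e0431472b1cd047436ace1d906450dcf72b400f14"
expected_size = 304988160

# Hash the file in 1 MiB chunks to keep memory use flat.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("checkpoint matches the LFS pointer")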