Upload config.json with huggingface_hub
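This commit message is the default that huggingface_hub generates when a file is pushed with HfApi.upload_file. A minimal sketch of how a commit like this is produced ("your-org/your-model" is a hypothetical placeholder, not the actual repository):

from huggingface_hub import HfApi

api = HfApi()

# Push the edited config.json to the Hub. The commit message below is
# the library's default for this call; "your-org/your-model" is a
# hypothetical placeholder for the target repo.
api.upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="your-org/your-model",
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)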
config.json (+2 -6)
@@ -4,11 +4,7 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_deepseek.DeepseekV2Config",
-    "AutoModel": "modeling_deepseek.DeepseekV2Model",
-    "AutoModelForCausalLM": "modeling_deepseek.DeepseekV2ForCausalLM"
-  },
+
   "aux_loss_alpha": 0.001,
   "bos_token_id": 100000,
   "eos_token_id": 100001,
@@ -19,7 +15,7 @@
   "intermediate_size": 10944,
   "kv_lora_rank": 512,
   "max_position_embeddings": 163840,
-  "model_type": "deepseek_v2",
+  "model_type": "causal_lm",
   "moe_intermediate_size": 1408,
   "moe_layer_freq": 1,
   "n_group": 1,
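Net effect of the diff: the auto_map block, which points the transformers Auto classes at the repository's custom configuration_deepseek.py / modeling_deepseek.py code, is removed, and model_type is rewritten from "deepseek_v2" to "causal_lm". A quick way to confirm what ended up on the Hub (a sketch; the repo id is again a hypothetical placeholder):

import json
from huggingface_hub import hf_hub_download

# Download the committed config.json and inspect the edited fields.
# "your-org/your-model" is a hypothetical placeholder for the repository.
path = hf_hub_download(repo_id="your-org/your-model", filename="config.json")
with open(path) as f:
    config = json.load(f)

print(config["model_type"])   # "causal_lm" after this commit
print("auto_map" in config)   # False: the custom-code mapping was removed

Worth noting: with auto_map gone, AutoConfig.from_pretrained(..., trust_remote_code=True) can no longer discover the repo's custom DeepseekV2 classes from config.json, and "causal_lm" does not correspond to a model type registered in transformers, so the Auto classes will likely fail to resolve this config on their own.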