Commit 6182e1e (verified), committed by qgallouedec (HF Staff) · Parent(s): 86099fa

Upload DeepseekV3ForCausalLM

Files changed (3)
  1. config.json +34 -6
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -4,8 +4,15 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_deepseek.DeepseekV3Config",
+    "AutoModel": "modeling_deepseek.DeepseekV3Model",
+    "AutoModelForCausalLM": "modeling_deepseek.DeepseekV3ForCausalLM"
+  },
   "bos_token_id": 0,
+  "dtype": "float32",
   "eos_token_id": 1,
+  "ep_size": 1,
   "first_k_dense_replace": 3,
   "head_dim": 64,
   "hidden_act": "silu",
@@ -13,9 +20,10 @@
   "initializer_range": 0.02,
   "intermediate_size": 32,
   "kv_lora_rank": 512,
-  "max_position_embeddings": 4096,
+  "max_position_embeddings": 163840,
   "model_type": "deepseek_v3",
   "moe_intermediate_size": 2048,
+  "moe_layer_freq": 1,
   "n_group": 8,
   "n_routed_experts": 256,
   "n_shared_experts": 1,
@@ -24,21 +32,41 @@
   "num_experts_per_tok": 8,
   "num_hidden_layers": 2,
   "num_key_value_heads": 2,
+  "num_nextn_predict_layers": 1,
   "pretraining_tp": 1,
   "q_lora_rank": 1536,
   "qk_head_dim": 192,
   "qk_nope_head_dim": 128,
   "qk_rope_head_dim": 64,
+  "quantization_config": {
+    "activation_scheme": "dynamic",
+    "fmt": "e4m3",
+    "quant_method": "fp8",
+    "weight_block_size": [
+      128,
+      128
+    ]
+  },
   "rms_norm_eps": 1e-06,
   "rope_interleave": true,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
+  "rope_scaling": {
+    "beta_fast": 32.0,
+    "beta_slow": 1.0,
+    "factor": 40.0,
+    "mscale": 1.0,
+    "mscale_all_dim": 1.0,
+    "original_max_position_embeddings": 4096,
+    "rope_type": "yarn",
+    "type": "yarn"
+  },
+  "rope_theta": 10000,
   "routed_scaling_factor": 2.5,
+  "scoring_func": "sigmoid",
   "tie_word_embeddings": false,
   "topk_group": 4,
-  "torch_dtype": "float32",
-  "transformers_version": "4.55.0.dev0",
+  "topk_method": "noaux_tc",
+  "transformers_version": "4.57.0.dev0",
   "use_cache": true,
   "v_head_dim": 128,
-  "vocab_size": 128818
+  "vocab_size": 129280
 }
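
Note: the new rope_scaling block applies YaRN with factor 40.0 on top of original_max_position_embeddings 4096, which is consistent with the updated max_position_embeddings of 163840 (4096 × 40). Below is a minimal sketch of inspecting the updated config with transformers; the repository id is a placeholder (this page does not name the repo), and the revision pin simply reuses the commit hash shown above.

from transformers import AutoConfig

# Placeholder repo id -- substitute the repository this commit was pushed to.
config = AutoConfig.from_pretrained("user/tiny-deepseek-v3", revision="6182e1e")

# YaRN extension added in this commit:
# original_max_position_embeddings (4096) * factor (40.0) = 163840
assert config.max_position_embeddings == 4096 * 40
print(config.rope_scaling["rope_type"])  # "yarn"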
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 1,
-  "transformers_version": "4.55.0.dev0"
+  "transformers_version": "4.57.0.dev0"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:62f612c2df3eedce6aeecb7332b3a0095a68033234accf815a39ea1a6f52106f
-size 22069392
+oid sha256:8ce7d23aa9cca2b358889c05b0b453ee01de251c0a32c938bdb23bbe148e9173
+size 22098960