JingzeShi committed (verified)
Commit 30f6869 · 1 Parent(s): ef53d8e

Upload DogeForCausalLM

Files changed (2):
  1. config.json +11 -7
  2. model.safetensors +2 -2
config.json CHANGED
@@ -11,23 +11,27 @@
   "bos_token_id": 0,
   "dynamic_mask_ratio": 0.0,
   "eos_token_id": 1,
+  "expert_retrieval_size": 256,
   "hidden_act": "silu",
   "hidden_bias": false,
   "hidden_dropout": 0.0,
-  "hidden_size": 1024,
+  "hidden_size": 512,
   "initializer_range": 0.02,
-  "intermediate_size": 2048,
+  "intermediate_size": 1024,
   "is_moe": false,
   "keep_window_size": 2048,
   "max_position_embeddings": 2048,
   "mlp_bias": false,
   "model_type": "doge",
   "norm_topk_prob": false,
-  "num_attention_heads": 8,
-  "num_experts": 2048,
-  "num_experts_per_tok": 8,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 4,
+  "num_attention_heads": 4,
+  "num_cdmoe_experts": 16348,
+  "num_cdmoe_experts_per_head": 8,
+  "num_cdmoe_heads": 4,
+  "num_experts": 16384,
+  "num_experts_per_tok": 64,
+  "num_hidden_layers": 16,
+  "num_key_value_heads": 2,
   "output_router_logits": false,
   "pad_token_id": 2,
   "rms_norm_eps": 1e-06,
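Net effect of this change: hidden_size and intermediate_size are halved (1024 → 512, 2048 → 1024), num_hidden_layers drops from 32 to 16, attention heads from 8 query / 4 key-value to 4 / 2, and the CDMoE retrieval fields (expert_retrieval_size, num_cdmoe_*) are new. A minimal sketch of loading and sanity-checking the updated config with transformers' AutoConfig follows; the repository id is a placeholder, and the custom "doge" architecture is assumed to be registered through trust_remote_code:

# A minimal sketch, assuming the "doge" model type is provided by custom
# code in the repository (hence trust_remote_code=True). The repo id below
# is a placeholder, not necessarily the repository this commit belongs to.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "your-namespace/your-doge-model",  # placeholder repo id
    trust_remote_code=True,
)

# Sanity-check a few of the fields changed in this commit.
assert config.hidden_size == 512
assert config.num_hidden_layers == 16
assert config.num_attention_heads == 4
assert config.num_key_value_heads == 2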
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:df1fdb9228d0e7857ccbda40105b0ac0234764a75db9ca3c24ee831fb86941f4
-size 671661936
+oid sha256:0c0974eb9bdd4c39eb840024424b9ca67186461d7ad8a7b414289cedd3f2289d
+size 109174080
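model.safetensors is tracked with Git LFS, so the diff above only changes the pointer file: the new object's sha256 and its size, which drops from 671,661,936 bytes (~672 MB) to 109,174,080 bytes (~109 MB), in line with the smaller architecture. A standard-library sketch for verifying a downloaded copy against the new pointer (the local path is a placeholder):

# A minimal sketch: check a downloaded model.safetensors against the
# sha256 and byte size recorded in the new Git LFS pointer. The path
# below is a placeholder for wherever the file was actually saved.
import hashlib
import os

EXPECTED_OID = "0c0974eb9bdd4c39eb840024424b9ca67186461d7ad8a7b414289cedd3f2289d"
EXPECTED_SIZE = 109174080

path = "model.safetensors"  # placeholder local path

# Size check is cheap, so do it first.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Hash in 1 MiB chunks to avoid loading the whole file into memory.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")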