anthonym21 committed
Commit: 0acebcd (verified)
Parent: cf4bcac

Update config.json with auto_map and HF fields

Files changed (1): config.json (+13 / -9)
config.json CHANGED
```diff
@@ -1,5 +1,10 @@
 {
-  "architecture": "Eve-2-MoE",
+  "model_type": "eve-moe",
+  "architectures": ["EveMoEForCausalLM"],
+  "auto_map": {
+    "AutoConfig": "configuration_eve.EveConfig",
+    "AutoModelForCausalLM": "modeling_eve.EveMoEForCausalLM"
+  },
   "vocab_size": 50304,
   "n_layer": 12,
   "n_embd": 512,
@@ -10,12 +15,11 @@
   "top_k": 2,
   "expert_intermediate_size": 1408,
   "shared_expert_intermediate_size": 1408,
+  "router_aux_loss_coef": 0.01,
   "rope_theta": 10000.0,
-  "auto_map": {
-    "AutoConfig": "configuration_eve.EveConfig",
-    "AutoModelForCausalLM": "modeling_eve.DeepSeekMoE"
-  },
-  "architectures": [
-    "DeepSeekMoE"
-  ]
-}
+  "use_checkpointing": false,
+  "tie_word_embeddings": true,
+  "eos_token_id": 50256,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.46.0"
+}
```
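
The new `model_type` and `auto_map` entries are what let `transformers` resolve the custom classes shipped in the repo (`configuration_eve.py`, `modeling_eve.py`) when loading with `trust_remote_code=True`. A minimal loading sketch; the repo id below is a hypothetical placeholder, not taken from this page:

```python
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "anthonym21/eve-moe"  # hypothetical repo id, for illustration only

# AutoConfig follows auto_map["AutoConfig"] to configuration_eve.EveConfig.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# AutoModelForCausalLM follows auto_map["AutoModelForCausalLM"] to
# modeling_eve.EveMoEForCausalLM. Without auto_map (and a model_type),
# this call would fail, since "eve-moe" is not registered upstream.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
```

Note that the old config pointed `AutoModelForCausalLM` at `modeling_eve.DeepSeekMoE`; this commit renames the exported class to `EveMoEForCausalLM` so that `architectures` and `auto_map` agree.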
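
The added `router_aux_loss_coef: 0.01` is the weight typically given to a load-balancing auxiliary loss on the MoE router. A sketch of the standard Switch-Transformer-style formulation, under the assumption that EveMoE follows it (the actual implementation lives in `modeling_eve.py` and is not part of this diff):

```python
import torch
import torch.nn.functional as F

def load_balancing_aux_loss(router_logits: torch.Tensor,
                            top_k: int = 2,
                            aux_loss_coef: float = 0.01) -> torch.Tensor:
    """Switch-style balance penalty; encourages uniform expert usage."""
    num_experts = router_logits.shape[-1]
    probs = F.softmax(router_logits, dim=-1)      # (tokens, experts)
    _, selected = probs.topk(top_k, dim=-1)       # chosen experts per token
    # Fraction of tokens dispatched to each expert across the top-k choices.
    dispatch = F.one_hot(selected, num_experts).float().sum(dim=1)
    tokens_per_expert = dispatch.mean(dim=0)
    # Mean routing probability mass each expert receives.
    prob_per_expert = probs.mean(dim=0)
    # The product is smallest when both are uniform; scaled by the config coef.
    return aux_loss_coef * num_experts * (tokens_per_expert * prob_per_expert).sum()
```

With `top_k: 2` from this config, each token contributes to two experts, so `tokens_per_expert` sums to 2 rather than 1; the coefficient only scales how strongly the penalty is added to the language-modeling loss.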