thehekimoghlu committed on
Commit
f3d8455
·
verified ·
1 Parent(s): a2162b6

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -5,8 +5,8 @@
5
  "attention_dropout": 0.0,
6
  "auto_map": {
7
  "AutoConfig": "configuration.OpenModelConfig",
8
- "AutoModel": "modeling_bailing_moe_v2.OpenModelModel",
9
- "AutoModelForCausalLM": "modeling_bailing_moe_v2.OpenModelForCausalLM"
10
  },
11
  "num_hidden_layers": 80,
12
  "hidden_size": 8192,
 
5
  "attention_dropout": 0.0,
6
  "auto_map": {
7
  "AutoConfig": "configuration.OpenModelConfig",
8
+ "AutoModel": "modeling.OpenModelModel",
9
+ "AutoModelForCausalLM": "modeling.OpenModelForCausalLM"
10
  },
11
  "num_hidden_layers": 80,
12
  "hidden_size": 8192,