KitsuVp committed on
Commit
a29f88e
·
verified ·
1 Parent(s): 04da2a7

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +4 -3
config.json CHANGED
@@ -2,14 +2,15 @@
2
  "architectures": [
3
  "NeoLLMForCausalLM"
4
  ],
5
- "attention_bias": false,
6
- "attention_dropout": 0.1,
7
- "dropout_rate": 0.1,
8
  "auto_map": {
9
  "AutoConfig": "configuration_neollm.NeoLLMConfig",
10
  "AutoModel": "modeling_neollm.NeoLLMModel",
11
  "AutoModelForCausalLM": "modeling_neollm.NeoLLMForCausalLM"
12
  },
 
 
 
 
13
  "dtype": "bfloat16",
14
  "eos_token_id": 151645,
15
  "fan_ratio": 0.125,
 
2
  "architectures": [
3
  "NeoLLMForCausalLM"
4
  ],
 
 
 
5
  "auto_map": {
6
  "AutoConfig": "configuration_neollm.NeoLLMConfig",
7
  "AutoModel": "modeling_neollm.NeoLLMModel",
8
  "AutoModelForCausalLM": "modeling_neollm.NeoLLMForCausalLM"
9
  },
10
+ "attention_bias": false,
11
+ "attention_dropout": 0.1,
12
+ "dropout_rate": 0.1,
13
+
14
  "dtype": "bfloat16",
15
  "eos_token_id": 151645,
16
  "fan_ratio": 0.125,