VirtualInsight committed on
Commit
21afe4a
·
verified ·
1 Parent(s): 1917715

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +12 -9
config.json CHANGED
@@ -1,18 +1,21 @@
1
  {
 
 
2
  "vocab_size": 32000,
3
  "hidden_size": 768,
4
- "n_heads": 12,
5
- "n_kv_heads": 4,
6
- "n_kv_groups": 3,
 
7
  "head_dim": 64,
8
- "n_layers": 12,
9
  "attention_bias": false,
10
- "intermediate_size": 3072,
11
  "mlp_bias": false,
12
- "eps": 1e-5,
13
- "dropout": 0.0,
14
  "max_position_embeddings": 2048,
15
  "pre_norm": true,
16
- "tie_weights": true,
 
17
  "max_seq_len": 2048
18
- }
 
1
  {
2
+ "architectures": ["LumenModel"],
3
+ "model_type": "lumen",
4
  "vocab_size": 32000,
5
  "hidden_size": 768,
6
+ "num_attention_heads": 12,
7
+ "num_key_value_heads": 4,
8
+ "num_hidden_layers": 12,
9
+ "intermediate_size": 3072,
10
  "head_dim": 64,
11
+ "n_kv_groups": 3,
12
  "attention_bias": false,
 
13
  "mlp_bias": false,
14
+ "layer_norm_eps": 1e-5,
15
+ "hidden_dropout_prob": 0.0,
16
  "max_position_embeddings": 2048,
17
  "pre_norm": true,
18
+ "tie_word_embeddings": true,
19
+ "torch_dtype": "bfloat16",
20
  "max_seq_len": 2048
21
+ }