ybelkada committed
Commit 25978d5 · Parent: bd98703

Update config.json

Files changed (1):
  config.json (+2, -2)
config.json CHANGED
@@ -13,9 +13,9 @@
   "intermediate_size": 64,
   "max_position_embeddings": 2048,
   "model_type": "llama",
-  "num_attention_heads": 4,
+  "num_attention_heads": 64,
   "num_hidden_layers": 2,
-  "num_key_value_heads": 2,
+  "num_key_value_heads": 8,
   "pad_token_id": -1,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,