Update config.json with correct max_position_embeddings (32768 → 131072)

#14
by Molbap HF Staff - opened
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -10,7 +10,7 @@
10
  "hidden_size": 6144,
11
  "initializer_range": 0.02,
12
  "intermediate_size": 16384,
13
- "max_position_embeddings": 32768,
14
  "model_type": "mistral",
15
  "num_attention_heads": 48,
16
  "num_hidden_layers": 56,
 
10
  "hidden_size": 6144,
11
  "initializer_range": 0.02,
12
  "intermediate_size": 16384,
13
+ "max_position_embeddings": 131072,
14
  "model_type": "mistral",
15
  "num_attention_heads": 48,
16
  "num_hidden_layers": 56,