Mishamq committed on
Commit
ce2288e
·
verified ·
1 Parent(s): dc86241

Update max_position_embeddings to 131074

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -25,7 +25,7 @@
25
  "mamba_dt_rank": 256,
26
  "mamba_expand": 2,
27
  "mamba_proj_bias": false,
28
- "max_position_embeddings": 8194,
29
  "model_type": "hybridna",
30
  "n_groups": 8,
31
  "norm_before_gate": true,
 
25
  "mamba_dt_rank": 256,
26
  "mamba_expand": 2,
27
  "mamba_proj_bias": false,
28
+ "max_position_embeddings": 131074,
29
  "model_type": "hybridna",
30
  "n_groups": 8,
31
  "norm_before_gate": true,