abhishekchohan committed on
Commit
bb60b96
·
verified ·
1 Parent(s): f6e8e15

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -75,7 +75,7 @@
75
  "full_attention",
76
  "full_attention"
77
  ],
78
- "max_position_embeddings": 32768,
79
  "max_window_layers": 64,
80
  "model_type": "qwen2",
81
  "num_attention_heads": 40,
 
75
  "full_attention",
76
  "full_attention"
77
  ],
78
+ "max_position_embeddings": 131072,
79
  "max_window_layers": 64,
80
  "model_type": "qwen2",
81
  "num_attention_heads": 40,