exlaw committed on
Commit
62b836d
·
verified ·
1 Parent(s): 7201add

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -54,7 +54,7 @@
54
  "full_attention",
55
  "full_attention"
56
  ],
57
- "max_position_embeddings": 16384,
58
  "max_window_layers": 28,
59
  "model_type": "qwen3_scale_seq",
60
  "num_attention_heads": 32,
 
54
  "full_attention",
55
  "full_attention"
56
  ],
57
+ "max_position_embeddings": 131072,
58
  "max_window_layers": 28,
59
  "model_type": "qwen3_scale_seq",
60
  "num_attention_heads": 32,