burnboom committed on
Commit
3390dd7
·
verified ·
1 Parent(s): 32cc2c0

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -44,7 +44,7 @@
44
  ],
45
  "max_position_embeddings": 40960,
46
  "max_window_layers": 28,
47
- "model_type": "qwen",
48
  "num_attention_heads": 16,
49
  "num_hidden_layers": 28,
50
  "num_key_value_heads": 8,
 
44
  ],
45
  "max_position_embeddings": 40960,
46
  "max_window_layers": 28,
47
+ "model_type": "qwen3",
48
  "num_attention_heads": 16,
49
  "num_hidden_layers": 28,
50
  "num_key_value_heads": 8,