zeekay committed
Commit d7543e7 · verified · Parent: 6007c90

Upload mlx/config.json with huggingface_hub
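For context, a commit like this is typically produced by huggingface_hub's file-upload API rather than a manual git push. A minimal sketch follows; the repo_id is a placeholder, since the target repository is not named anywhere in this diff.

from huggingface_hub import HfApi

# Minimal sketch of the kind of call that produces a commit like this one.
# repo_id is a placeholder; substitute the actual model repository.
api = HfApi()
api.upload_file(
    path_or_fileobj="mlx/config.json",  # local file to upload
    path_in_repo="mlx/config.json",     # destination path inside the repo
    repo_id="zeekay/some-model",        # placeholder repo id
    commit_message="Upload mlx/config.json with huggingface_hub",
)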

Files changed (1):
  mlx/config.json (+16, -15)
mlx/config.json CHANGED
@@ -1,36 +1,37 @@
 {
   "architectures": [
-    "Qwen2ForCausalLM"
+    "Qwen3ForCausalLM"
   ],
+  "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
   "eos_token_id": 151645,
+  "head_dim": 128,
   "hidden_act": "silu",
-  "hidden_size": 2048,
+  "hidden_size": 2560,
   "initializer_range": 0.02,
-  "intermediate_size": 11008,
-  "max_position_embeddings": 32768,
-  "max_window_layers": 70,
-  "model_type": "qwen2",
-  "num_attention_heads": 16,
+  "intermediate_size": 9728,
+  "max_position_embeddings": 40960,
+  "max_window_layers": 36,
+  "model_type": "qwen3",
+  "num_attention_heads": 32,
   "num_hidden_layers": 36,
-  "num_key_value_heads": 2,
+  "num_key_value_heads": 8,
   "quantization": {
     "group_size": 64,
-    "bits": 4,
-    "mode": "affine"
+    "bits": 4
   },
   "quantization_config": {
     "group_size": 64,
-    "bits": 4,
-    "mode": "affine"
+    "bits": 4
   },
   "rms_norm_eps": 1e-06,
-  "rope_theta": 1000000.0,
-  "sliding_window": 32768,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "sliding_window": null,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.43.1",
+  "transformers_version": "4.51.0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936