alphaXiv committed on
Commit
d0bdc58
·
verified ·
1 Parent(s): f1d268c

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +66 -93
config.json CHANGED
@@ -1,102 +1,75 @@
1
  {
2
  "architectures": [
3
- "Qwen3_5ForConditionalGeneration"
4
  ],
 
 
 
 
5
  "dtype": "bfloat16",
6
- "image_token_id": 248056,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  "model_type": "qwen3_5",
8
- "text_config": {
9
- "attention_bias": false,
10
- "attention_dropout": 0.0,
11
- "attn_output_gate": true,
12
- "bos_token_id": null,
13
- "dtype": "bfloat16",
14
- "eos_token_id": 248044,
15
- "full_attention_interval": 4,
16
- "head_dim": 256,
17
- "hidden_act": "silu",
18
- "hidden_size": 2048,
19
- "initializer_range": 0.02,
20
- "intermediate_size": 6144,
21
- "layer_types": [
22
- "linear_attention",
23
- "linear_attention",
24
- "linear_attention",
25
- "full_attention",
26
- "linear_attention",
27
- "linear_attention",
28
- "linear_attention",
29
- "full_attention",
30
- "linear_attention",
31
- "linear_attention",
32
- "linear_attention",
33
- "full_attention",
34
- "linear_attention",
35
- "linear_attention",
36
- "linear_attention",
37
- "full_attention",
38
- "linear_attention",
39
- "linear_attention",
40
- "linear_attention",
41
- "full_attention",
42
- "linear_attention",
43
- "linear_attention",
44
- "linear_attention",
45
- "full_attention"
46
  ],
47
- "linear_conv_kernel_dim": 4,
48
- "linear_key_head_dim": 128,
49
- "linear_num_key_heads": 16,
50
- "linear_num_value_heads": 16,
51
- "linear_value_head_dim": 128,
52
- "mamba_ssm_dtype": "float32",
53
- "max_position_embeddings": 262144,
54
- "mlp_only_layers": [],
55
- "model_type": "qwen3_5_text",
56
- "mtp_num_hidden_layers": 1,
57
- "mtp_use_dedicated_embeddings": false,
58
- "num_attention_heads": 8,
59
- "num_hidden_layers": 24,
60
- "num_key_value_heads": 2,
61
- "pad_token_id": null,
62
  "partial_rotary_factor": 0.25,
63
- "rms_norm_eps": 1e-06,
64
- "rope_parameters": {
65
- "mrope_interleaved": true,
66
- "mrope_section": [
67
- 11,
68
- 11,
69
- 10
70
- ],
71
- "partial_rotary_factor": 0.25,
72
- "rope_theta": 10000000,
73
- "rope_type": "default"
74
- },
75
- "tie_word_embeddings": true,
76
- "use_cache": true,
77
- "vocab_size": 248320
78
  },
79
  "tie_word_embeddings": true,
80
- "transformers_version": "5.3.0",
81
- "use_cache": false,
82
- "video_token_id": 248057,
83
- "vision_config": {
84
- "deepstack_visual_indexes": [],
85
- "depth": 24,
86
- "dtype": "bfloat16",
87
- "hidden_act": "gelu_pytorch_tanh",
88
- "hidden_size": 1024,
89
- "in_channels": 3,
90
- "initializer_range": 0.02,
91
- "intermediate_size": 4096,
92
- "model_type": "qwen3_5",
93
- "num_heads": 16,
94
- "num_position_embeddings": 2304,
95
- "out_hidden_size": 2048,
96
- "patch_size": 16,
97
- "spatial_merge_size": 2,
98
- "temporal_patch_size": 2
99
- },
100
- "vision_end_token_id": 248054,
101
- "vision_start_token_id": 248053
102
- }
 
1
  {
2
  "architectures": [
3
+ "Qwen3_5ForCausalLM"
4
  ],
5
+ "attention_bias": false,
6
+ "attention_dropout": 0.0,
7
+ "attn_output_gate": true,
8
+ "bos_token_id": null,
9
  "dtype": "bfloat16",
10
+ "eos_token_id": 248044,
11
+ "full_attention_interval": 4,
12
+ "head_dim": 256,
13
+ "hidden_act": "silu",
14
+ "hidden_size": 1024,
15
+ "initializer_range": 0.02,
16
+ "intermediate_size": 3584,
17
+ "layer_types": [
18
+ "linear_attention",
19
+ "linear_attention",
20
+ "linear_attention",
21
+ "full_attention",
22
+ "linear_attention",
23
+ "linear_attention",
24
+ "linear_attention",
25
+ "full_attention",
26
+ "linear_attention",
27
+ "linear_attention",
28
+ "linear_attention",
29
+ "full_attention",
30
+ "linear_attention",
31
+ "linear_attention",
32
+ "linear_attention",
33
+ "full_attention",
34
+ "linear_attention",
35
+ "linear_attention",
36
+ "linear_attention",
37
+ "full_attention",
38
+ "linear_attention",
39
+ "linear_attention",
40
+ "linear_attention",
41
+ "full_attention"
42
+ ],
43
+ "linear_conv_kernel_dim": 4,
44
+ "linear_key_head_dim": 128,
45
+ "linear_num_key_heads": 16,
46
+ "linear_num_value_heads": 16,
47
+ "linear_value_head_dim": 128,
48
+ "mamba_ssm_dtype": "float32",
49
+ "max_position_embeddings": 262144,
50
+ "mlp_only_layers": [],
51
  "model_type": "qwen3_5",
52
+ "mtp_num_hidden_layers": 1,
53
+ "mtp_use_dedicated_embeddings": false,
54
+ "num_attention_heads": 8,
55
+ "num_hidden_layers": 24,
56
+ "num_key_value_heads": 2,
57
+ "pad_token_id": null,
58
+ "partial_rotary_factor": 0.25,
59
+ "rms_norm_eps": 1e-06,
60
+ "rope_parameters": {
61
+ "mrope_interleaved": true,
62
+ "mrope_section": [
63
+ 11,
64
+ 11,
65
+ 10
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  ],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
67
  "partial_rotary_factor": 0.25,
68
+ "rope_theta": 10000000,
69
+ "rope_type": "default"
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  },
71
  "tie_word_embeddings": true,
72
+ "transformers_version": "5.5.4",
73
+ "use_cache": true,
74
+ "vocab_size": 248320
75
+ }