woodchen7 committed
Commit 18bd225 · verified · 1 parent: 7b947fd

Upload config.json with huggingface_hub
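The commit message suggests the file was pushed with huggingface_hub's upload API; a minimal sketch of that call is below, assuming a logged-in token, with the repo id as a placeholder rather than the actual destination repo.

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login`

# "user/model-repo" is a placeholder; substitute the actual repo id.
api.upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="user/model-repo",
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)
```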

Files changed (1):
config.json +154 -0
config.json ADDED
@@ -0,0 +1,154 @@
+ {
+   "architectures": [
+     "Qwen2_5_VLForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "image_token_id": 151655,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 128000,
+   "max_window_layers": 70,
+   "model_type": "qwen2_5_vl",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 36,
+   "num_key_value_heads": 2,
+   "quantization_config": {
+     "activation_scheme": "dynamic",
+     "ignored_layers": [
+       "model.visual.patch_embed.proj",
+       "model.lm_head",
+       "model.language_model.embed_tokens",
+       "model.visual.merger.mlp.0",
+       "model.visual.merger.mlp.2",
+       "lm_head"
+     ],
+     "quant_method": "fp8"
+   },
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sliding_window": 32768,
+   "text_config": {
+     "architectures": [
+       "Qwen2_5_VLForConditionalGeneration"
+     ],
+     "attention_dropout": 0.0,
+     "bos_token_id": 151643,
+     "eos_token_id": 151645,
+     "hidden_act": "silu",
+     "hidden_size": 2048,
+     "image_token_id": null,
+     "initializer_range": 0.02,
+     "intermediate_size": 11008,
+     "layer_types": [
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention"
+     ],
+     "max_position_embeddings": 128000,
+     "max_window_layers": 70,
+     "model_type": "qwen2_5_vl_text",
+     "num_attention_heads": 16,
+     "num_hidden_layers": 36,
+     "num_key_value_heads": 2,
+     "rms_norm_eps": 1e-06,
+     "rope_scaling": {
+       "mrope_section": [
+         16,
+         24,
+         24
+       ],
+       "rope_type": "default",
+       "type": "default"
+     },
+     "rope_theta": 1000000.0,
+     "sliding_window": null,
+     "tie_word_embeddings": true,
+     "torch_dtype": "bfloat16",
+     "use_cache": true,
+     "use_sliding_window": false,
+     "video_token_id": null,
+     "vision_end_token_id": 151653,
+     "vision_start_token_id": 151652,
+     "vision_token_id": 151654,
+     "vocab_size": 151936
+   },
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.53.3",
+   "use_cache": false,
+   "use_sliding_window": false,
+   "video_token_id": 151656,
+   "vision_config": {
+     "depth": 32,
+     "fullatt_block_indexes": [
+       7,
+       15,
+       23,
+       31
+     ],
+     "hidden_act": "silu",
+     "hidden_size": 1280,
+     "in_channels": 3,
+     "in_chans": 3,
+     "initializer_range": 0.02,
+     "intermediate_size": 3420,
+     "model_type": "qwen2_5_vl",
+     "num_heads": 16,
+     "out_hidden_size": 2048,
+     "patch_size": 14,
+     "spatial_merge_size": 2,
+     "spatial_patch_size": 14,
+     "temporal_patch_size": 2,
+     "tokens_per_second": 2,
+     "window_size": 112
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 151936
+ }
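A minimal sketch for sanity-checking the uploaded config, assuming it is saved locally as config.json; the derived numbers (GQA group size, head dim, rotary split) follow directly from the values above and use only the standard library.

```python
import json

with open("config.json") as f:
    cfg = json.load(f)

heads = cfg["num_attention_heads"]      # 16
kv_heads = cfg["num_key_value_heads"]   # 2
head_dim = cfg["hidden_size"] // heads  # 2048 // 16 = 128

# Grouped-query attention: 16 query heads share 2 KV heads,
# so 8 query heads per KV head.
print("GQA group size:", heads // kv_heads)

# M-RoPE: mrope_section [16, 24, 24] splits the rotary half of each head
# (head_dim // 2 = 64 dims) across temporal, height, and width positions.
assert sum(cfg["rope_scaling"]["mrope_section"]) == head_dim // 2

# FP8 quantization covers the linear layers except those listed here
# (embeddings, lm_head, and the vision patch/merger projections keep
# full precision).
for name in cfg["quantization_config"]["ignored_layers"]:
    print("unquantized:", name)
```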