little-lake-studios committed on
Commit
1ecded2
·
verified ·
1 Parent(s): 5d6fc0b

Upload 1 files with hf_hub

Browse files
Files changed (1) hide show
  1. text_encoder/config.json +186 -0
text_encoder/config.json ADDED
@@ -0,0 +1,186 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "Qwen2_5_VLForConditionalGeneration"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "bos_token_id": 151643,
7
+ "dtype": "bfloat16",
8
+ "eos_token_id": 151645,
9
+ "hidden_act": "silu",
10
+ "hidden_size": 3584,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 18944,
13
+ "max_position_embeddings": 128000,
14
+ "max_window_layers": 28,
15
+ "model_type": "qwen2_5_vl",
16
+ "num_attention_heads": 28,
17
+ "num_hidden_layers": 28,
18
+ "num_key_value_heads": 4,
19
+ "quantization_config": {
20
+ "_load_in_4bit": true,
21
+ "_load_in_8bit": false,
22
+ "bnb_4bit_compute_dtype": "bfloat16",
23
+ "bnb_4bit_quant_storage": "uint8",
24
+ "bnb_4bit_quant_type": "nf4",
25
+ "bnb_4bit_use_double_quant": false,
26
+ "llm_int8_enable_fp32_cpu_offload": false,
27
+ "llm_int8_has_fp16_weight": false,
28
+ "llm_int8_skip_modules": [
29
+ "transformer_blocks.0.img_mod",
30
+ "transformer_blocks.33.img_mod",
31
+ "transformer_blocks.58.attn.to_k",
32
+ "transformer_blocks.59.attn.to_out",
33
+ "time_text_embed",
34
+ "img_in",
35
+ "txt_in",
36
+ "transformer_blocks.0.img_mod.1",
37
+ "transformer_blocks.0.attn.to_q",
38
+ "transformer_blocks.0.attn.to_k",
39
+ "transformer_blocks.0.attn.to_v",
40
+ "transformer_blocks.0.attn.add_k_proj",
41
+ "transformer_blocks.0.attn.add_v_proj",
42
+ "transformer_blocks.0.attn.add_q_proj",
43
+ "transformer_blocks.0.attn.to_out.0",
44
+ "transformer_blocks.0.attn.to_add_out",
45
+ "transformer_blocks.0.img_mlp.net.0.proj",
46
+ "transformer_blocks.0.img_mlp.net.2",
47
+ "transformer_blocks.0.txt_mod.1",
48
+ "transformer_blocks.0.txt_mlp.net.0.proj",
49
+ "transformer_blocks.0.txt_mlp.net.2",
50
+ "transformer_blocks.59.img_mod.1",
51
+ "transformer_blocks.59.attn.to_q",
52
+ "transformer_blocks.59.attn.to_k",
53
+ "transformer_blocks.59.attn.to_v",
54
+ "transformer_blocks.59.attn.add_k_proj",
55
+ "transformer_blocks.59.attn.add_v_proj",
56
+ "transformer_blocks.59.attn.add_q_proj",
57
+ "transformer_blocks.59.attn.to_out.0",
58
+ "transformer_blocks.59.attn.to_add_out",
59
+ "transformer_blocks.59.img_mlp.net.0.proj",
60
+ "transformer_blocks.59.img_mlp.net.2",
61
+ "transformer_blocks.59.txt_mod.1",
62
+ "transformer_blocks.59.txt_mlp.net.0.proj",
63
+ "transformer_blocks.59.txt_mlp.net.2",
64
+ "norm_out.linear",
65
+ "proj_out"
66
+ ],
67
+ "llm_int8_threshold": 6.0,
68
+ "load_in_4bit": true,
69
+ "load_in_8bit": false,
70
+ "quant_method": "bitsandbytes"
71
+ },
72
+ "rms_norm_eps": 1e-06,
73
+ "rope_parameters": {
74
+ "mrope_section": [
75
+ 16,
76
+ 24,
77
+ 24
78
+ ],
79
+ "rope_type": "default",
80
+ "type": "default"
81
+ },
82
+ "rope_theta": 1000000.0,
83
+ "sliding_window": 32768,
84
+ "text_config": {
85
+ "_name_or_path": "/workspace/hf/hub/models--Qwen--Qwen-Image/snapshots/75e0b4be04f60ec59a75f475837eced720f823b6/text_encoder",
86
+ "architectures": [
87
+ "Qwen2_5_VLForConditionalGeneration"
88
+ ],
89
+ "attention_dropout": 0.0,
90
+ "bos_token_id": 151643,
91
+ "dtype": "bfloat16",
92
+ "eos_token_id": 151645,
93
+ "hidden_act": "silu",
94
+ "hidden_size": 3584,
95
+ "image_token_id": 151655,
96
+ "initializer_range": 0.02,
97
+ "intermediate_size": 18944,
98
+ "layer_types": [
99
+ "full_attention",
100
+ "full_attention",
101
+ "full_attention",
102
+ "full_attention",
103
+ "full_attention",
104
+ "full_attention",
105
+ "full_attention",
106
+ "full_attention",
107
+ "full_attention",
108
+ "full_attention",
109
+ "full_attention",
110
+ "full_attention",
111
+ "full_attention",
112
+ "full_attention",
113
+ "full_attention",
114
+ "full_attention",
115
+ "full_attention",
116
+ "full_attention",
117
+ "full_attention",
118
+ "full_attention",
119
+ "full_attention",
120
+ "full_attention",
121
+ "full_attention",
122
+ "full_attention",
123
+ "full_attention",
124
+ "full_attention",
125
+ "full_attention",
126
+ "full_attention"
127
+ ],
128
+ "max_position_embeddings": 128000,
129
+ "max_window_layers": 28,
130
+ "model_type": "qwen2_5_vl_text",
131
+ "num_attention_heads": 28,
132
+ "num_hidden_layers": 28,
133
+ "num_key_value_heads": 4,
134
+ "rms_norm_eps": 1e-06,
135
+ "rope_parameters": {
136
+ "mrope_section": [
137
+ 16,
138
+ 24,
139
+ 24
140
+ ],
141
+ "rope_theta": 1000000.0,
142
+ "rope_type": "default",
143
+ "type": "default"
144
+ },
145
+ "rope_theta": 1000000.0,
146
+ "sliding_window": null,
147
+ "use_cache": true,
148
+ "use_sliding_window": false,
149
+ "video_token_id": 151656,
150
+ "vision_end_token_id": 151653,
151
+ "vision_start_token_id": 151652,
152
+ "vision_token_id": 151654,
153
+ "vocab_size": 152064
154
+ },
155
+ "tie_word_embeddings": false,
156
+ "transformers_version": "5.0.0.dev0",
157
+ "use_cache": true,
158
+ "use_sliding_window": false,
159
+ "vision_config": {
160
+ "depth": 32,
161
+ "dtype": "bfloat16",
162
+ "fullatt_block_indexes": [
163
+ 7,
164
+ 15,
165
+ 23,
166
+ 31
167
+ ],
168
+ "hidden_act": "silu",
169
+ "hidden_size": 1280,
170
+ "in_channels": 3,
171
+ "in_chans": 3,
172
+ "initializer_range": 0.02,
173
+ "intermediate_size": 3420,
174
+ "model_type": "qwen2_5_vl",
175
+ "num_heads": 16,
176
+ "out_hidden_size": 3584,
177
+ "patch_size": 14,
178
+ "spatial_merge_size": 2,
179
+ "spatial_patch_size": 14,
180
+ "temporal_patch_size": 2,
181
+ "tokens_per_second": 2,
182
+ "window_size": 112
183
+ },
184
+ "vision_token_id": 151654,
185
+ "vocab_size": 152064
186
+ }