lthn committed on
Commit
d0d1409
·
verified ·
1 Parent(s): f291984

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +49 -11
config.json CHANGED
@@ -56,16 +56,6 @@
56
  "image_token_id": 258880,
57
  "initializer_range": 0.02,
58
  "model_type": "gemma4",
59
- "quantization": {
60
- "group_size": 64,
61
- "bits": 4,
62
- "mode": "affine"
63
- },
64
- "quantization_config": {
65
- "group_size": 64,
66
- "bits": 4,
67
- "mode": "affine"
68
- },
69
  "text_config": {
70
  "attention_bias": false,
71
  "attention_dropout": 0.0,
@@ -160,5 +150,53 @@
160
  "tie_word_embeddings": true,
161
  "transformers_version": "5.5.0.dev0",
162
  "video_token_id": 258884,
163
- "vision_soft_tokens_per_image": 280
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
164
  }
 
56
  "image_token_id": 258880,
57
  "initializer_range": 0.02,
58
  "model_type": "gemma4",
 
 
 
 
 
 
 
 
 
 
59
  "text_config": {
60
  "attention_bias": false,
61
  "attention_dropout": 0.0,
 
150
  "tie_word_embeddings": true,
151
  "transformers_version": "5.5.0.dev0",
152
  "video_token_id": 258884,
153
+ "vision_config": {
154
+ "_name_or_path": "",
155
+ "architectures": null,
156
+ "attention_bias": false,
157
+ "attention_dropout": 0.0,
158
+ "chunk_size_feed_forward": 0,
159
+ "default_output_length": 280,
160
+ "dtype": "bfloat16",
161
+ "global_head_dim": 64,
162
+ "head_dim": 64,
163
+ "hidden_activation": "gelu_pytorch_tanh",
164
+ "hidden_size": 768,
165
+ "id2label": {
166
+ "0": "LABEL_0",
167
+ "1": "LABEL_1"
168
+ },
169
+ "initializer_range": 0.02,
170
+ "intermediate_size": 3072,
171
+ "is_encoder_decoder": false,
172
+ "label2id": {
173
+ "LABEL_0": 0,
174
+ "LABEL_1": 1
175
+ },
176
+ "max_position_embeddings": 131072,
177
+ "model_type": "gemma4_vision",
178
+ "num_attention_heads": 12,
179
+ "num_hidden_layers": 16,
180
+ "num_key_value_heads": 12,
181
+ "output_attentions": false,
182
+ "output_hidden_states": false,
183
+ "patch_size": 16,
184
+ "pooling_kernel_size": 3,
185
+ "position_embedding_size": 10240,
186
+ "problem_type": null,
187
+ "return_dict": true,
188
+ "rms_norm_eps": 1e-06,
189
+ "rope_parameters": {
190
+ "rope_theta": 100.0,
191
+ "rope_type": "default"
192
+ },
193
+ "standardize": false,
194
+ "use_clipped_linears": true
195
+ },
196
+ "vision_soft_tokens_per_image": 280,
197
+ "quantization_config": {
198
+ "bits": 4,
199
+ "group_size": 64,
200
+ "mode": "affine"
201
+ }
202
  }