Neil Ellis committed on
Commit
73b4931
·
verified ·
1 Parent(s): bfab597

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +221 -0
config.json ADDED
@@ -0,0 +1,221 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "T5Gemma2ForConditionalGeneration"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "bos_token_id": 2,
7
+ "classifier_dropout_rate": 0.0,
8
+ "decoder": {
9
+ "_sliding_window_pattern": 6,
10
+ "attention_bias": false,
11
+ "attention_dropout": 0.0,
12
+ "attn_logit_softcapping": null,
13
+ "dropout_rate": 0.0,
14
+ "dtype": "bfloat16",
15
+ "final_logit_softcapping": null,
16
+ "head_dim": 256,
17
+ "hidden_activation": "gelu_pytorch_tanh",
18
+ "hidden_size": 640,
19
+ "initializer_range": 0.02,
20
+ "intermediate_size": 2048,
21
+ "layer_types": [
22
+ "sliding_attention",
23
+ "sliding_attention",
24
+ "sliding_attention",
25
+ "sliding_attention",
26
+ "sliding_attention",
27
+ "full_attention",
28
+ "sliding_attention",
29
+ "sliding_attention",
30
+ "sliding_attention",
31
+ "sliding_attention",
32
+ "sliding_attention",
33
+ "full_attention",
34
+ "sliding_attention",
35
+ "sliding_attention",
36
+ "sliding_attention",
37
+ "sliding_attention",
38
+ "sliding_attention",
39
+ "full_attention"
40
+ ],
41
+ "max_position_embeddings": 32768,
42
+ "model_type": "t5gemma2_decoder",
43
+ "num_attention_heads": 4,
44
+ "num_hidden_layers": 18,
45
+ "num_key_value_heads": 1,
46
+ "query_pre_attn_scalar": 256,
47
+ "rms_norm_eps": 1e-06,
48
+ "rope_parameters": {
49
+ "full_attention": {
50
+ "factor": 8.0,
51
+ "rope_theta": 1000000,
52
+ "rope_type": "linear"
53
+ },
54
+ "sliding_attention": {
55
+ "rope_theta": 10000,
56
+ "rope_type": "default"
57
+ }
58
+ },
59
+ "sliding_window": 512,
60
+ "use_bidirectional_attention": false,
61
+ "use_cache": true,
62
+ "vocab_size": 262144
63
+ },
64
+ "dropout_rate": 0.0,
65
+ "dtype": "bfloat16",
66
+ "encoder": {
67
+ "attention_dropout": 0.0,
68
+ "boi_token_index": 255999,
69
+ "dropout_rate": 0.0,
70
+ "dtype": "bfloat16",
71
+ "eoi_token_index": 256000,
72
+ "image_token_index": 256001,
73
+ "initializer_range": 0.02,
74
+ "mm_tokens_per_image": 256,
75
+ "model_type": "t5gemma2_encoder",
76
+ "text_config": {
77
+ "_name_or_path": "",
78
+ "_sliding_window_pattern": 6,
79
+ "add_cross_attention": false,
80
+ "architectures": null,
81
+ "attention_bias": false,
82
+ "attention_dropout": 0.0,
83
+ "attn_logit_softcapping": null,
84
+ "bos_token_id": 2,
85
+ "chunk_size_feed_forward": 0,
86
+ "cross_attention_hidden_size": null,
87
+ "decoder_start_token_id": null,
88
+ "dropout_rate": 0.0,
89
+ "dtype": "bfloat16",
90
+ "eos_token_id": 1,
91
+ "final_logit_softcapping": null,
92
+ "finetuning_task": null,
93
+ "head_dim": 256,
94
+ "hidden_activation": "gelu_pytorch_tanh",
95
+ "hidden_size": 640,
96
+ "id2label": {
97
+ "0": "LABEL_0",
98
+ "1": "LABEL_1"
99
+ },
100
+ "initializer_range": 0.02,
101
+ "intermediate_size": 2048,
102
+ "is_decoder": false,
103
+ "is_encoder_decoder": false,
104
+ "label2id": {
105
+ "LABEL_0": 0,
106
+ "LABEL_1": 1
107
+ },
108
+ "layer_types": [
109
+ "sliding_attention",
110
+ "sliding_attention",
111
+ "sliding_attention",
112
+ "sliding_attention",
113
+ "sliding_attention",
114
+ "full_attention",
115
+ "sliding_attention",
116
+ "sliding_attention",
117
+ "sliding_attention",
118
+ "sliding_attention",
119
+ "sliding_attention",
120
+ "full_attention",
121
+ "sliding_attention",
122
+ "sliding_attention",
123
+ "sliding_attention",
124
+ "sliding_attention",
125
+ "sliding_attention",
126
+ "full_attention"
127
+ ],
128
+ "max_position_embeddings": 32768,
129
+ "model_type": "t5gemma2_text",
130
+ "num_attention_heads": 4,
131
+ "num_hidden_layers": 18,
132
+ "num_key_value_heads": 1,
133
+ "output_attentions": false,
134
+ "output_hidden_states": false,
135
+ "pad_token_id": 0,
136
+ "prefix": null,
137
+ "problem_type": null,
138
+ "query_pre_attn_scalar": 256,
139
+ "return_dict": true,
140
+ "rms_norm_eps": 1e-06,
141
+ "rope_parameters": {
142
+ "full_attention": {
143
+ "factor": 8.0,
144
+ "rope_theta": 1000000,
145
+ "rope_type": "linear"
146
+ },
147
+ "sliding_attention": {
148
+ "rope_theta": 10000,
149
+ "rope_type": "default"
150
+ }
151
+ },
152
+ "sep_token_id": null,
153
+ "sliding_window": 512,
154
+ "task_specific_params": null,
155
+ "tie_encoder_decoder": false,
156
+ "tie_word_embeddings": true,
157
+ "tokenizer_class": null,
158
+ "use_bidirectional_attention": false,
159
+ "use_cache": true,
160
+ "vocab_size": 262144
161
+ },
162
+ "vision_config": {
163
+ "_name_or_path": "",
164
+ "add_cross_attention": false,
165
+ "architectures": null,
166
+ "attention_dropout": 0.0,
167
+ "bos_token_id": null,
168
+ "chunk_size_feed_forward": 0,
169
+ "cross_attention_hidden_size": null,
170
+ "decoder_start_token_id": null,
171
+ "dropout_rate": 0.0,
172
+ "dtype": "bfloat16",
173
+ "eos_token_id": null,
174
+ "finetuning_task": null,
175
+ "hidden_act": "gelu_pytorch_tanh",
176
+ "hidden_size": 1152,
177
+ "id2label": {
178
+ "0": "LABEL_0",
179
+ "1": "LABEL_1"
180
+ },
181
+ "image_size": 896,
182
+ "intermediate_size": 4304,
183
+ "is_decoder": false,
184
+ "is_encoder_decoder": false,
185
+ "label2id": {
186
+ "LABEL_0": 0,
187
+ "LABEL_1": 1
188
+ },
189
+ "layer_norm_eps": 1e-06,
190
+ "model_type": "siglip_vision_model",
191
+ "num_attention_heads": 16,
192
+ "num_channels": 3,
193
+ "num_hidden_layers": 27,
194
+ "output_attentions": false,
195
+ "output_hidden_states": false,
196
+ "pad_token_id": null,
197
+ "patch_size": 14,
198
+ "prefix": null,
199
+ "problem_type": null,
200
+ "return_dict": true,
201
+ "sep_token_id": null,
202
+ "task_specific_params": null,
203
+ "tie_encoder_decoder": false,
204
+ "tie_word_embeddings": true,
205
+ "tokenizer_class": null,
206
+ "vision_use_head": false,
207
+ "vocab_size": 262144
208
+ },
209
+ "vocab_size": 262144
210
+ },
211
+ "eoi_token_index": 256000,
212
+ "eos_token_id": 1,
213
+ "image_token_index": 256001,
214
+ "initializer_range": 0.02,
215
+ "is_encoder_decoder": true,
216
+ "model_type": "t5gemma2",
217
+ "pad_token_id": 0,
218
+ "transformers_version": "5.0.0.dev0",
219
+ "use_cache": false,
220
+ "vocab_size": 262144
221
+ }