spikymoth committed
Commit 7c2e08d · verified · 1 Parent(s): 68bab8a

Update config.json

Files changed (1): config.json (+87 -3)
config.json CHANGED
@@ -3,6 +3,7 @@
     "Gemma3ForConditionalGeneration"
   ],
   "boi_token_index": 255999,
+  "dtype": "bfloat16",
   "eoi_token_index": 256000,
   "eos_token_id": [
     1,
@@ -13,28 +14,111 @@
   "mm_tokens_per_image": 256,
   "model_type": "gemma3",
   "text_config": {
+    "_sliding_window_pattern": 6,
+    "attention_bias": false,
+    "attention_dropout": 0.0,
+    "attn_logit_softcapping": null,
+    "dtype": "bfloat16",
+    "final_logit_softcapping": null,
     "head_dim": 128,
+    "hidden_activation": "gelu_pytorch_tanh",
     "hidden_size": 5376,
+    "initializer_range": 0.02,
     "intermediate_size": 21504,
+    "layer_types": [
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "sliding_attention",
+      "full_attention",
+      "sliding_attention",
+      "sliding_attention"
+    ],
+    "max_position_embeddings": 131072,
     "model_type": "gemma3_text",
     "num_attention_heads": 32,
     "num_hidden_layers": 62,
     "num_key_value_heads": 16,
     "query_pre_attn_scalar": 168,
+    "rms_norm_eps": 1e-06,
+    "rope_local_base_freq": 10000.0,
     "rope_scaling": {
       "factor": 8.0,
       "rope_type": "linear"
     },
-    "sliding_window": 1024
+    "rope_theta": 1000000.0,
+    "sliding_window": 1024,
+    "use_bidirectional_attention": false,
+    "use_cache": true,
+    "vocab_size": 262208
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.50.0.dev0",
+  "transformers_version": "4.57.1",
   "vision_config": {
+    "attention_dropout": 0.0,
+    "dtype": "bfloat16",
+    "hidden_act": "gelu_pytorch_tanh",
     "hidden_size": 1152,
     "image_size": 896,
     "intermediate_size": 4304,
+    "layer_norm_eps": 1e-06,
     "model_type": "siglip_vision_model",
     "num_attention_heads": 16,
+    "num_channels": 3,
     "num_hidden_layers": 27,
     "patch_size": 14,
     "vision_use_head": false