QomSSLab committed
Commit ad191d7 (verified) · 1 Parent(s): 7656ff5

Upload Gemma3ForCausalLM

config.json CHANGED
@@ -1,111 +1,86 @@
 {
+  "_sliding_window_pattern": 6,
   "architectures": [
-    "Gemma3ForConditionalGeneration"
+    "Gemma3ForCausalLM"
   ],
-  "boi_token_index": 255999,
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "attn_logit_softcapping": null,
+  "bos_token_id": 2,
   "dtype": "bfloat16",
-  "eoi_token_index": 256000,
-  "eos_token_id": [
-    1,
-    106
-  ],
-  "image_token_index": 262144,
+  "eos_token_id": 1,
+  "final_logit_softcapping": null,
+  "head_dim": 256,
+  "hidden_activation": "gelu_pytorch_tanh",
+  "hidden_size": 3840,
   "initializer_range": 0.02,
-  "mm_tokens_per_image": 256,
-  "model_type": "gemma3",
-  "text_config": {
-    "_sliding_window_pattern": 6,
-    "attention_bias": false,
-    "attention_dropout": 0.0,
-    "attn_logit_softcapping": null,
-    "dtype": "bfloat16",
-    "final_logit_softcapping": null,
-    "head_dim": 256,
-    "hidden_activation": "gelu_pytorch_tanh",
-    "hidden_size": 3840,
-    "initializer_range": 0.02,
-    "intermediate_size": 15360,
-    "layer_types": [
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "sliding_attention",
-      "full_attention"
-    ],
-    "max_position_embeddings": 131072,
-    "model_type": "gemma3_text",
-    "num_attention_heads": 16,
-    "num_hidden_layers": 48,
-    "num_key_value_heads": 8,
-    "query_pre_attn_scalar": 256,
-    "rms_norm_eps": 1e-06,
-    "rope_local_base_freq": 10000.0,
-    "rope_scaling": {
-      "factor": 8.0,
-      "rope_type": "linear"
-    },
-    "rope_theta": 1000000.0,
-    "sliding_window": 1024,
-    "use_cache": true,
-    "vocab_size": 262208
+  "intermediate_size": 15360,
+  "layer_types": [
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 131072,
+  "model_type": "gemma3_text",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 48,
+  "num_key_value_heads": 8,
+  "pad_token_id": 0,
+  "query_pre_attn_scalar": 256,
+  "rms_norm_eps": 1e-06,
+  "rope_local_base_freq": 10000.0,
+  "rope_scaling": {
+    "factor": 8.0,
+    "rope_type": "linear"
   },
+  "rope_theta": 1000000.0,
+  "sliding_window": 1024,
   "transformers_version": "4.56.0",
-  "vision_config": {
-    "attention_dropout": 0.0,
-    "dtype": "bfloat16",
-    "hidden_act": "gelu_pytorch_tanh",
-    "hidden_size": 1152,
-    "image_size": 896,
-    "intermediate_size": 4304,
-    "layer_norm_eps": 1e-06,
-    "model_type": "siglip_vision_model",
-    "num_attention_heads": 16,
-    "num_channels": 3,
-    "num_hidden_layers": 27,
-    "patch_size": 14,
-    "vision_use_head": false
-  }
+  "use_cache": true,
+  "vocab_size": 262208
 }
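In short, this commit drops the multimodal wrapper: `Gemma3ForConditionalGeneration`, the SigLIP `vision_config`, and the image/BOI/EOI token fields are removed, the former `text_config` keys are promoted to the top level, and explicit `bos_token_id`, `eos_token_id`, and `pad_token_id` are set. The `layer_types` list encodes the Gemma 3 attention schedule: five sliding-window layers (window 1024) followed by one full-attention layer, repeated eight times over the 48 layers, matching `"_sliding_window_pattern": 6`. Below is a minimal loading sketch, not the uploader's own code: the repo id is a placeholder (the commit page does not name the repository), and it assumes a `transformers` version with Gemma 3 support (the config was written by 4.56.0).

```python
# Minimal sketch for loading the checkpoint after this commit.
# NOTE: "QomSSLab/<repo-name>" is a placeholder, not the real repo id.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "QomSSLab/<repo-name>"  # placeholder

config = AutoConfig.from_pretrained(repo_id)
# After this commit the config is a flat, text-only Gemma 3 config:
assert config.model_type == "gemma3_text"
assert config.architectures == ["Gemma3ForCausalLM"]

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "dtype": "bfloat16" in config.json
)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

inputs = tokenizer("Hello, world", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```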
model-00001-of-00005.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4847447e92599833e8dbaa3067cd201c3bb5c052efa91f11ba891e43234f7832
-size 4979902192
+oid sha256:1a4c14f38ffc9e23d873fa999eea1fc965dcedcec267e7a99f07c6fdbe8760d8
+size 4915892992
model-00002-of-00005.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:891bd54eed03cba9ee1e705533a02a8217fcc29f356e4a1f53e5fd0d178883ad
-size 4931296592
+oid sha256:0335ce336b31e38c581326acb77ff28330c2073764b1924ad7108d788b56ba68
+size 4931294472
model-00003-of-00005.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7cee411d9d57324e50ce064a192cc5a858276d508611b12fc599e0c9767112e0
-size 4931296656
+oid sha256:e0d6a60cf6a1708be0613ab7c4d10457ad99b98c02e40ebd5f320fe02aa0d50c
+size 4931294528
model-00004-of-00005.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8bc75a29a730c9e743cad013feda3b0991a913fafe787c58a1c6e20afad97723
-size 4931296656
+oid sha256:b8b76dd5c3b1bb971354de059db455aac1c59fea624bf8a0289e37557ff43261
+size 4931294528
model-00005-of-00005.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ed14bd4908c98fed9f61e8cd410167e0846de9abd78e0452ab092072e5d9252d
-size 4601000928
+oid sha256:6d9c549d5bc84e50ecc0c62c8bf5edebd5036ac83bceabccf75b83689f58f370
+size 3822364808
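The `.safetensors` entries above are Git LFS pointers rather than the weights themselves; each records only the SHA-256 `oid` and byte `size` of the stored file. The new shard sizes sum to about 23.5 GB, down from roughly 24.4 GB before, a drop broadly consistent with removing the bfloat16 SigLIP vision tower. A small verification sketch, assuming the shards have been downloaded into the working directory:

```python
# Sketch: verify a downloaded shard against its Git LFS pointer (oid + size).
import hashlib
import os

def verify_shard(path: str, expected_sha256: str, expected_size: int) -> bool:
    """Compare file size and SHA-256 digest with the LFS pointer values."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks so a ~5 GB shard never sits in memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Pointer values for model-00005 taken from the diff above:
ok = verify_shard(
    "model-00005-of-00005.safetensors",
    "6d9c549d5bc84e50ecc0c62c8bf5edebd5036ac83bceabccf75b83689f58f370",
    3822364808,
)
print("shard OK" if ok else "shard mismatch")
```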
model.safetensors.index.json CHANGED
The diff for this file is too large to render.
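The index is a small JSON file with a `metadata.total_size` field and a `weight_map` from tensor names to shard filenames; its diff is large here presumably because most tensor names changed when the nested multimodal layout was flattened into the causal-LM layout. A quick inspection sketch, assuming the file is available locally:

```python
# Sketch: summarize model.safetensors.index.json (standard safetensors index).
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print("total_size (bytes):", index["metadata"]["total_size"])
tensors_per_shard = Counter(index["weight_map"].values())
for shard, count in sorted(tensors_per_shard.items()):
    print(f"{shard}: {count} tensors")
```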