{
    "architectures": [
        "Gemma4ForConditionalGeneration"
    ],
    "audio_config": null,
    "audio_token_id": 258881,
    "boa_token_id": 256000,
    "boi_token_id": 255999,
    "dtype": "bfloat16",
    "eoa_token_id": 258883,
    "eoa_token_index": 258883,
    "eoi_token_id": 258882,
    "eos_token_id": [
        1,
        106,
        50
    ],
    "image_token_id": 258880,
    "initializer_range": 0.02,
    "model_type": "gemma4",
    "text_config": {
        "attention_bias": false,
        "attention_dropout": 0.0,
        "attention_k_eq_v": true,
        "bos_token_id": 2,
        "dtype": "bfloat16",
        "enable_moe_block": true,
        "eos_token_id": 1,
        "final_logit_softcapping": 30.0,
        "global_head_dim": 512,
        "head_dim": 256,
        "hidden_activation": "gelu_pytorch_tanh",
        "hidden_size": 2816,
        "hidden_size_per_layer_input": 0,
        "initializer_range": 0.02,
        "intermediate_size": 2112,
        "layer_types": [
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "full_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "full_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "full_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "full_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "sliding_attention",
            "full_attention"
        ],
        "max_position_embeddings": 262144,
        "model_type": "gemma4_text",
        "moe_intermediate_size": 704,
        "num_attention_heads": 16,
        "num_experts": 128,
        "num_global_key_value_heads": 2,
        "num_hidden_layers": 30,
        "num_key_value_heads": 8,
        "num_kv_shared_layers": 0,
        "pad_token_id": 0,
        "rms_norm_eps": 1e-06,
        "rope_parameters": {
            "full_attention": {
                "partial_rotary_factor": 0.25,
                "rope_theta": 1000000.0,
                "rope_type": "proportional"
            },
            "sliding_attention": {
                "rope_theta": 10000.0,
                "rope_type": "default"
            }
        },
        "sliding_window": 1024,
        "tie_word_embeddings": true,
        "top_k_experts": 8,
        "use_bidirectional_attention": "vision",
        "use_cache": true,
        "use_double_wide_mlp": false,
        "vocab_size": 262144,
        "vocab_size_per_layer_input": 262144
    },
    "tie_word_embeddings": true,
    "transformers_version": "5.5.0.dev0",
    "video_token_id": 258884,
    "vision_soft_tokens_per_image": 280
}