{
"architectures": [
"GraniteMoeHybridForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"attention_multiplier": 0.0078125,
"bos_token_id": 100257,
"dtype": "bfloat16",
"embedding_multiplier": 12,
"eos_token_id": 100257,
"hidden_act": "silu",
"hidden_size": 1536,
"initializer_range": 0.1,
"intermediate_size": 512,
"layer_types": [
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"attention",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"attention",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"attention",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"attention",
"mamba",
"mamba",
"mamba",
"mamba"
],
"logits_scaling": 6,
"mamba_chunk_size": 256,
"mamba_conv_bias": true,
"mamba_d_conv": 4,
"mamba_d_head": 64,
"mamba_d_state": 128,
"mamba_expand": 2,
"mamba_n_groups": 1,
"mamba_n_heads": 48,
"mamba_proj_bias": false,
"max_position_embeddings": 131072,
"model_type": "granitemoehybrid",
"normalization_function": "rmsnorm",
"num_attention_heads": 12,
"num_experts_per_tok": 6,
"num_hidden_layers": 40,
"num_key_value_heads": 4,
"num_local_experts": 64,
"output_router_logits": false,
"pad_token_id": 100256,
"position_embedding_type": "nope",
"quantization_config": {
"config_groups": {
"group_0": {
"format": "float-quantized",
"input_activations": {
"actorder": null,
"block_structure": null,
"dynamic": true,
"group_size": 128,
"num_bits": 8,
"observer": null,
"observer_kwargs": {},
"strategy": "group",
"symmetric": true,
"type": "float"
},
"output_activations": null,
"targets": [
"Linear"
],
"weights": {
"actorder": null,
"block_structure": [
128,
128
],
"dynamic": false,
"group_size": null,
"num_bits": 8,
"observer": "minmax",
"observer_kwargs": {},
"strategy": "block",
"symmetric": true,
"type": "float"
}
}
},
"format": "float-quantized",
"global_compression_ratio": null,
"ignore": [
"model.layers.0.block_sparse_moe.router.layer",
"model.layers.0.shared_mlp.input_linear",
"model.layers.0.mixer.in_proj",
"model.layers.1.block_sparse_moe.router.layer",
"model.layers.1.shared_mlp.input_linear",
"model.layers.1.mixer.in_proj",
"model.layers.2.block_sparse_moe.router.layer",
"model.layers.2.shared_mlp.input_linear",
"model.layers.2.mixer.in_proj",
"model.layers.3.block_sparse_moe.router.layer",
"model.layers.3.shared_mlp.input_linear",
"model.layers.3.mixer.in_proj",
"model.layers.4.block_sparse_moe.router.layer",
"model.layers.4.shared_mlp.input_linear",
"model.layers.4.mixer.in_proj",
"model.layers.5.block_sparse_moe.router.layer",
"model.layers.5.shared_mlp.input_linear",
"model.layers.6.block_sparse_moe.router.layer",
"model.layers.6.shared_mlp.input_linear",
"model.layers.6.mixer.in_proj",
"model.layers.7.block_sparse_moe.router.layer",
"model.layers.7.shared_mlp.input_linear",
"model.layers.7.mixer.in_proj",
"model.layers.8.block_sparse_moe.router.layer",
"model.layers.8.shared_mlp.input_linear",
"model.layers.8.mixer.in_proj",
"model.layers.9.block_sparse_moe.router.layer",
"model.layers.9.shared_mlp.input_linear",
"model.layers.9.mixer.in_proj",
"model.layers.10.block_sparse_moe.router.layer",
"model.layers.10.shared_mlp.input_linear",
"model.layers.10.mixer.in_proj",
"model.layers.11.block_sparse_moe.router.layer",
"model.layers.11.shared_mlp.input_linear",
"model.layers.11.mixer.in_proj",
"model.layers.12.block_sparse_moe.router.layer",
"model.layers.12.shared_mlp.input_linear",
"model.layers.12.mixer.in_proj",
"model.layers.13.block_sparse_moe.router.layer",
"model.layers.13.shared_mlp.input_linear",
"model.layers.13.mixer.in_proj",
"model.layers.14.block_sparse_moe.router.layer",
"model.layers.14.shared_mlp.input_linear",
"model.layers.14.mixer.in_proj",
"model.layers.15.block_sparse_moe.router.layer",
"model.layers.15.shared_mlp.input_linear",
"model.layers.16.block_sparse_moe.router.layer",
"model.layers.16.shared_mlp.input_linear",
"model.layers.16.mixer.in_proj",
"model.layers.17.block_sparse_moe.router.layer",
"model.layers.17.shared_mlp.input_linear",
"model.layers.17.mixer.in_proj",
"model.layers.18.block_sparse_moe.router.layer",
"model.layers.18.shared_mlp.input_linear",
"model.layers.18.mixer.in_proj",
"model.layers.19.block_sparse_moe.router.layer",
"model.layers.19.shared_mlp.input_linear",
"model.layers.19.mixer.in_proj",
"model.layers.20.block_sparse_moe.router.layer",
"model.layers.20.shared_mlp.input_linear",
"model.layers.20.mixer.in_proj",
"model.layers.21.block_sparse_moe.router.layer",
"model.layers.21.shared_mlp.input_linear",
"model.layers.21.mixer.in_proj",
"model.layers.22.block_sparse_moe.router.layer",
"model.layers.22.shared_mlp.input_linear",
"model.layers.22.mixer.in_proj",
"model.layers.23.block_sparse_moe.router.layer",
"model.layers.23.shared_mlp.input_linear",
"model.layers.23.mixer.in_proj",
"model.layers.24.block_sparse_moe.router.layer",
"model.layers.24.shared_mlp.input_linear",
"model.layers.24.mixer.in_proj",
"model.layers.25.block_sparse_moe.router.layer",
"model.layers.25.shared_mlp.input_linear",
"model.layers.26.block_sparse_moe.router.layer",
"model.layers.26.shared_mlp.input_linear",
"model.layers.26.mixer.in_proj",
"model.layers.27.block_sparse_moe.router.layer",
"model.layers.27.shared_mlp.input_linear",
"model.layers.27.mixer.in_proj",
"model.layers.28.block_sparse_moe.router.layer",
"model.layers.28.shared_mlp.input_linear",
"model.layers.28.mixer.in_proj",
"model.layers.29.block_sparse_moe.router.layer",
"model.layers.29.shared_mlp.input_linear",
"model.layers.29.mixer.in_proj",
"model.layers.30.block_sparse_moe.router.layer",
"model.layers.30.shared_mlp.input_linear",
"model.layers.30.mixer.in_proj",
"model.layers.31.block_sparse_moe.router.layer",
"model.layers.31.shared_mlp.input_linear",
"model.layers.31.mixer.in_proj",
"model.layers.32.block_sparse_moe.router.layer",
"model.layers.32.shared_mlp.input_linear",
"model.layers.32.mixer.in_proj",
"model.layers.33.block_sparse_moe.router.layer",
"model.layers.33.shared_mlp.input_linear",
"model.layers.33.mixer.in_proj",
"model.layers.34.block_sparse_moe.router.layer",
"model.layers.34.shared_mlp.input_linear",
"model.layers.34.mixer.in_proj",
"model.layers.35.block_sparse_moe.router.layer",
"model.layers.35.shared_mlp.input_linear",
"model.layers.36.block_sparse_moe.router.layer",
"model.layers.36.shared_mlp.input_linear",
"model.layers.36.mixer.in_proj",
"model.layers.37.block_sparse_moe.router.layer",
"model.layers.37.shared_mlp.input_linear",
"model.layers.37.mixer.in_proj",
"model.layers.38.block_sparse_moe.router.layer",
"model.layers.38.shared_mlp.input_linear",
"model.layers.38.mixer.in_proj",
"model.layers.39.block_sparse_moe.router.layer",
"model.layers.39.shared_mlp.input_linear",
"model.layers.39.mixer.in_proj",
"lm_head"
],
"kv_cache_scheme": null,
"quant_method": "compressed-tensors",
"quantization_status": "compressed",
"sparsity_config": {},
"transform_config": {},
"version": "0.13.0"
},
"residual_multiplier": 0.22,
"rms_norm_eps": 1e-05,
"rope_scaling": null,
"rope_theta": 10000,
"router_aux_loss_coef": 0.0,
"shared_intermediate_size": 1024,
"tie_word_embeddings": true,
"transformers_version": "4.57.6",
"use_cache": true,
"vocab_size": 100352
}
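
A minimal loading sketch for a checkpoint carrying this config, assuming the files sit in a local directory (the path below is a placeholder, not taken from the config) and that transformers >= 4.57 (per "transformers_version") plus the compressed-tensors package are installed so the FP8 "compressed-tensors" weights can be decompressed on load:

# Minimal loading sketch. Assumptions: the model path is a placeholder,
# and transformers >= 4.57 with the compressed-tensors package is installed.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "path/to/granite-moe-hybrid-fp8-checkpoint"  # placeholder, not from the config

tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(
    model_path,          # reads this config.json, including quantization_config
    device_map="auto",   # place layers automatically on available devices
)

prompt = "Hello"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(output[0], skip_special_tokens=True))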