{ "architectures": [ "LongcatFlashNgramForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_longcat_ngram.LongcatFlashNgramConfig", "AutoModel": "modeling_longcat_ngram.LongcatFlashNgramModel", "AutoModelForCausalLM": "modeling_longcat_ngram.LongcatFlashNgramForCausalLM" }, "bos_token_id": 1, "emb_neighbor_num": 4, "emb_split_num": 4, "eos_token_id": 2, "expert_ffn_hidden_size": 1024, "ffn_hidden_size": 6144, "hidden_size": 3072, "kv_lora_rank": 512, "max_position_embeddings": 327680, "mla_scale_kv_lora": true, "mla_scale_q_lora": true, "model_type": "longcat_flash_ngram", "moe_topk": 12, "n_routed_experts": 256, "ngram_vocab_size_ratio": 78, "num_attention_heads": 32, "num_layers": 14, "q_lora_rank": 1536, "qk_nope_head_dim": 128, "qk_rope_head_dim": 64, "quantization": { "group_size": 64, "bits": 4, "mode": "affine", "model.layers.0.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.1.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.2.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.3.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.4.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.5.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.6.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.7.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.8.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.9.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.10.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.11.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.12.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.13.mlp.router.classifier": { "group_size": 64, "bits": 8 } }, "quantization_config": { "group_size": 64, "bits": 4, "mode": "affine", "model.layers.0.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.1.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.2.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.3.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.4.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.5.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.6.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.7.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.8.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.9.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.10.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.11.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.12.mlp.router.classifier": { "group_size": 64, "bits": 8 }, "model.layers.13.mlp.router.classifier": { "group_size": 64, "bits": 8 } }, "rms_norm_eps": 1e-05, "rope_scaling": { "original_max_position_embeddings": 32768, "rope_type": "yarn", "factor": 10, "beta_fast": 32, "beta_slow": 1, "mscale": 1, "mscale_all_dim": 1 }, "rope_theta": 5000000.0, "routed_scaling_factor": 6.0, "torch_dtype": "bfloat16", "transformers_version": "4.57.6", "use_cache": true, "v_head_dim": 128, "vocab_size": 131072, "zero_expert_num": 128, "zero_expert_type": "identity" }