{
  "architectures": [
    "Plamo2ForCausalLM"
  ],
  "attention_window_size": 32768,
  "auto_map": {
    "AutoConfig": "modeling_plamo.Plamo2Config",
    "AutoModelForCausalLM": "modeling_plamo.Plamo2ForCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "eval_attention_n_bit": null,
  "eval_mlp_n_bit": null,
  "fp8_accum_dtype": "bfloat16",
  "full_attention_idx": [
    0,
    2,
    4,
    6,
    8,
    10,
    12,
    14,
    16,
    18,
    20,
    22,
    24,
    26,
    28,
    30
  ],
  "hidden_size": 2048,
  "hidden_size_per_head": 128,
  "image_feature_size": null,
  "image_proj_type": "linear",
  "image_token_id": null,
  "intermediate_size": 5632,
  "linear_type": "normal",
  "mamba_chunk_size": 256,
  "mamba_d_conv": 4,
  "mamba_d_state": 64,
  "mamba_enabled": true,
  "mamba_num_heads": 64,
  "mamba_step": 2,
  "max_position_embeddings": 32768,
  "model_type": "plamo2",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 4,
  "pad_token_id": 3,
  "rms_norm_eps": 0.000001,
  "rope_local_theta": 1000000.0,
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tokenizer_class": "Plamo2Tokenizer",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.3",
  "use_cache": false,
  "vocab_size": 100000
}
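
Since the `auto_map` entries resolve `Plamo2Config` and `Plamo2ForCausalLM` from the repository's own `modeling_plamo.py`, and `tokenizer_class` is likewise the custom `Plamo2Tokenizer`, loading this model through `transformers` requires `trust_remote_code=True`. Below is a minimal loading sketch; the repository id `pfnet/plamo-2-1b` is an assumption used as a placeholder (the config itself does not name its repo), so substitute the actual repository this `config.json` belongs to.

```python
# Minimal loading sketch for a model using this config.
# Assumption: repo_id is a placeholder; the config does not state its repo.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "pfnet/plamo-2-1b"  # assumed placeholder

# "tokenizer_class": "Plamo2Tokenizer" is custom code shipped in the repo,
# so the tokenizer also needs trust_remote_code=True.
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)

# dtype follows the config's "torch_dtype": "bfloat16"; the custom model
# classes are pulled in via the "auto_map" entries above.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)
```

Pinning `trust_remote_code=True` is unavoidable here by design: `model_type: "plamo2"` is not a class built into `transformers` 4.46.3, so both config and weights must be instantiated from the repo's bundled modeling code.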
|
|