```json
{
  "allow_embedding_resizing": true,
  "_name_or_path": "ModernBERT-dec-32m",
  "architectures": [
    "ModernBertForCausalLM"
  ],
  "attention_layer": "rope",
  "attention_probs_dropout_prob": 0.0,
  "attn_out_bias": false,
  "attn_out_dropout_prob": 0.1,
  "attn_qkv_bias": false,
  "bert_layer": "prenorm",
  "classifier_dropout": null,
  "compile_model": true,
  "reference_compile": false,
  "decoder_bias": true,
  "deterministic_fa2": true,
  "embed_dropout_prob": 0.0,
  "embed_norm": true,
  "embedding_layer": "sans_pos",
  "encoder_layer": "base",
  "final_norm": true,
  "global_attn_every_n_layers": 3,
  "gradient_checkpointing": false,
  "head_class_act": "silu",
  "head_class_bias": false,
  "head_class_dropout": 0.0,
  "head_class_norm": false,
  "head_pred_act": "gelu",
  "head_pred_bias": false,
  "head_pred_dropout": 0.0,
  "head_pred_norm": true,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 384,
  "init_cutoff_factor": 2.0,
  "init_method": "full_megatron",
  "init_small_embedding": false,
  "init_std": 0.02,
  "initial_attention_layer": null,
  "initial_bert_layer": null,
  "initial_mlp_layer": null,
  "initializer_range": 0.02,
  "intermediate_size": 576,
  "layer_norm_eps": 1e-12,
  "local_attn_rotary_emb_base": -1,
  "local_attn_rotary_emb_dim": null,
  "loss_function": "fa_cross_entropy",
  "deterministic_flash_attn": false,
  "loss_kwargs": {
    "reduction": "mean"
  },
  "masked_prediction": false,
  "causal_mask": true,
  "max_position_embeddings": 1024,
  "mlp_dropout_prob": 0.0,
  "mlp_in_bias": false,
  "mlp_layer": "glu",
  "mlp_out_bias": false,
  "model_type": "modernbert",
  "norm_kwargs": {
    "bias": false,
    "eps": 1e-05
  },
  "normalization": "layernorm",
  "num_attention_heads": 6,
  "num_hidden_layers": 10,
  "num_initial_layers": 1,
  "pad_logits": true,
  "pad_token_id": null,
  "padding": "unpadded",
  "pooling_type": "cls",
  "position_embedding_type": "absolute",
  "global_rope_theta": 160000.0,
  "local_rope_theta": 160000.0,
  "rotary_emb_base": 160000.0,
  "rotary_emb_dim": 64,
  "rotary_emb_interleaved": false,
  "rotary_emb_scale_base": null,
  "skip_first_prenorm": true,
  "sliding_window": 128,
  "transformers_version": "4.44.1",
  "type_vocab_size": 2,
  "unpad_embeddings": true,
  "use_cache": true,
  "use_fa2": true,
  "use_sdpa_attn_mask": false,
  "vocab_size": 50368,
  "is_causal": true
}
```
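
For reference, a minimal sketch of consuming this file programmatically and checking the quantities its fields imply. The checkpoint location `ModernBERT-dec-32m` is taken from `_name_or_path` and is an assumption, not a confirmed hub repo. Note that `ModernBertForCausalLM` is not part of stock `transformers` 4.44.1 (the version recorded above), so loading the model itself would additionally require the repo's custom modeling code via `trust_remote_code=True`.

```python
import json

# Read the config shown above (assumed saved as config.json).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: 384 / 6 = 64, which matches rotary_emb_dim,
# i.e. RoPE spans the full attention head.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
assert head_dim == cfg["rotary_emb_dim"]

# Following the ModernBERT convention, with global_attn_every_n_layers = 3
# every layer whose index is divisible by 3 attends globally (0, 3, 6, 9);
# the remaining layers use the 128-token sliding window.
global_layers = [i for i in range(cfg["num_hidden_layers"])
                 if i % cfg["global_attn_every_n_layers"] == 0]
print(global_layers, cfg["sliding_window"])  # [0, 3, 6, 9] 128

# Hedged: loading the model itself, assuming the repo ships the custom
# ModernBertForCausalLM class and registers it through an auto_map entry.
# from transformers import AutoModelForCausalLM
# model = AutoModelForCausalLM.from_pretrained(
#     "ModernBERT-dec-32m", trust_remote_code=True
# )
```

The remaining shape fields are consistent with a small GLU decoder: `intermediate_size` is 1.5x `hidden_size` (576 = 1.5 x 384, the usual compensation for the gated MLP's extra projection), and `max_position_embeddings` caps sequences at 1024 tokens.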