{
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "dtype": "float32",
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 768,
  "id2label": {
    "0": "abstraction",
    "1": "agonistic",
    "2": "agonistic_framing",
    "3": "analytical_distance",
    "4": "concrete",
    "5": "concrete_situational",
    "6": "direct_address",
    "7": "formulaic",
    "8": "formulaic_phrases",
    "9": "hedging",
    "10": "hedging_qualification",
    "11": "literate_feature",
    "12": "logical_connective",
    "13": "logical_connectives",
    "14": "oral_feature",
    "15": "parallelism",
    "16": "parataxis",
    "17": "passive",
    "18": "passive_agentless",
    "19": "passive_constructions",
    "20": "performance_markers",
    "21": "repetition",
    "22": "sound_patterns",
    "23": "subordination",
    "24": "textual_apparatus"
  },
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 1152,
  "label2id": {
    "abstraction": 0,
    "agonistic": 1,
    "agonistic_framing": 2,
    "analytical_distance": 3,
    "concrete": 4,
    "concrete_situational": 5,
    "direct_address": 6,
    "formulaic": 7,
    "formulaic_phrases": 8,
    "hedging": 9,
    "hedging_qualification": 10,
    "literate_feature": 11,
    "logical_connective": 12,
    "logical_connectives": 13,
    "oral_feature": 14,
    "parallelism": 15,
    "parataxis": 16,
    "passive": 17,
    "passive_agentless": 18,
    "passive_constructions": 19,
    "performance_markers": 20,
    "repetition": 21,
    "sound_patterns": 22,
    "subordination": 23,
    "textual_apparatus": 24
  },
  "layer_norm_eps": 1e-05,
  "layer_types": [
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention"
  ],
  "local_attention": 128,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 12,
  "num_hidden_layers": 22,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "repad_logits_with_grad": false,
  "rope_parameters": {
    "full_attention": {
      "rope_theta": 160000.0,
      "rope_type": "default"
    },
    "sliding_attention": {
      "rope_theta": 10000.0,
      "rope_type": "default"
    }
  },
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "tie_word_embeddings": true,
  "transformers_version": "5.0.0",
  "vocab_size": 50368
}