{
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": null,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "dtype": "float16",
  "embedding_dropout": 0.0,
  "eos_token_id": null,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "id2label": {
    "0": "All-or-Nothing Thinking",
    "1": "Emotional Reasoning",
    "2": "Fortune-telling",
    "3": "Labeling",
    "4": "Magnification",
    "5": "Mental Filter",
    "6": "Mind Reading",
    "7": "No Distortion",
    "8": "Overgeneralization",
    "9": "Personalization",
    "10": "Should Statements"
  },
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "label2id": {
    "All-or-Nothing Thinking": 0,
    "Emotional Reasoning": 1,
    "Fortune-telling": 2,
    "Labeling": 3,
    "Magnification": 4,
    "Mental Filter": 5,
    "Mind Reading": 6,
    "No Distortion": 7,
    "Overgeneralization": 8,
    "Personalization": 9,
    "Should Statements": 10
  },
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "repad_logits_with_grad": false,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "transformers_version": "4.57.3",
  "vocab_size": 50368
}
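
A minimal inference sketch against this config, assuming a checkpoint directory that contains this `config.json` (the `path/to/checkpoint` name below is a placeholder, not part of the config) and the standard `transformers` Auto classes; since `problem_type` is `single_label_classification`, the predicted distortion is the argmax over the 11 labels:

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Placeholder: point this at the repo or local directory holding this config.json.
checkpoint = "path/to/checkpoint"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
model.eval()

text = "I failed one exam, so I'm a complete failure."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 11): one score per label in id2label

# Single-label classification: take the highest-scoring of the 11 labels.
predicted_id = logits.argmax(dim=-1).item()
print(model.config.id2label[predicted_id])
```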
|
|