{ "activation_function": "gelu_new", "architectures": [ "GPTNeoForConditionalCausalLM" ], "attention_dropout": 0, "attention_layers": [ "global", "local", "global", "local" ], "attention_types": [ [ [ "global", "local" ], 2 ] ], "bos_token_id": 50256, "classifier_dropout": 0.1, "dtype": "float32", "embed_dropout": 0, "encoder_config": { "_name_or_path": "answerdotai/ModernBERT-base", "add_cross_attention": false, "architectures": [ "ModernBertForMaskedLM" ], "attention_bias": false, "attention_dropout": 0.0, "bad_words_ids": null, "begin_suppress_tokens": null, "bos_token_id": 50281, "chunk_size_feed_forward": 0, "classifier_activation": "gelu", "classifier_bias": false, "classifier_dropout": 0.0, "classifier_pooling": "mean", "cls_token_id": 50281, "cross_attention_hidden_size": null, "decoder_bias": true, "decoder_start_token_id": null, "deterministic_flash_attn": false, "diversity_penalty": 0.0, "do_sample": false, "dtype": "float32", "early_stopping": false, "embedding_dropout": 0.0, "encoder_no_repeat_ngram_size": 0, "eos_token_id": 50282, "exponential_decay_length_penalty": null, "finetuning_task": null, "forced_bos_token_id": null, "forced_eos_token_id": null, "global_attn_every_n_layers": 3, "global_rope_theta": 160000.0, "gradient_checkpointing": false, "hidden_activation": "gelu", "hidden_size": 768, "id2label": { "0": "LABEL_0", "1": "LABEL_1" }, "initializer_cutoff_factor": 2.0, "initializer_range": 0.02, "intermediate_size": 1152, "is_decoder": false, "is_encoder_decoder": false, "label2id": { "LABEL_0": 0, "LABEL_1": 1 }, "layer_norm_eps": 1e-05, "length_penalty": 1.0, "local_attention": 128, "local_rope_theta": 10000.0, "max_length": 20, "max_position_embeddings": 8192, "min_length": 0, "mlp_bias": false, "mlp_dropout": 0.0, "model_type": "modernbert", "no_repeat_ngram_size": 0, "norm_bias": false, "norm_eps": 1e-05, "num_attention_heads": 12, "num_beam_groups": 1, "num_beams": 1, "num_hidden_layers": 22, "num_return_sequences": 1, "output_attentions": false, "output_hidden_states": false, "output_scores": false, "pad_token_id": 50283, "position_embedding_type": "absolute", "prefix": null, "problem_type": null, "pruned_heads": {}, "remove_invalid_values": false, "repad_logits_with_grad": false, "repetition_penalty": 1.0, "return_dict": true, "return_dict_in_generate": false, "sep_token_id": 50282, "sparse_pred_ignore_index": -100, "sparse_prediction": false, "suppress_tokens": null, "task_specific_params": null, "temperature": 1.0, "tf_legacy_loss": false, "tie_encoder_decoder": false, "tie_word_embeddings": true, "tokenizer_class": null, "top_k": 50, "top_p": 1.0, "torchscript": false, "typical_p": 1.0, "use_bfloat16": false, "vocab_size": 50368 }, "eos_token_id": 50256, "gradient_checkpointing": false, "hidden_size": 768, "initializer_range": 0.02, "intermediate_size": null, "layer_norm_epsilon": 1e-05, "max_position_embeddings": 2048, "model_type": "gpt_neo", "num_heads": 16, "num_layers": 4, "pad_token_id": 50256, "resid_dropout": 0, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "transformers_version": "4.57.3", "use_cache": true, "vocab_size": 50257, "window_size": 256 }