{ "activation_function": "gelu_new", "architectures": [ "GPT2LMHeadModel" ], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "gradient_accumulation_steps": 16, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "learning_rate": 1e-05, "lr_scheduler_type": "cosine", "max_eval_steps": -1, "max_train_steps": 50000, "model_type": "gpt2", "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "num_warmup_steps": 2000, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "save_checkpoint_steps": 10000, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "seed": 1, "seq_length": 1024, "shuffle_buffer": 1000, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "train_batch_size": 1, "transformers_version": "4.26.0", "use_cache": true, "valid_batch_size": 1, "vocab_size": 50257, "weight_decay": 0.1 }