{ "architectures": [ "GPT2LMHeadModel" ], "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257, "bos_token_id": 50256, "eos_token_id": 50256, "pad_token_id": 50256, "activation_function": "gelu_new", "initializer_range": 0.02, "layer_norm_epsilon": 1e-5, "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "model_type": "gpt2" }