{
  "_name_or_path": "gpt2-large",
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_ctx": 1024,
  "n_embd": 1280,
  "n_head": 20,
  "n_inner": null,
  "n_layer": 36,
  "n_positions": 1024,
  "output_attentions": true,
  "pruned_heads": {
    "2": [5],
    "5": [4],
    "8": [1, 18, 19, 14],
    "9": [0, 18, 5, 9, 10, 11],
    "10": [1, 18, 17, 19, 15],
    "11": [0, 1, 18, 19, 7, 12, 13, 14],
    "12": [1, 3, 4, 5, 6, 7, 9, 11, 13, 14, 18],
    "13": [1, 4, 5, 6, 7, 14, 15],
    "14": [19, 3, 15],
    "15": [1, 17, 4, 12, 13],
    "16": [0, 2, 6, 9, 15],
    "17": [0, 4, 5, 6, 11, 12, 13, 14, 16, 17, 19],
    "18": [2, 4, 7, 9, 11, 12, 16, 17, 19],
    "19": [1, 2, 4, 6, 8, 11, 12, 13, 15, 17, 18],
    "20": [0, 1, 2, 4, 6, 7, 8, 10, 11, 14, 15, 16, 17, 18, 19],
    "21": [1, 2, 4, 5, 8, 11, 12, 14, 15, 16, 17, 18, 19],
    "22": [0, 2, 3, 4, 5, 6, 7, 8, 10, 12, 13, 14, 16, 18, 19],
    "23": [0, 1, 2, 3, 5, 7, 8, 12, 13, 14, 15],
    "24": [1, 5, 6, 8, 10, 11, 13, 14, 15, 16, 18, 19],
    "25": [0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19],
    "26": [0, 1, 2, 3, 4, 6, 8, 9, 10, 12, 13, 14, 15, 16, 17],
    "27": [0, 1, 2, 4, 6, 7, 9, 10, 12, 13, 14, 16, 17, 18],
    "28": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14, 16, 17, 18, 19],
    "29": [0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
    "30": [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
    "31": [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
    "32": [0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19],
    "33": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 12, 13, 14, 15, 17, 18, 19],
    "34": [0, 1, 3, 5, 7, 9, 12, 13, 14, 16, 17, 18, 19],
    "35": [1]
  },
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 50
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.36.2",
  "use_cache": true,
  "vocab_size": 50257
}