{
  "activation_function": "gelu_new",
  "architectures": [
    "GPTNeoForCausalLM"
  ],
  "attention_dropout": 0,
  "attention_layers": [
    "global",
    "global",
    "global",
    "global",
    "global",
    "global",
    "global",
    "global",
    "global",
    "global",
    "global",
    "global"
  ],
  "attention_types": [
    [
      [
        "global"
      ],
      12
    ]
  ],
  "embed_dropout": 0,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": null,
  "layer_norm_epsilon": 1e-05,
  "max_position_embeddings": 2048,
  "eos_token_id": 0,
  "bos_token_id": 0,
  "pad_token_id": 1,
  "model_type": "gpt_neo",
  "num_heads": 12,
  "num_layers": 12,
  "resid_dropout": 0,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "torch_dtype": "float32",
  "transformers_version": "4.20.0",
  "use_cache": true,
  "vocab_size": 63999,
  "window_size": 256,
  "task_specific_params": {
    "text-generation": {
      "do_sample": false,
      "max_length": 300,
      "num_beams": 5,
      "top_p": 0.95,
      "repetition_penalty": 3.0,
      "no_repeat_ngram_size": 3
    }
  }
}
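As a usage sketch (not part of the config file itself), the snippet below assumes this file is saved as `config.json` in the working directory and that the Hugging Face `transformers` library is installed; it builds a GPT-Neo model from these hyperparameters and pulls out the `text-generation` settings declared under `task_specific_params`:

```python
# Minimal sketch, assuming ./config.json holds the JSON above and
# `transformers` (around the "transformers_version" listed, 4.20.0) is installed.
from transformers import GPTNeoConfig, GPTNeoForCausalLM

# Load the hyperparameters from the JSON file.
config = GPTNeoConfig.from_json_file("config.json")

# Instantiating from a config alone gives random weights; use
# GPTNeoForCausalLM.from_pretrained(<checkpoint>) for a trained model.
model = GPTNeoForCausalLM(config)

# The "text-generation" block maps onto generate() keyword arguments.
gen_kwargs = config.task_specific_params["text-generation"]
```

Note that with `"do_sample": false` these generation settings select beam search (`num_beams: 5`), so the `top_p` value is effectively inert at generation time.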
|
|