```json
{
  "base_model_name_or_path": "gpt2-large",
  "encoder_hidden_size": 1280,
  "inference_mode": true,
  "num_attention_heads": 20,
  "num_layers": 36,
  "num_transformer_submodules": 1,
  "num_virtual_tokens": 20,
  "peft_type": "PREFIX_TUNING",
  "prefix_projection": false,
  "task_type": "CAUSAL_LM",
  "token_dim": 1280
}
```
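A config like this is normally written by the Hugging Face PEFT library rather than by hand. The sketch below shows one way it could be produced, assuming the `peft` and `transformers` packages are installed; only the prefix-tuning-specific fields are passed explicitly, while the architecture-dependent ones (`token_dim`, `num_layers`, `num_attention_heads`, `encoder_hidden_size`, `num_transformer_submodules`) are inferred from the gpt2-large base model when the adapter is attached.

```python
# Minimal sketch: recreating the adapter config above with PEFT,
# assuming `pip install peft transformers` has been run.
from peft import PrefixTuningConfig, TaskType, get_peft_model
from transformers import AutoModelForCausalLM

# Load the base model named in "base_model_name_or_path".
base_model = AutoModelForCausalLM.from_pretrained("gpt2-large")

# Only the prefix-tuning hyperparameters are set by hand; token_dim (1280),
# num_layers (36), and num_attention_heads (20) are filled in automatically
# from the gpt2-large architecture.
peft_config = PrefixTuningConfig(
    task_type=TaskType.CAUSAL_LM,  # "task_type": "CAUSAL_LM"
    num_virtual_tokens=20,         # "num_virtual_tokens": 20
    prefix_projection=False,       # "prefix_projection": false
)

model = get_peft_model(base_model, peft_config)
model.print_trainable_parameters()

# Saving the adapter writes adapter_config.json; PEFT flips
# "inference_mode" to true in the saved file so the adapter
# loads frozen for inference.
model.save_pretrained("gpt2-large-prefix-tuning")
```

Note that `prefix_projection: false` means the prefix embeddings are optimized directly; setting it to `True` would train an MLP of width `encoder_hidden_size` that projects the virtual tokens instead.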