{
  "adaptive": true,
  "architectures": [
    "TransfoXLLMHeadModel"
  ],
  "attn_type": 0,
  "clamp_len": 1000,
  "cutoffs": [
    20000,
    40000,
    200000
  ],
  "d_embed": 1024,
  "d_head": 64,
  "d_inner": 4096,
  "d_model": 1024,
  "div_val": 4,
  "dropatt": 0.2,
  "dropout": 0.2,
  "eos_token_id": 0,
  "init": "normal",
  "init_range": 0.01,
  "init_std": 0.02,
  "layer_norm_epsilon": 1e-05,
  "mem_len": 512,
  "model_type": "transfo-xl",
  "n_head": 16,
  "n_layer": 18,
  "pre_lnorm": false,
  "proj_init_std": 0.01,
  "same_length": true,
  "sample_softmax": -1,
  "tie_projs": [
    false,
    true,
    true,
    true
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.42.3",
  "untie_r": true,
  "vocab_size": 267735
}