{
  "architectures": [
    "LightningSpeechV2Model"
  ],
  "bos_token_id": 1,
  "encoder_config": {
    "attn_dropout": 0.0,
    "attn_flash": true,
    "conv_dropout": 0.0,
    "conv_kernel_size": 5,
    "cross_attention": false,
    "depth": 16,
    "dim_head": 64,
    "ff_dropout": 0.0,
    "ff_mult": 4,
    "heads": 16
  },
  "eos_token_id": 2,
  "hidden_size": 1024,
  "injection_layers": [
    7,
    9,
    11,
    13
  ],
  "model_type": "lightning_speech_v2",
  "num_semantic_tokens": 1024,
  "pad_token_id": 0,
  "quantizer_drop_prob": 0.75,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.42.3"
}