clip_type: clip_patchtst
device: cuda:0
loss_type: CE
text:
  coemb_dim: 512
  device: cuda:0
  llm_finetune: frozen
  output_type: cls
  pretrain_model_dim: 768
  pretrain_model_path: ${LONGCLIP_ROOT}
  textemb_hidden_dim: 1024
ts:
  activation: gelu
  coemb_dim: 512
  d_ff: 256
  d_model: 64
  device: cuda:0
  dropout: 0.1
  e_layers: 2
  factor: 1
  n_heads: 8
  n_var: 2
  output_attention: true
  padding: 0
  patch_len: 32
  pretrain_encoder_path: ''
  seq_len: 128
  stride: 32
  type: patchtst_mae_pretrain