mixed_precision: bf16
# Stage 1: hyper-modulator with mask prediction (Hydra-instantiated via _target_).
stage1:
  _target_: lofa.hyper_modulator_mask.TransformerV2
  context_dim: 4096      # width of the conditioning context vector
  d_model: 768           # transformer hidden size
  n_heads: 12            # attention heads per block
  n_blocks: 8            # number of transformer blocks
  patch_size: 96
  num_type: 8            # presumably number of target-layer types — confirm against model code
  lora_rank: 32          # presumably rank of generated LoRA updates — confirm
  num_depth: 30          # presumably number of target-network layers — confirm
  factorized: false
  qk_norm: true          # apply normalization to query/key projections
  use_cls: true
  decode_cls: false
  tau: 0.02
  temperature: 0.1
  out_act: softplus      # output activation name consumed by the model

# Stage 2: hyper-modulator transformer; mirrors stage 1's backbone hyper-parameters
# but has no tau / temperature / out_act (mask-specific knobs).
stage2:
  _target_: lofa.hyper_modulator_transformer.TransformerV2_stage2
  context_dim: 4096
  d_model: 768
  n_heads: 12
  n_blocks: 8
  patch_size: 96
  num_type: 8
  lora_rank: 32
  num_depth: 30
  factorized: false
  qk_norm: true
  use_cls: true
  decode_cls: false