{
"_class_name": "Transformer3DModel",
"_diffusers_version": "0.33.1",
"activation_fn": "gelu-approximate",
"adaln_norm_type": "layernorm",
"add_motion_score": true,
"attention_bias": true,
"attention_head_dim": 64,
"attention_type": "default",
"caption_channels": null,
"cfg_parallel_group": null,
"class_dropout_prob": 0.1,
"compile_mem_gc_threshold": 60,
"cond_channels": 0,
"condition_type": "inpaint_mask",
"cross_attention_dim": 2048,
"desired_step": false,
"double_self_attention": false,
"dropout": 0.0,
"enable_cached_compile": false,
"enable_cfg_parallel": false,
"gradient_checkpointing": "full-block",
"in_channels": 16,
"norm_elementwise_affine": false,
"norm_eps": 1e-05,
"norm_num_groups": 32,
"norm_type": "ada_norm_zero_nolabel",
"num_attention_heads": 28,
"num_block_chunks": 28,
"num_embeds_ada_norm": null,
"num_latent_frames": 24,
"num_layers": 28,
"only_cross_attention": false,
"out_channels": 16,
"patch_size": 2,
"patch_size_t": 1,
"sample_size": 32,
"sequence_parallel_size": 0,
"task_type": "i2v+++",
"time_embed_type": "rope",
"upcast_attention": false,
"use_depth_cond": true,
"use_depth_loss": false,
"use_navit": false,
"use_rope": true,
"vae_stride": 8
}