{
"_class_name": "MaskGiTUViT",
"_name_or_path": "./hf-model/muse-research-run/ema_model/",
"_version": "0.0.1",
"add_cond_embeds": true,
"add_cross_attention": true,
"add_micro_cond_embeds": true,
"attention_dropout": 0.0,
"block_has_attention": [
true
],
"block_num_heads": [
12
],
"block_out_channels": [
768
],
"codebook_size": 8192,
"cond_embed_dim": 768,
"encoder_hidden_size": 768,
"ffn_type": "glu",
"hidden_dropout": 0.0,
"hidden_size": 1024,
"in_channels": 768,
"initializer_range": 0.02,
"intermediate_size": 2816,
"layer_norm_before_mlm": false,
"layer_norm_embedddings": false,
"layer_norm_eps": 1e-06,
"learn_uncond_embeds": false,
"ln_elementwise_affine": true,
"mask_token_id": 8255,
"max_position_embeddings": 256,
"micro_cond_embed_dim": 1280,
"micro_cond_encode_dim": 256,
"norm_type": "rmsnorm",
"num_attention_heads": 16,
"num_classes": null,
"num_hidden_layers": 22,
"num_res_blocks": 3,
"num_vq_tokens": 256,
"patch_size": 1,
"project_encoder_hidden_states": true,
"res_ffn_factor": 4,
"use_bias": false,
"use_codebook_size_for_output": true,
"use_empty_embeds_for_uncond": true,
"use_encoder_layernorm": false,
"use_normformer": false,
"use_position_embeddings": false,
"use_vannilla_resblock": false,
"vocab_size": 8256,
"xavier_init_embed": true
}