{
  "_class_name": "MaskGitTransformer",
  "_version": "0.0.1",
  "add_cross_attention": true,
  "attention_dropout": 0.0,
  "codebook_size": 8192,
  "encoder_hidden_size": 1024,
  "hidden_dropout": 0.0,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-06,
  "mask_token_id": 8255,
  "max_position_embeddings": 256,
  "norm_type": "rmsnorm",
  "num_attention_heads": 16,
  "num_classes": null,
  "num_hidden_layers": 24,
  "num_vq_tokens": 256,
  "patch_size": 1,
  "project_encoder_hidden_states": false,
  "use_bias": false,
  "use_codebook_size_for_output": true,
  "use_conv_in_out": false,
  "use_encoder_layernorm": true,
  "use_mlm_layer": true,
  "use_mlm_layernorm": true,
  "use_normformer": false,
  "vocab_size": 8256
}