{
  "_class_name": "PriorTransformer",
  "_diffusers_version": "0.21.0.dev0",
  "added_emb_type": "prd",
  "additional_embeddings": 4,
  "attention_head_dim": 12,
  "clip_embed_dim": null,
  "dropout": 0.0,
  "embedding_dim": 32,
  "embedding_proj_dim": null,
  "embedding_proj_norm_type": null,
  "encoder_hid_proj_type": "linear",
  "norm_in_type": null,
  "num_attention_heads": 2,
  "num_embeddings": 77,
  "num_layers": 1,
  "time_embed_act_fn": "silu",
  "time_embed_dim": null
}