Gjm1234 committed on
Commit
5e02d17
·
verified ·
1 Parent(s): a0d27c3

Add norm_type to prevent KeyError in remapping

Browse files
Files changed (2) hide show
  1. transformer/config.json +11 -1
  2. vae/config.json +9 -6
transformer/config.json CHANGED
@@ -1 +1,11 @@
1
- {"_class_name": "transformer", "_commit": "placeholder"}
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_class_name": "Transformer2DModel",
3
+ "attention_head_dim": 64,
4
+ "cross_attention_dim": 1024,
5
+ "in_channels": 4,
6
+ "norm_num_groups": 32,
7
+ "num_attention_heads": 8,
8
+ "num_layers": 16,
9
+ "sample_size": 64,
10
+ "norm_type": "layer_norm"
11
+ }
vae/config.json CHANGED
@@ -1,14 +1,17 @@
1
  {
2
  "_class_name": "AutoencoderKL",
3
- "_diffusers_version": "0.30.0",
4
  "act_fn": "silu",
5
- "block_out_channels": [128, 256, 512, 512],
6
- "down_block_types": ["DownEncoderBlock2D", "DownEncoderBlock2D", "DownEncoderBlock2D", "DownEncoderBlock2D"],
 
 
 
 
7
  "latent_channels": 4,
8
  "norm_num_groups": 32,
9
- "sample_size": 256,
10
  "scaling_factor": 0.18215,
11
- "up_block_types": ["UpDecoderBlock2D", "UpDecoderBlock2D", "UpDecoderBlock2D", "UpDecoderBlock2D"],
12
  "in_channels": 3,
13
- "out_channels": 3
 
14
  }
 
1
  {
2
  "_class_name": "AutoencoderKL",
 
3
  "act_fn": "silu",
4
+ "block_out_channels": [
5
+ 128,
6
+ 256,
7
+ 512,
8
+ 512
9
+ ],
10
  "latent_channels": 4,
11
  "norm_num_groups": 32,
12
+ "sample_size": 512,
13
  "scaling_factor": 0.18215,
 
14
  "in_channels": 3,
15
+ "out_channels": 3,
16
+ "norm_type": "layer_norm"
17
  }