loulou2 committed on
Commit
ce1dfee
·
verified ·
1 Parent(s): f82497d

Update transformer/config.json

Browse files
Files changed (1) hide show
  1. transformer/config.json +2 -2
transformer/config.json CHANGED
@@ -12,8 +12,8 @@
12
  "in_channels": 64,
13
  "joint_attention_dim": 4096,
14
  "num_attention_heads": 24,
15
- "num_layers": 19,
16
- "num_single_layers": 38,
17
  "out_channels": null,
18
  "patch_size": 1,
19
  "pooled_projection_dim": 768
 
12
  "in_channels": 64,
13
  "joint_attention_dim": 4096,
14
  "num_attention_heads": 24,
15
+ "num_layers": 1,
16
+ "num_single_layers": 1,
17
  "out_channels": null,
18
  "patch_size": 1,
19
  "pooled_projection_dim": 768