Update transformer/config.json
Browse files — transformer/config.json (+2 −2)
transformer/config.json
CHANGED
|
@@ -12,8 +12,8 @@
|
|
| 12 |
"in_channels": 64,
|
| 13 |
"joint_attention_dim": 4096,
|
| 14 |
"num_attention_heads": 24,
|
| 15 |
-
"num_layers": 19,
|
| 16 |
-
"num_single_layers": 38,
|
| 17 |
"out_channels": null,
|
| 18 |
"patch_size": 1,
|
| 19 |
"pooled_projection_dim": 768
|
|
|
|
| 12 |
"in_channels": 64,
|
| 13 |
"joint_attention_dim": 4096,
|
| 14 |
"num_attention_heads": 24,
|
| 15 |
+
"num_layers": 1,
|
| 16 |
+
"num_single_layers": 1,
|
| 17 |
"out_channels": null,
|
| 18 |
"patch_size": 1,
|
| 19 |
"pooled_projection_dim": 768
|