Alex Puliatti committed on
Commit
7231d75
·
verified ·
1 Parent(s): 7117aed

Delete transformer/config.json

Browse files
Files changed (1) hide show
  1. transformer/config.json +0 -19
transformer/config.json DELETED
@@ -1,19 +0,0 @@
1
- {
2
- "_class_name": "FluxTransformer2DModel",
3
- "_diffusers_version": "0.32.0.dev0",
4
- "attention_head_dim": 128,
5
- "axes_dims_rope": [
6
- 16,
7
- 56,
8
- 56
9
- ],
10
- "guidance_embeds": true,
11
- "in_channels": 384,
12
- "joint_attention_dim": 4096,
13
- "num_attention_heads": 24,
14
- "num_layers": 19,
15
- "num_single_layers": 38,
16
- "out_channels": 64,
17
- "patch_size": 1,
18
- "pooled_projection_dim": 768
19
- }