AbstractPhil committed on
Commit
34a89ad
·
verified ·
1 Parent(s): de38fd8

Checkpoint step 20

Browse files
Files changed (1) hide show
  1. config.json +28 -0
config.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "hidden_size": 768,
  "num_attention_heads": 6,
  "attention_head_dim": 128,
  "in_channels": 16,
  "patch_size": 1,
  "joint_attention_dim": 768,
  "pooled_projection_dim": 768,
  "num_double_layers": 2,
  "num_single_layers": 4,
  "mlp_ratio": 4.0,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "use_lune_expert": true,
  "lune_expert_dim": 1280,
  "lune_hidden_dim": 512,
  "lune_dropout": 0.1,
  "freeze_lune": false,
  "use_sol_prior": true,
  "sol_spatial_size": 8,
  "sol_hidden_dim": 256,
  "sol_geometric_weight": 0.7,
  "freeze_sol": false,
  "use_t5_vec": true
}