Narsil committed on
Commit
9d806a7
·
unverified ·
1 Parent(s): 60eb087

Override out_indices config.

Browse files
Files changed (1) hide show
  1. config.json +23 -22
config.json CHANGED
@@ -1,24 +1,25 @@
1
  {
2
- "attention_probs_dropout_prob": 0.1,
3
- "drop_path_rate": 0.1,
4
- "hidden_act": "gelu",
5
- "hidden_dropout_prob": 0.1,
6
- "hidden_size": 32,
7
- "image_size": 30,
8
- "initializer_range": 0.02,
9
- "intermediate_size": 37,
10
- "layer_norm_eps": 1e-12,
11
- "layer_scale_init_value": 0.1,
12
- "model_type": "beit",
13
- "num_attention_heads": 4,
14
- "num_channels": 3,
15
- "num_hidden_layers": 5,
16
- "patch_size": 2,
17
- "transformers_version": "4.11.0.dev0",
18
- "use_absolute_position_embeddings": false,
19
- "use_mask_token": false,
20
- "use_mean_pooling": true,
21
- "use_relative_position_bias": false,
22
- "use_shared_relative_position_bias": false,
23
- "vocab_size": {}
 
24
  }
 
1
  {
2
+ "attention_probs_dropout_prob": 0.1,
3
+ "drop_path_rate": 0.1,
4
+ "hidden_act": "gelu",
5
+ "hidden_dropout_prob": 0.1,
6
+ "hidden_size": 32,
7
+ "image_size": 30,
8
+ "initializer_range": 0.02,
9
+ "intermediate_size": 37,
10
+ "layer_norm_eps": 1e-12,
11
+ "layer_scale_init_value": 0.1,
12
+ "model_type": "beit",
13
+ "num_attention_heads": 4,
14
+ "num_channels": 3,
15
+ "num_hidden_layers": 5,
16
+ "out_indices": [0, 1, 2, 3],
17
+ "patch_size": 2,
18
+ "transformers_version": "4.11.0.dev0",
19
+ "use_absolute_position_embeddings": false,
20
+ "use_mask_token": false,
21
+ "use_mean_pooling": true,
22
+ "use_relative_position_bias": false,
23
+ "use_shared_relative_position_bias": false,
24
+ "vocab_size": {}
25
  }