methodw committed
Commit 6c93d33 · 1 Parent(s): 3399730

switch to dinov2-large

{dinov2-base → dinov2-large}/config.json RENAMED
@@ -1,6 +1,4 @@
 {
-  "_name_or_path": "facebook/dinov2-base",
-  "apply_layernorm": true,
   "architectures": [
     "Dinov2Model"
   ],
@@ -8,41 +6,19 @@
   "drop_path_rate": 0.0,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
-  "hidden_size": 768,
+  "hidden_size": 1024,
   "image_size": 518,
   "initializer_range": 0.02,
   "layer_norm_eps": 1e-06,
   "layerscale_value": 1.0,
   "mlp_ratio": 4,
   "model_type": "dinov2",
-  "num_attention_heads": 12,
+  "num_attention_heads": 16,
   "num_channels": 3,
-  "num_hidden_layers": 12,
-  "out_features": [
-    "stage12"
-  ],
-  "out_indices": [
-    12
-  ],
+  "num_hidden_layers": 24,
   "patch_size": 14,
   "qkv_bias": true,
-  "reshape_hidden_states": true,
-  "stage_names": [
-    "stem",
-    "stage1",
-    "stage2",
-    "stage3",
-    "stage4",
-    "stage5",
-    "stage6",
-    "stage7",
-    "stage8",
-    "stage9",
-    "stage10",
-    "stage11",
-    "stage12"
-  ],
   "torch_dtype": "float32",
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.31.0.dev0",
   "use_swiglu_ffn": false
 }
{dinov2-base → dinov2-large}/preprocessor_config.json RENAMED
File without changes
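
Since the commit only renames the checkpoint directory and swaps in the large-variant config, downstream loading code just needs to point at the new path. A minimal sketch, assuming the standard transformers AutoImageProcessor/Dinov2Model API and a local clone of this repo (the "dinov2-large" path below is illustrative):

# A minimal sketch (not part of the commit): load the renamed checkpoint
# and verify the config values this commit changes. "dinov2-large" is the
# directory name from this repo's layout after the rename.
import numpy as np
from transformers import AutoImageProcessor, Dinov2Model

ckpt = "dinov2-large"  # was "dinov2-base" before this commit

processor = AutoImageProcessor.from_pretrained(ckpt)
model = Dinov2Model.from_pretrained(ckpt)

# The large variant widens and deepens the base config:
assert model.config.hidden_size == 1024        # was 768
assert model.config.num_attention_heads == 16  # was 12
assert model.config.num_hidden_layers == 24    # was 12

# Forward a dummy image; the token count is (H/14) * (W/14) patches plus
# one CLS token, where H and W are whatever size the (unchanged)
# preprocessor resizes/crops to.
image = np.random.randint(0, 256, (518, 518, 3), dtype=np.uint8)
inputs = processor(images=image, return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (1, num_tokens, 1024)

Note that preprocessor_config.json is unchanged by this commit, so only the model weights and architecture dimensions differ between the base and large checkpoints; the image preprocessing pipeline stays the same.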