tejasmakode committed
Commit cb1963b · verified · 1 Parent(s): 67cb0ac

Update config.json

Files changed (1)
  1. config.json +7 -82
config.json CHANGED
@@ -1,40 +1,13 @@
 {
   "_name_or_path": "microsoft/Florence-2-base",
-  "architectures": [
-    "Florence2ForConditionalGeneration"
-  ],
+  "architectures": ["Florence2ForConditionalGeneration"],
   "vision_config": {
     "model_type": "davit",
-    "depths": [
-      1,
-      1,
-      9,
-      1
-    ],
-    "dim_embed": [
-      128,
-      256,
-      512,
-      1024
-    ],
-    "num_heads": [
-      4,
-      8,
-      16,
-      32
-    ],
-    "patch_size": [
-      7,
-      3,
-      3,
-      3
-    ],
-    "patch_stride": [
-      4,
-      2,
-      2,
-      2
-    ],
+    "depths": [1, 1, 9, 1],
+    "dim_embed": [128, 256, 512, 1024],
+    "num_heads": [4, 8, 16, 32],
+    "patch_size": [7, 3, 3, 3],
+    "patch_stride": [4, 2, 2, 2],
     "window_size": 12
   },
   "auto_map": {
@@ -49,7 +22,6 @@
   "pad_token_id": 1,
   "projection_dim": 768,
   "text_config": {
-    "_name_or_path": "",
     "activation_dropout": 0.1,
     "activation_function": "gelu",
     "add_bias_logits": false,
@@ -82,52 +54,5 @@
     "vocab_size": 51289
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.41.2",
-  "vision_config": {
-    "model_type": "davit",
-    "depths": [
-      1,
-      1,
-      9,
-      1
-    ],
-    "dim_embed": [
-      128,
-      256,
-      512,
-      1024
-    ],
-    "num_heads": [
-      4,
-      8,
-      16,
-      32
-    ],
-    "patch_size": [
-      7,
-      3,
-      3,
-      3
-    ],
-    "patch_stride": [
-      4,
-      2,
-      2,
-      2
-    ],
-    "window_size": 12,
-    "image_feature_source": [
-      "spatial_avg_pool",
-      "temporal_avg_pool"
-    ],
-    "image_pos_embed": {
-      "max_pos_embeddings": 50,
-      "type": "learned_abs_2d"
-    },
-    "visual_temporal_embedding": {
-      "max_temporal_embeddings": 100,
-      "type": "COSINE"
-    }
-  },
-  "vocab_size": 51289
+  "transformers_version": "4.41.2"
 }
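In short, this commit is a formatting and deduplication cleanup of config.json: the multi-line arrays under "vision_config" are collapsed to one line each, the empty "_name_or_path" is dropped from "text_config", and a duplicated "vision_config" block plus a stray trailing "vocab_size" are removed from the end of the file, leaving the model settings themselves unchanged. A minimal sketch of why the array collapsing is safe, using only the standard library and abbreviated stand-ins for the snippets above: JSON whitespace is insignificant, so both spellings parse to the same value.

import json

# Multi-line and single-line JSON arrays differ only in whitespace,
# so collapsing them changes the file's formatting, not its meaning.
before = '{"depths": [\n  1,\n  1,\n  9,\n  1\n]}'
after = '{"depths": [1, 1, 9, 1]}'

assert json.loads(before) == json.loads(after)
print(json.loads(after))  # {'depths': [1, 1, 9, 1]}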
 
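To sanity-check the edited file end to end, a hedged loading sketch: Florence-2 repos ship their model classes as custom code wired up through the "auto_map" entry in config.json, so transformers must be called with trust_remote_code=True. The repo id below is the upstream checkpoint named in "_name_or_path"; this repo's own id is not shown on this page, so substitute it to exercise the committed config.

from transformers import AutoModelForCausalLM, AutoProcessor

# Assumption: the upstream id from "_name_or_path" stands in here;
# replace it with this repo's id to test the edited config.json.
repo_id = "microsoft/Florence-2-base"

# trust_remote_code=True is required because the classes are resolved
# through the "auto_map" entry rather than built-in transformers code.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
processor = AutoProcessor.from_pretrained(repo_id, trust_remote_code=True)

print(model.config.vision_config.depths)  # expected: [1, 1, 9, 1]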