Deekshith-Dade committed on
Commit
b071aff
·
verified ·
1 Parent(s): a161bed

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -4
config.json CHANGED
@@ -5,8 +5,6 @@
5
  "initializer_factor": 1.0,
6
  "model_type": "siglip",
7
  "text_config": {
8
- "attention_dropout": 0.0,
9
- "hidden_act": "gelu_pytorch_tanh",
10
  "hidden_size": 1152,
11
  "intermediate_size": 4304,
12
  "layer_norm_eps": 1e-06,
@@ -20,8 +18,6 @@
20
  "torch_dtype": "float32",
21
  "transformers_version": "4.52.4",
22
  "vision_config": {
23
- "attention_dropout": 0.0,
24
- "hidden_act": "gelu_pytorch_tanh",
25
  "hidden_size": 1152,
26
  "image_size": 448,
27
  "intermediate_size": 4304,
 
5
  "initializer_factor": 1.0,
6
  "model_type": "siglip",
7
  "text_config": {
 
 
8
  "hidden_size": 1152,
9
  "intermediate_size": 4304,
10
  "layer_norm_eps": 1e-06,
 
18
  "torch_dtype": "float32",
19
  "transformers_version": "4.52.4",
20
  "vision_config": {
 
 
21
  "hidden_size": 1152,
22
  "image_size": 448,
23
  "intermediate_size": 4304,