Update config.json
Browse files — config.json (+4, −2)
config.json
CHANGED
|
@@ -88,7 +88,8 @@
 88 |     "hidden_size": 1280,
 89 |     "intermediate_size": 5120,
 90 |     "num_attention_heads": 20,
 91 | -   "num_hidden_layers": 32
 92 |   },
 93 |   "torch_dtype": "float32",
 94 |   "transformers_version": null,
|
@@ -175,6 +176,7 @@
175 |     "intermediate_size": 8192,
176 |     "num_attention_heads": 16,
177 |     "num_hidden_layers": 48,
178 | -   "patch_size": 14
179 |   }
180 | }
|
|
|
 88 |     "hidden_size": 1280,
 89 |     "intermediate_size": 5120,
 90 |     "num_attention_heads": 20,
 91 | +   "num_hidden_layers": 32,
 92 | +   "projection_dim": 1280
 93 |   },
 94 |   "torch_dtype": "float32",
 95 |   "transformers_version": null,
|
|
176 |     "intermediate_size": 8192,
177 |     "num_attention_heads": 16,
178 |     "num_hidden_layers": 48,
179 | +   "patch_size": 14,
180 | +   "projection_dim": 1280
181 |   }
182 | }