Add text_config
config.json CHANGED (+22 -1)
@@ -36,5 +36,26 @@
   "vit_remove_last":false,
   "vit_transformer_width":512,
   "vit_width":768,
-  "vocab_size":50265
+  "vocab_size":50265,
+  "text_config":{
+    "architectures": ["BridgeTowerTextModel"],
+    "vocab_size": 50265,
+    "hidden_size": 768,
+    "num_hidden_layers": 12,
+    "num_attention_heads": 12,
+    "intermediate_size": 3072,
+    "hidden_act": "gelu",
+    "hidden_dropout_prob": 0.1,
+    "attention_probs_dropout_prob": 0.1,
+    "max_position_embeddings": 514,
+    "type_vocab_size": 1,
+    "initializer_range": 0.02,
+    "layer_norm_eps": 1e-05,
+    "pad_token_id": 1,
+    "bos_token_id": 0,
+    "eos_token_id": 2,
+    "position_embedding_type": "absolute",
+    "use_cache": true,
+    "classifier_dropout": null
+  }
 }
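With the nested text_config in place, transformers parses it into a BridgeTowerTextConfig when the model config is loaded. A minimal sketch of reading the new block back, assuming this commit lands on the BridgeTower/bridgetower-base repository (a placeholder; substitute the actual repo id):

```python
from transformers import BridgeTowerConfig

# Assumed repo id; replace with the repository this commit belongs to.
config = BridgeTowerConfig.from_pretrained("BridgeTower/bridgetower-base")

# The "text_config" block added above is exposed as a BridgeTowerTextConfig.
text = config.text_config
print(text.vocab_size)                # 50265
print(text.max_position_embeddings)  # 514
print(text.use_cache)                # True
```

The values mirror roberta-base (vocab_size 50265, max_position_embeddings 514, pad_token_id 1, layer_norm_eps 1e-05), consistent with BridgeTower's use of a RoBERTa text encoder.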