ajtorek committed · Commit 048e2d9 (verified) · 1 parent: bc6e0ed

Upload TextTransformer

Files changed (2):
  1. config.json +8 -8
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,32 +1,32 @@
 {
-  "_name_or_path": "WAC_Masked_language_modeling_test/output_models/pytorch_model.bin",
   "add_cross_attentions": false,
   "architectures": [
     "TextTransformer"
   ],
   "attention_implementation": "sdpa",
   "attention_probability_dropout_probability": 0.1,
-  "bos_token_id": 0,
+  "bos_token_id": null,
+  "classifier_dropout": null,
   "dropout_probability": 0.1,
   "embedding_size": 128,
-  "eos_token_id": 2,
+  "eos_token_id": null,
   "hidden_activation_function": "gelu",
   "hidden_size": 256,
   "initializer_factor": 1.0,
   "intermediate_size": 1024,
   "layer_norm_eps": 1e-12,
-  "mask_token_id": 4,
-  "max_position_embeddings": 514,
+  "mask_token_id": null,
+  "max_position_embeddings": 512,
   "num_attention_heads": 4,
   "num_hidden_layers": 12,
   "num_intermediate_layers": 1,
-  "pad_token_id": 1,
+  "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
   "transformers_version": "4.48.3",
-  "type_vocab_size": 1,
+  "type_vocab_size": 2,
   "use_cache": false,
-  "vocab_size": 50265,
+  "vocab_size": 30522,
   "wac_distribution_layers": [],
   "wac_distribution_matrix": "none",
   "wac_embedding_size": -1
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4ff1476b91a44fe374cc0e91dbd01e5d6aeb0c78e6f2a888f1c9a6e0bfbcb035
-size 114715936
+oid sha256:7263e7181b95e64a360281c8981d10c140942215bbf9b810748f87751b0c9876
+size 54222832
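Note (not part of the commit itself): model.safetensors is stored via Git LFS, so the diff above compares pointer files rather than the weights; the oid is the SHA-256 of the actual file. A minimal sketch, assuming the weights were fetched with `git lfs pull`, for verifying the download against the new pointer:

import hashlib

EXPECTED = "7263e7181b95e64a360281c8981d10c140942215bbf9b810748f87751b0c9876"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    # Hash in 1 MiB chunks to avoid loading the ~54 MB file into memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == EXPECTED, "downloaded weights do not match the LFS pointer"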