negfir committed
Commit 4f88b96 · 1 Parent(s): eb803ec
Files changed (2):
  1. config.json +16 -14
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,24 +1,26 @@
 {
-  "_name_or_path": "distilbert-base-uncased",
-  "activation": "gelu",
+  "_name_or_path": "bert-base-uncased",
   "architectures": [
-    "DistilBertForMaskedLM"
+    "BertForMaskedLM"
   ],
-  "attention_dropout": 0.1,
-  "dim": 768,
-  "dropout": 0.1,
-  "hidden_dim": 3072,
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
   "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "distilbert",
-  "n_heads": 12,
-  "n_layers": 6,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 6,
   "pad_token_id": 0,
-  "qa_dropout": 0.1,
-  "seq_classif_dropout": 0.2,
-  "sinusoidal_pos_embds": false,
-  "tie_weights_": true,
+  "position_embedding_type": "absolute",
   "torch_dtype": "float32",
   "transformers_version": "4.17.0",
+  "type_vocab_size": 2,
+  "use_cache": true,
   "vocab_size": 30522
 }
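The change swaps the DistilBERT config for a 6-layer BERT config of the same width (hidden size 768, 12 heads, intermediate size 3072). A minimal sketch, not part of the commit, of instantiating the new architecture with the transformers library, using only the field values shown in the diff above:

# Build the post-commit config from the values in the new config.json
# and instantiate the matching masked-LM model.
from transformers import BertConfig, BertForMaskedLM

config = BertConfig(
    attention_probs_dropout_prob=0.1,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    hidden_size=768,
    initializer_range=0.02,
    intermediate_size=3072,
    layer_norm_eps=1e-12,
    max_position_embeddings=512,
    num_attention_heads=12,
    num_hidden_layers=6,  # half the depth of bert-base-uncased
    pad_token_id=0,
    position_embedding_type="absolute",
    type_vocab_size=2,
    vocab_size=30522,
)
model = BertForMaskedLM(config)

# Sanity check: ~67M float32 parameters, i.e. roughly the
# 267,994,987-byte checkpoint recorded in the LFS pointer below.
print(sum(p.numel() for p in model.parameters()))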
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f9a22054b9b20c5004e7775e4ae4d6e4781eecb912040438106849209d66af15
-size 267983023
+oid sha256:ec0a104c78f4f1ef059160736a0f561611747829b82726cac954d4bf4afab8aa
+size 267994987
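The weight file is stored via Git LFS, so the diff updates only the pointer (the sha256 oid and byte size of the real object), not the binary itself. A minimal sketch, not part of the repo, for verifying a downloaded pytorch_model.bin against the new pointer:

# Hash the downloaded file and compare against the LFS pointer fields above.
import hashlib

EXPECTED_OID = "ec0a104c78f4f1ef059160736a0f561611747829b82726cac954d4bf4afab8aa"
EXPECTED_SIZE = 267994987

def verify(path: str) -> bool:
    sha = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks to avoid loading the ~268 MB file at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == EXPECTED_OID and size == EXPECTED_SIZE

print(verify("pytorch_model.bin"))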