Sasidhar1826 committed on
Commit
e147ce7
·
verified ·
1 Parent(s): e11c0c0

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +6 -14
config.json CHANGED
@@ -1,8 +1,7 @@
1
  {
2
- "architectures": [
3
- "BertForSequenceClassification"
4
- ],
5
  "attention_probs_dropout_prob": 0.1,
 
6
  "hidden_act": "gelu",
7
  "hidden_dropout_prob": 0.1,
8
  "hidden_size": 768,
@@ -14,17 +13,10 @@
14
  "num_attention_heads": 12,
15
  "num_hidden_layers": 12,
16
  "pad_token_id": 0,
 
 
17
  "type_vocab_size": 2,
 
18
  "vocab_size": 30522,
19
- "id2label": {
20
- "0": "LABEL_0",
21
- "1": "LABEL_1"
22
- },
23
- "label2id": {
24
- "LABEL_0": 0,
25
- "LABEL_1": 1
26
- },
27
- "problem_type": "single_label_classification",
28
- "torch_dtype": "float32",
29
- "transformers_version": "4.0.0"
30
  }
 
1
  {
2
+ "architectures": ["BertForSequenceClassification"],
 
 
3
  "attention_probs_dropout_prob": 0.1,
4
+ "gradient_checkpointing": false,
5
  "hidden_act": "gelu",
6
  "hidden_dropout_prob": 0.1,
7
  "hidden_size": 768,
 
13
  "num_attention_heads": 12,
14
  "num_hidden_layers": 12,
15
  "pad_token_id": 0,
16
+ "position_embedding_type": "absolute",
17
+ "transformers_version": "4.6.0.dev0",
18
  "type_vocab_size": 2,
19
+ "use_cache": true,
20
  "vocab_size": 30522,
21
+ "num_labels": 2
 
 
 
 
 
 
 
 
 
 
22
  }