mjpsm committed on
Commit
5f7be65
·
verified ·
1 Parent(s): c899e5e

Upload DistilBertForSequenceClassification

Browse files
Files changed (1) hide show
  1. config.json +8 -1
config.json CHANGED
@@ -10,7 +10,15 @@
10
  "dtype": "float32",
11
  "eos_token_id": null,
12
  "hidden_dim": 3072,
 
 
 
 
13
  "initializer_range": 0.02,
 
 
 
 
14
  "max_position_embeddings": 512,
15
  "model_type": "distilbert",
16
  "n_heads": 12,
@@ -23,6 +31,5 @@
23
  "tie_weights_": true,
24
  "tie_word_embeddings": true,
25
  "transformers_version": "5.0.0",
26
- "use_cache": false,
27
  "vocab_size": 30522
28
  }
 
10
  "dtype": "float32",
11
  "eos_token_id": null,
12
  "hidden_dim": 3072,
13
+ "id2label": {
14
+ "0": "NOT_CHECKIN",
15
+ "1": "CHECKIN"
16
+ },
17
  "initializer_range": 0.02,
18
+ "label2id": {
19
+ "CHECKIN": 1,
20
+ "NOT_CHECKIN": 0
21
+ },
22
  "max_position_embeddings": 512,
23
  "model_type": "distilbert",
24
  "n_heads": 12,
 
31
  "tie_weights_": true,
32
  "tie_word_embeddings": true,
33
  "transformers_version": "5.0.0",
 
34
  "vocab_size": 30522
35
  }