VIOLET21 committed on
Commit
104b6b8
·
verified ·
1 Parent(s): f80c7ec

Upload TextClassificationPipeline

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. special_tokens_map.json +35 -5
  3. tokenizer.json +0 -0
config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "pipeline_tag": "text-classification",
3
  "_num_labels": 5,
4
  "architectures": [
5
  "BertForSequenceClassification"
@@ -29,6 +28,7 @@
29
  "num_hidden_layers": 12,
30
  "output_past": true,
31
  "pad_token_id": 0,
 
32
  "pooler_fc_size": 768,
33
  "pooler_num_attention_heads": 12,
34
  "pooler_num_fc_layers": 3,
 
1
  {
 
2
  "_num_labels": 5,
3
  "architectures": [
4
  "BertForSequenceClassification"
 
28
  "num_hidden_layers": 12,
29
  "output_past": true,
30
  "pad_token_id": 0,
31
+ "pipeline_tag": "text-classification",
32
  "pooler_fc_size": 768,
33
  "pooler_num_attention_heads": 12,
34
  "pooler_num_fc_layers": 3,
special_tokens_map.json CHANGED
@@ -1,7 +1,37 @@
1
  {
2
- "cls_token": "[CLS]",
3
- "mask_token": "[MASK]",
4
- "pad_token": "[PAD]",
5
- "sep_token": "[SEP]",
6
- "unk_token": "[UNK]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  }
 
1
  {
2
+ "cls_token": {
3
+ "content": "[CLS]",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "mask_token": {
10
+ "content": "[MASK]",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "[PAD]",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "sep_token": {
24
+ "content": "[SEP]",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "unk_token": {
31
+ "content": "[UNK]",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ }
37
  }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff