lekhnathrijal committed on
Commit
83bb3b5
·
1 Parent(s): 72afa9d

Upload DistilBertForTokenClassification

Browse files
Files changed (1) hide show
  1. config.json +14 -16
config.json CHANGED
@@ -1,15 +1,14 @@
1
  {
2
- "_name_or_path": "google-bert/bert-base-uncased",
 
3
  "architectures": [
4
- "BertForTokenClassification"
5
  ],
6
- "attention_probs_dropout_prob": 0.1,
7
- "classifier_dropout": null,
 
8
  "finetuning_task": "ner",
9
- "gradient_checkpointing": false,
10
- "hidden_act": "gelu",
11
- "hidden_dropout_prob": 0.1,
12
- "hidden_size": 768,
13
  "id2label": {
14
  "0": "B-SCAN_PERIOD",
15
  "1": "I-SCAN_PERIOD",
@@ -20,7 +19,6 @@
20
  "6": "O"
21
  },
22
  "initializer_range": 0.02,
23
- "intermediate_size": 3072,
24
  "label2id": {
25
  "B-OPTION1": 2,
26
  "B-OPTION2": 4,
@@ -30,16 +28,16 @@
30
  "I-SCAN_PERIOD": 1,
31
  "O": 6
32
  },
33
- "layer_norm_eps": 1e-12,
34
  "max_position_embeddings": 512,
35
- "model_type": "bert",
36
- "num_attention_heads": 12,
37
- "num_hidden_layers": 12,
38
  "pad_token_id": 0,
39
- "position_embedding_type": "absolute",
 
 
 
40
  "torch_dtype": "float32",
41
  "transformers_version": "4.48.1",
42
- "type_vocab_size": 2,
43
- "use_cache": true,
44
  "vocab_size": 30522
45
  }
 
1
  {
2
+ "_name_or_path": "distilbert/distilbert-base-uncased",
3
+ "activation": "gelu",
4
  "architectures": [
5
+ "DistilBertForTokenClassification"
6
  ],
7
+ "attention_dropout": 0.1,
8
+ "dim": 768,
9
+ "dropout": 0.1,
10
  "finetuning_task": "ner",
11
+ "hidden_dim": 3072,
 
 
 
12
  "id2label": {
13
  "0": "B-SCAN_PERIOD",
14
  "1": "I-SCAN_PERIOD",
 
19
  "6": "O"
20
  },
21
  "initializer_range": 0.02,
 
22
  "label2id": {
23
  "B-OPTION1": 2,
24
  "B-OPTION2": 4,
 
28
  "I-SCAN_PERIOD": 1,
29
  "O": 6
30
  },
 
31
  "max_position_embeddings": 512,
32
+ "model_type": "distilbert",
33
+ "n_heads": 12,
34
+ "n_layers": 6,
35
  "pad_token_id": 0,
36
+ "qa_dropout": 0.1,
37
+ "seq_classif_dropout": 0.2,
38
+ "sinusoidal_pos_embds": false,
39
+ "tie_weights_": true,
40
  "torch_dtype": "float32",
41
  "transformers_version": "4.48.1",
 
 
42
  "vocab_size": 30522
43
  }