winstxnhdw committed on
Commit caa04b2 · verified · 1 Parent(s): eb2c31f

Update config.json

Files changed (1)
1. config.json +66 -1
config.json CHANGED
@@ -3,5 +3,70 @@
  "eos_token": "</s>",
  "layer_norm_epsilon": 1e-12,
  "multi_query_attention": false,
- "unk_token": "[UNK]"
+ "unk_token": "[UNK]",
+ "architectures": [
+   "BertForTokenClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": null,
+ "directionality": "bidi",
+ "do_sample": false,
+ "eos_token_ids": null,
+ "finetuning_task": null,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "id2label": {
+   "0": "O",
+   "1": "B-MISC",
+   "2": "I-MISC",
+   "3": "B-PER",
+   "4": "I-PER",
+   "5": "B-ORG",
+   "6": "I-ORG",
+   "7": "B-LOC",
+   "8": "I-LOC"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "is_decoder": false,
+ "label2id": {
+   "B-LOC": 7,
+   "B-MISC": 1,
+   "B-ORG": 5,
+   "B-PER": 3,
+   "I-LOC": 8,
+   "I-MISC": 2,
+   "I-ORG": 6,
+   "I-PER": 4,
+   "O": 0
+ },
+ "layer_norm_eps": 1e-12,
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 16,
+ "num_beams": 1,
+ "num_hidden_layers": 24,
+ "num_labels": 9,
+ "num_return_sequences": 1,
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_past": true,
+ "pad_token_id": 0,
+ "pooler_fc_size": 768,
+ "pooler_num_attention_heads": 12,
+ "pooler_num_fc_layers": 3,
+ "pooler_size_per_head": 128,
+ "pooler_type": "first_token_transform",
+ "pruned_heads": {},
+ "repetition_penalty": 1.0,
+ "temperature": 1.0,
+ "top_k": 50,
+ "top_p": 1.0,
+ "torchscript": false,
+ "type_vocab_size": 2,
+ "use_bfloat16": false,
+ "vocab_size": 28996
  }
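
For context, a minimal sketch of how a consumer would pick up the fields added by this commit once it lands. The repository path "user/bert-large-cased-ner" is a hypothetical placeholder (the actual repo id is not part of this diff); the calls shown are standard transformers APIs.

# Hedged sketch, assuming the transformers library and a placeholder repo path.
from transformers import AutoConfig, AutoModelForTokenClassification

config = AutoConfig.from_pretrained("user/bert-large-cased-ner")  # parses config.json
assert config.model_type == "bert"
assert config.num_labels == 9            # inferred from the 9-entry id2label map
assert config.id2label[7] == "B-LOC"     # string keys are cast to int when the config loads

# "architectures": ["BertForTokenClassification"] plus num_labels lets the Auto
# classes build a BERT encoder with a 9-way token-classification head.
model = AutoModelForTokenClassification.from_pretrained("user/bert-large-cased-ner")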