dbaezaj committed on
Commit
ebb5114
·
1 Parent(s): 547af7d

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. config.json +39 -19
  2. model.safetensors +2 -2
config.json CHANGED
@@ -12,28 +12,48 @@
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 768,
14
  "id2label": {
15
- "0": "LABEL_0",
16
- "1": "LABEL_1",
17
- "2": "LABEL_2",
18
- "3": "LABEL_3",
19
- "4": "LABEL_4",
20
- "5": "LABEL_5",
21
- "6": "LABEL_6",
22
- "7": "LABEL_7",
23
- "8": "LABEL_8"
 
 
 
 
 
 
 
 
 
 
24
  },
25
  "initializer_range": 0.02,
26
  "intermediate_size": 3072,
27
  "label2id": {
28
- "LABEL_0": 0,
29
- "LABEL_1": 1,
30
- "LABEL_2": 2,
31
- "LABEL_3": 3,
32
- "LABEL_4": 4,
33
- "LABEL_5": 5,
34
- "LABEL_6": 6,
35
- "LABEL_7": 7,
36
- "LABEL_8": 8
 
 
 
 
 
 
 
 
 
 
37
  },
38
  "layer_norm_eps": 1e-12,
39
  "max_position_embeddings": 130,
@@ -44,7 +64,7 @@
44
  "position_embedding_type": "absolute",
45
  "problem_type": "single_label_classification",
46
  "torch_dtype": "float32",
47
- "transformers_version": "4.35.0",
48
  "type_vocab_size": 1,
49
  "use_cache": true,
50
  "vocab_size": 30002
 
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 768,
14
  "id2label": {
15
+ "0": "O",
16
+ "1": "B-EVENT",
17
+ "2": "I-EVENT",
18
+ "3": "B-GROUP",
19
+ "4": "I-GROUP",
20
+ "5": "B-LOC",
21
+ "6": "I-LOC",
22
+ "7": "B-ORG",
23
+ "8": "I-ORG",
24
+ "9": "B-OTHER",
25
+ "10": "I-OTHER",
26
+ "11": "B-PER",
27
+ "12": "I-PER",
28
+ "13": "B-PROD",
29
+ "14": "I-PROD",
30
+ "15": "B-TIME",
31
+ "16": "I-TIME",
32
+ "17": "B-TITLE",
33
+ "18": "I-TITLE"
34
  },
35
  "initializer_range": 0.02,
36
  "intermediate_size": 3072,
37
  "label2id": {
38
+ "B-EVENT": 1,
39
+ "B-GROUP": 3,
40
+ "B-LOC": 5,
41
+ "B-ORG": 7,
42
+ "B-OTHER": 9,
43
+ "B-PER": 11,
44
+ "B-PROD": 13,
45
+ "B-TIME": 15,
46
+ "B-TITLE": 17,
47
+ "I-EVENT": 2,
48
+ "I-GROUP": 4,
49
+ "I-LOC": 6,
50
+ "I-ORG": 8,
51
+ "I-OTHER": 10,
52
+ "I-PER": 12,
53
+ "I-PROD": 14,
54
+ "I-TIME": 16,
55
+ "I-TITLE": 18,
56
+ "O": 0
57
  },
58
  "layer_norm_eps": 1e-12,
59
  "max_position_embeddings": 130,
 
64
  "position_embedding_type": "absolute",
65
  "problem_type": "single_label_classification",
66
  "torch_dtype": "float32",
67
+ "transformers_version": "4.35.1",
68
  "type_vocab_size": 1,
69
  "use_cache": true,
70
  "vocab_size": 30002
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4008387a82701ce6adbfec5dd211286563e1d17bd9082bb75867291093aa5af7
3
- size 435206756
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:10a52ec094aac2478c3cbc243bb30ecd2ba922460a3b17ce27c33cae4c2deee5
3
+ size 435237516