BDas committed on
Commit
b642287
·
1 Parent(s): ddf7cb7

Create new file

Browse files
Files changed (1) hide show
  1. ner.py +222 -0
ner.py ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# coding=utf-8
# Copyright 2020 HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python3
"""Introduction to the CoNLL-2003 Shared Task: Language-Independent Named Entity Recognition"""

import os

import datasets


logger = datasets.logging.get_logger(__name__)


# NOTE(review): citation/description are placeholder text ("aa") — fill in
# real citation and dataset description before publishing.
_CITATION = """\
aa
"""

_DESCRIPTION = """\
aa
"""

# Archive with the three pre-split CoNLL-style text files below.
_URL = "https://raw.githubusercontent.com/BihterDass/ner/main/ner-main.zip"
_TRAINING_FILE = "train.txt"
_DEV_FILE = "valid.txt"
_TEST_FILE = "test.txt"
class Conll2003Config(datasets.BuilderConfig):
    """BuilderConfig for Conll2003."""

    def __init__(self, **kwargs):
        """BuilderConfig for Conll2003.

        Args:
            **kwargs: keyword arguments forwarded to super.
        """
        # Zero-argument super() — the two-argument Python-2 form is
        # unnecessary in a Python-3-only file (this one is "Lint as: python3").
        super().__init__(**kwargs)
class Conll2003(datasets.GeneratorBasedBuilder):
    """Conll2003 dataset."""

    BUILDER_CONFIGS = [
        Conll2003Config(name="conll2003", version=datasets.Version("1.0.0"), description="Conll2003 dataset"),
    ]

    def _info(self):
        """Return the DatasetInfo describing the four per-example columns.

        Each example holds a string id, the token sequence, and three
        aligned tag sequences (POS, chunk, NER) encoded as ClassLabels.
        The label name order below fixes the integer ids, so it must not
        be reordered.
        """
        pos_label_names = [
            '"',
            "''",
            "#",
            "$",
            "(",
            ")",
            ",",
            ".",
            ":",
            "``",
            "CC",
            "CD",
            "DT",
            "EX",
            "FW",
            "IN",
            "JJ",
            "JJR",
            "JJS",
            "LS",
            "MD",
            "NN",
            "NNP",
            "NNPS",
            "NNS",
            "NN|SYM",
            "PDT",
            "POS",
            "PRP",
            "PRP$",
            "RB",
            "RBR",
            "RBS",
            "RP",
            "SYM",
            "TO",
            "UH",
            "VB",
            "VBD",
            "VBG",
            "VBN",
            "VBP",
            "VBZ",
            "WDT",
            "WP",
            "WP$",
            "WRB",
        ]
        chunk_label_names = [
            "O",
            "B-ADJP",
            "I-ADJP",
            "B-ADVP",
            "I-ADVP",
            "B-CONJP",
            "I-CONJP",
            "B-INTJ",
            "I-INTJ",
            "B-LST",
            "I-LST",
            "B-NP",
            "I-NP",
            "B-PP",
            "I-PP",
            "B-PRT",
            "I-PRT",
            "B-SBAR",
            "I-SBAR",
            "B-UCP",
            "I-UCP",
            "B-VP",
            "I-VP",
        ]
        ner_label_names = [
            "O",
            "B-PER",
            "I-PER",
            "B-ORG",
            "I-ORG",
            "B-LOC",
            "I-LOC",
            "B-MISC",
            "I-MISC",
        ]
        features = datasets.Features(
            {
                "id": datasets.Value("string"),
                "tokens": datasets.Sequence(datasets.Value("string")),
                "pos_tags": datasets.Sequence(datasets.features.ClassLabel(names=pos_label_names)),
                "chunk_tags": datasets.Sequence(datasets.features.ClassLabel(names=chunk_label_names)),
                "ner_tags": datasets.Sequence(datasets.features.ClassLabel(names=ner_label_names)),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            supervised_keys=None,
            homepage="https://github.com/BihterDass/ner/blob/main/ner-main.zip",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        extracted_dir = dl_manager.download_and_extract(_URL)
        # (split name, file inside the extracted archive)
        split_table = [
            (datasets.Split.TRAIN, _TRAINING_FILE),
            (datasets.Split.VALIDATION, _DEV_FILE),
            (datasets.Split.TEST, _TEST_FILE),
        ]
        return [
            datasets.SplitGenerator(
                name=split_name,
                gen_kwargs={"filepath": os.path.join(extracted_dir, file_name)},
            )
            for split_name, file_name in split_table
        ]

    def _generate_examples(self, filepath):
        """Yield (guid, example) pairs parsed from one CoNLL-style file.

        Sentences are runs of space-separated 4-column lines
        (token, POS, chunk, NER); a blank line or a -DOCSTART- marker
        ends the current sentence.
        """
        logger.info("⏳ Generating examples from = %s", filepath)
        with open(filepath, encoding="utf-8") as reader:
            guid = 0
            tokens, pos_tags, chunk_tags, ner_tags = [], [], [], []
            for line in reader:
                at_boundary = line.startswith("-DOCSTART-") or line == "" or line == "\n"
                if at_boundary:
                    # Flush the sentence accumulated so far, if any.
                    if tokens:
                        yield guid, {
                            "id": str(guid),
                            "tokens": tokens,
                            "pos_tags": pos_tags,
                            "chunk_tags": chunk_tags,
                            "ner_tags": ner_tags,
                        }
                        guid += 1
                        tokens, pos_tags, chunk_tags, ner_tags = [], [], [], []
                else:
                    # conll2003 tokens are space separated
                    columns = line.split(" ")
                    tokens.append(columns[0])
                    pos_tags.append(columns[1])
                    chunk_tags.append(columns[2])
                    # Last column carries the trailing newline — strip it.
                    ner_tags.append(columns[3].rstrip())
            # Flush the final sentence when the file does not end blank.
            if tokens:
                yield guid, {
                    "id": str(guid),
                    "tokens": tokens,
                    "pos_tags": pos_tags,
                    "chunk_tags": chunk_tags,
                    "ner_tags": ner_tags,
                }