eduagarcia committed on
Commit
d3c3722
·
1 Parent(s): 27a0c27

Create generic_conll.py

Browse files
Files changed (1) hide show
  1. generic_conll.py +107 -0
generic_conll.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2020 HuggingFace Datasets Authors.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ # Lint as: python3
17
+ """Introduction to the CoNLL-2003 Shared Task: Language-Independent Named Entity Recognition"""
18
+
19
+ import os
20
+
21
+ import datasets
22
+
23
+
24
+ logger = datasets.logging.get_logger(__name__)
25
+
26
+
27
class ConllConfig(datasets.BuilderConfig):
    """BuilderConfig for Conll."""

    def __init__(self, **kwargs):
        """BuilderConfig for Conll.

        Args:
            **kwargs: keyword arguments forwarded to super.
        """
        # No extra options beyond the base BuilderConfig; this subclass only
        # exists so the builder has a named config type.
        super(ConllConfig, self).__init__(**kwargs)
36
+
37
+
38
+ class Conll(datasets.GeneratorBasedBuilder):
39
+ """Conll dataset."""
40
+
41
+ BUILDER_CONFIGS = [
42
+ ConllConfig(name="conll", version=datasets.Version("1.0.0"), description="Conll dataset"),
43
+ ]
44
+ DEFAULT_CONFIG_NAME = "conll"
45
+
46
+ def _info(self):
47
+ return datasets.DatasetInfo(
48
+ description=_DESCRIPTION,
49
+ features=datasets.Features(
50
+ {
51
+ "id": datasets.Value("string"),
52
+ "tokens": datasets.Sequence(datasets.Value("string")),
53
+ "ner_tags": datasets.Sequence(datasets.Value("string")),
54
+ }
55
+ ),
56
+ supervised_keys=None,
57
+ )
58
+
59
+ def _split_generators(self, dl_manager):
60
+ """Returns SplitGenerators.""".
61
+ if not self.config.data_files:
62
+ raise ValueError(f"At least one data file must be specified, but got data_files={self.config.data_files}")
63
+ data_files = dl_manager.download_and_extract(self.config.data_files)
64
+ if isinstance(data_files, (str, list, tuple)):
65
+ files = data_files
66
+ if isinstance(files, str):
67
+ files = [files]
68
+ files = [dl_manager.iter_files(file) for file in files]
69
+ return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"files": files})]
70
+ splits = []
71
+ for split_name, files in data_files.items():
72
+ if isinstance(files, str):
73
+ files = [files]
74
+ files = [dl_manager.iter_files(file) for file in files]
75
+ splits.append(datasets.SplitGenerator(name=split_name, gen_kwargs={"files": files}))
76
+ return splits
77
+
78
+ def _generate_examples(self, files):
79
+ for filepath in files:
80
+ logger.info("⏳ Generating examples from = %s", filepath)
81
+ with open(filepath, encoding="utf-8") as f:
82
+ guid = 0
83
+ tokens = []
84
+ ner_tags = []
85
+ for line in f:
86
+ if line.startswith("-DOCSTART-") or line == "" or line == "\n":
87
+ if tokens:
88
+ yield guid, {
89
+ "id": str(guid),
90
+ "tokens": tokens,
91
+ "ner_tags": ner_tags,
92
+ }
93
+ guid += 1
94
+ tokens = []
95
+ ner_tags = []
96
+ else:
97
+ # conll2003 tokens are space separated
98
+ splits = line.split(" ")
99
+ tokens.append(splits[0])
100
+ ner_tags.append(splits[-1].rstrip())
101
+ # last example
102
+ if tokens:
103
+ yield guid, {
104
+ "id": str(guid),
105
+ "tokens": tokens,
106
+ "ner_tags": ner_tags,
107
+ }