Datasets:

Languages:
Indonesian
ArXiv:
License:
holylovenia committed on
Commit
8e72487
·
1 Parent(s): cf72313

Upload unimorph_id.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. unimorph_id.py +244 -0
unimorph_id.py ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022 The HuggingFace Datasets Authors and the current dataset script contributor.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from pathlib import Path
17
+ from typing import Dict, List, Tuple
18
+
19
+ import datasets
20
+
21
+ from nusacrowd.utils import schemas
22
+ from nusacrowd.utils.configs import NusantaraConfig
23
+ from nusacrowd.utils.constants import Tasks
24
+
25
# --- Dataset script metadata ------------------------------------------------
# BibTeX citation for the SIGMORPHON 2021 shared task paper that covers the
# Indonesian UniMorph data.
_CITATION = """\
@inproceedings{pimentel-ryskina-etal-2021-sigmorphon,
    title = "SIGMORPHON 2021 Shared Task on Morphological Reinflection: Generalization Across Languages",
    author = "Pimentel, Tiago and
      Ryskina, Maria and
      Mielke, Sabrina J. and
      Wu, Shijie and
      Chodroff, Eleanor and
      Leonard, Brian and
      Nicolai, Garrett and
      Ghanggo Ate, Yustinus and
      Khalifa, Salam and
      Habash, Nizar and
      El-Khaissi, Charbel and
      Goldman, Omer and
      Gasser, Michael and
      Lane, William and
      Coler, Matt and
      Oncevay, Arturo and
      Montoya Samame, Jaime Rafael and
      Silva Villegas, Gema Celeste and
      Ek, Adam and
      Bernardy, Jean-Philippe and
      Shcherbakov, Andrey and
      Bayyr-ool, Aziyana and
      Sheifer, Karina and
      Ganieva, Sofya and
      Plugaryov, Matvey and
      Klyachko, Elena and
      Salehi, Ali and
      Krizhanovsky, Andrew and
      Krizhanovsky, Natalia and
      Vania, Clara and
      Ivanova, Sardana and
      Salchak, Aelita and
      Straughn, Christopher and
      Liu, Zoey and
      Washington, Jonathan North and
      Ataman, Duygu and
      Kiera{\'s}, Witold and
      Woli{\'n}ski, Marcin and
      Suhardijanto, Totok and
      Stoehr, Niklas and
      Nuriah, Zahroh and
      Ratan, Shyam and
      Tyers, Francis M. and
      Ponti, Edoardo M. and
      Aiton, Grant and
      Hatcher, Richard J. and
      Prud'hommeaux, Emily and
      Kumar, Ritesh and
      Hulden, Mans and
      Barta, Botond and
      Lakatos, Dorina and
      Szolnok, G{\'a}bor and
      {\'A}cs, Judit and
      Raj, Mohit and
      Yarowsky, David and
      Cotterell, Ryan and
      Ambridge, Ben and
      Vylomova, Ekaterina",
    booktitle = "Proceedings of the 18th SIGMORPHON Workshop on Computational Research in Phonetics, Phonology, and Morphology",
    month = aug,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.sigmorphon-1.25",
    doi = "10.18653/v1/2021.sigmorphon-1.25",
    pages = "229--259"
}"""

# Whether the dataset requires a locally supplied copy (False: downloaded).
_LOCAL = False
_LANGUAGES = ["ind"]  # We follow ISO639-3 language code (https://iso639-3.sil.org/code_tables/639/data)
_DATASETNAME = "unimorph_id"

_DESCRIPTION = """\
The UniMorph project, Indonesian chapter.
Due to sparsity of UniMorph original parsing, raw source is used instead.
Original parsing can be found on https://huggingface.co/datasets/universal_morphologies/blob/2.3.2/universal_morphologies.py
"""

_HOMEPAGE = "https://github.com/unimorph/ind"

_LICENSE = "Creative Commons Attribution-ShareAlike 3.0 Unported (CC BY-SA 3.0)"

# Raw tab-separated UniMorph file, fetched directly from the upstream repo.
_URLS = {
    _DATASETNAME: "https://raw.githubusercontent.com/unimorph/ind/main/ind",
}

_SUPPORTED_TASKS = [Tasks.MORPHOLOGICAL_INFLECTION]

# Version of the upstream data and of the Nusantara-schema wrapping.
_SOURCE_VERSION = "1.0.0"
_NUSANTARA_VERSION = "1.0.0"
118
+
119
+
120
class UnimorphIdDataset(datasets.GeneratorBasedBuilder):
    """The UniMorph project, Indonesian chapter.

    Reads the raw UniMorph ``ind`` file (one ``lemma<TAB>form<TAB>tag``
    triple per line, where ``tag`` is a ``;``-separated UniMorph feature
    bundle) and exposes it in two schemas: the original ``source`` schema
    and the Nusantara ``pairs_multi`` schema.
    """

    SOURCE_VERSION = datasets.Version(_SOURCE_VERSION)
    NUSANTARA_VERSION = datasets.Version(_NUSANTARA_VERSION)

    # Closed vocabulary of UniMorph feature labels present in the Indonesian
    # data; used as the label set for the pairs_multi schema.
    label_classes = [
        "",
        "1",
        "2",
        "3",
        "ACT",
        "ADJ",
        "ADV",
        "APPL",
        "CAUS",
        "DEF",
        "FEM",
        "FOC",
        "ITER",
        "MASC",
        "N",
        "NEG",
        "NEUT",
        "PASS",
        "POS",
        "PSS1S",
        "PSS2S",
        "PSS3S",
        "SG",
        "SPRL",
        "TR",
        "V",
    ]

    BUILDER_CONFIGS = [
        NusantaraConfig(
            name=f"{_DATASETNAME}_source",
            version=SOURCE_VERSION,
            description=f"{_DATASETNAME} source schema",
            schema="source",
            subset_id=f"{_DATASETNAME}",
        ),
        NusantaraConfig(
            name=f"{_DATASETNAME}_nusantara_pairs_multi",
            # Fix: the Nusantara-schema config previously reused
            # SOURCE_VERSION; it should carry the Nusantara version so the
            # two version constants declared above are actually used.
            version=NUSANTARA_VERSION,
            description=f"{_DATASETNAME} Nusantara schema",
            schema="nusantara_pairs_multi",
            subset_id=f"{_DATASETNAME}",
        ),
    ]

    DEFAULT_CONFIG_NAME = f"{_DATASETNAME}_source"

    def _info(self) -> datasets.DatasetInfo:
        """Declare the feature schema matching the selected config.

        Raises:
            NotImplementedError: if the config names an unknown schema.
        """
        if self.config.schema == "source":
            features = datasets.Features(
                {
                    "lemma": datasets.Value("string"),
                    "form": datasets.Value("string"),
                    "tag": [datasets.Value("string")],
                }
            )
        elif self.config.schema == "nusantara_pairs_multi":
            features = schemas.pairs_multi_features(self.label_classes)
        else:
            raise NotImplementedError(f"Schema '{self.config.schema}' is not defined.")

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
        """Returns SplitGenerators.

        The corpus ships as a single file, exposed as one TRAIN split.
        """
        urls = _URLS[_DATASETNAME]
        data_path = dl_manager.download(urls)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filepath": data_path},
            ),
        ]

    def _generate_examples(self, filepath: Path) -> Tuple[int, Dict]:
        """Yields examples as (key, example) tuples.

        Each input line is ``lemma<TAB>form<TAB>tag``; ``tag`` is split on
        ``;`` into the list of UniMorph features.

        Raises:
            AssertionError: if any line does not have exactly 3 fields.
        """
        with open(filepath, "r", encoding="utf8") as f:
            dataset = [line.rstrip("\r\n").split("\t") for line in f]

        # Validate the file layout before converting: every row must carry
        # exactly the (lemma, form, tag) triple.
        field_counts = set(map(len, dataset))
        if field_counts != {3}:
            raise AssertionError(f"Expecting exactly 3 fields (lemma, form, tag/category), but found: {field_counts}")

        def _raw2schema(line):
            # Map a raw 3-field row to the source-schema dict.
            return {
                "lemma": line[0],
                "form": line[1],
                "tag": line[2].split(";"),
            }

        dataset = list(map(_raw2schema, dataset))

        if self.config.schema == "source":
            for key, example in enumerate(dataset):
                yield key, example

        elif self.config.schema == "nusantara_pairs_multi":
            for key, ex in enumerate(dataset):
                yield key, {
                    "id": str(key),
                    "text_1": ex["lemma"],
                    "text_2": ex["form"],
                    "label": ex["tag"],
                }

        else:
            raise NotImplementedError(f"Schema '{self.config.schema}' is not defined.")