jannalu committed on
Commit
662e6dd
·
verified ·
1 Parent(s): 2914530

Delete loading script

Browse files
Files changed (1) hide show
  1. mutual.py +0 -135
mutual.py DELETED
@@ -1,135 +0,0 @@
1
- # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- """MuTual dataset."""
15
-
16
-
17
- import json
18
- import os
19
- from pathlib import Path
20
-
21
- import datasets
22
-
23
-
24
- _CITATION = """\
25
- @inproceedings{mutual,
26
- title = "MuTual: A Dataset for Multi-Turn Dialogue Reasoning",
27
- author = "Cui, Leyang and Wu, Yu and Liu, Shujie and Zhang, Yue and Zhou, Ming" ,
28
- booktitle = "Proceedings of the 58th Conference of the Association for Computational Linguistics",
29
- year = "2020",
30
- publisher = "Association for Computational Linguistics",
31
- }
32
- """
33
-
34
- _DESCRIPTION = """\
35
- MuTual is a retrieval-based dataset for multi-turn dialogue reasoning, which is
36
- modified from Chinese high school English listening comprehension test data.
37
- """
38
-
39
- _HOMEPAGE = "https://github.com/Nealcly/MuTual"
40
-
41
- _LICENSE = "No license found"
42
-
43
- _URLS = "https://github.com/Nealcly/MuTual/archive/master.zip"
44
-
45
-
46
class Mutual(datasets.GeneratorBasedBuilder):
    """MuTual: A Dataset for Multi-Turn Dialogue Reasoning.

    Two configurations are exposed: "mutual" (the original dataset) and
    "mutual_plus" (a harder variant where positive responses are replaced
    with safe responses).
    """

    VERSION = datasets.Version("0.0.1")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name="mutual", version=VERSION, description="The MuTual dataset."
        ),
        datasets.BuilderConfig(
            name="mutual_plus",
            version=VERSION,
            description="MuTualPlus is a more difficult MuTual that replaces positive responses with a safe responses.",
        ),
    ]

    def _info(self):
        """Return the DatasetInfo: feature schema plus card metadata."""
        features = datasets.Features(
            {
                "answers": datasets.Value("string"),
                "options": datasets.features.Sequence(datasets.Value("string")),
                "article": datasets.Value("string"),
                "id": datasets.Value("string"),
            }
        )
        return datasets.DatasetInfo(
            # Append the per-config description to the shared one.
            description=f"{_DESCRIPTION}\n{self.config.description}",
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download/extract the archive once and map each split to its folder.

        The three splits differ only in (split name, folder name), so they are
        built in a single loop instead of repeating the SplitGenerator
        boilerplate three times.
        """
        data_dir = dl_manager.download_and_extract(_URLS)
        split_dirs = [
            (datasets.Split.TRAIN, "train"),
            (datasets.Split.TEST, "test"),
            (datasets.Split.VALIDATION, "dev"),
        ]
        return [
            datasets.SplitGenerator(
                name=split_name,
                # These kwargs are passed to _generate_examples.
                gen_kwargs={
                    "basepath": os.path.join(
                        data_dir, "MuTual-master", "data", self.config.name, folder
                    ),
                    "split": folder,
                },
            )
            for split_name, folder in split_dirs
        ]

    def _generate_examples(self, basepath, split):
        """Yield (key, example) pairs from the per-example files in *basepath*.

        Each ``.txt`` file holds one JSON object with the keys "answers",
        "options", "article" and "id". Files are visited in sorted order so
        keys are deterministic across runs; the `key` only needs to be unique
        per split.
        """
        key = 0
        for file in sorted(Path(basepath).iterdir()):
            if file.suffix != ".txt":
                continue
            data_str = file.read_text(encoding="utf-8")
            # Ignore the occasional empty file shipped in the archive.
            if not data_str:
                continue
            data = json.loads(data_str)
            yield key, {
                "answers": data["answers"],
                "options": data["options"],
                "article": data["article"],
                "id": data["id"],
            }
            key += 1