thomasyu888 commited on
Commit
12ff1d7
·
1 Parent(s): d34d6c8

Update testdataset.py

Browse files
Files changed (1) hide show
  1. testdataset.py +39 -27
testdataset.py CHANGED
@@ -14,14 +14,9 @@
14
  # limitations under the License.
15
 
16
  """
17
- This is inspired from the mednli implementation: https://huggingface.co/datasets/bigbio/mednli/blob/main/mednli.py
18
-
19
- The files comprising this dataset must be on the users local machine in a single directory that is
20
- passed to `datasets.load_dataset` via the `data_dir` kwarg. This loader script will read the archive
21
- files directly (i.e. the user should not uncompress, untar or unzip any of the files). For example,
22
- if `data_dir` is `"testdataset"` it should contain the following files:
23
- testdataset
24
- ├── testdataset.zip
25
  """
26
 
27
  import csv
@@ -45,7 +40,7 @@ Test Dataset
45
 
46
  _CITATION = ""
47
 
48
- _HOMEPAGE = "https://www.synapse.org/"
49
 
50
  _LICENSE = "other"
51
 
@@ -54,20 +49,21 @@ _URLS = {}
54
  _SOURCE_VERSION = "1.0.0"
55
  _BIGBIO_VERSION = "1.0.0"
56
 
 
57
  _SYN_ID = "syn51520473"
58
 
59
- @dataclass
60
- class BigBioConfig(datasets.BuilderConfig):
61
- """BuilderConfig for BigBio."""
62
 
63
- name: str = None
64
- version: datasets.Version = None
65
- description: str = None
66
- schema: str = None
67
- subset_id: str = None
68
 
69
 
70
- def download_from_synapse(syn_id, path):
71
  """Download files from a Synapse folder or project containing test, train, dev csv files
72
 
73
  Args:
@@ -79,17 +75,23 @@ def download_from_synapse(syn_id, path):
79
  import synapseutils
80
  except ModuleNotFoundError as e:
81
  raise ModuleNotFoundError("synapseclient must be installed. pip install synapseclient")
82
- syn = synapseclient.login()
 
 
 
 
 
83
  synapseutils.syncFromSynapse(syn, syn_id, path=path)
84
  # syn.get(entity=syn_id, downloadLocation=path)
85
 
86
 
87
  class TestDataset(datasets.GeneratorBasedBuilder):
88
- """MedNLI"""
89
 
90
  SOURCE_VERSION = datasets.Version(_SOURCE_VERSION)
91
- BIGBIO_VERSION = datasets.Version(_BIGBIO_VERSION)
92
 
 
93
  # BUILDER_CONFIGS = [
94
  # BigBioConfig(
95
  # name="default",
@@ -110,6 +112,7 @@ class TestDataset(datasets.GeneratorBasedBuilder):
110
  DEFAULT_CONFIG_NAME = "testdataset_source"
111
 
112
  def _info(self) -> datasets.DatasetInfo:
 
113
  features = datasets.Features(
114
  {
115
  "id": datasets.Value("string"),
@@ -122,25 +125,32 @@ class TestDataset(datasets.GeneratorBasedBuilder):
122
  )
123
 
124
  return datasets.DatasetInfo(
 
125
  description=_DESCRIPTION,
 
126
  features=features,
 
 
 
 
 
127
  homepage=_HOMEPAGE,
 
128
  license=str(_LICENSE),
129
  citation=_CITATION,
130
  )
131
 
132
  def _split_generators(self, dl_manager) -> List[datasets.SplitGenerator]:
 
 
 
133
  # This is where we can call the custom download function for Synapse
 
134
  data_dir = dl_manager.download_custom(_SYN_ID, download_from_synapse)
135
- # data_dir = self.config.data_dir
136
- if data_dir is None:
137
- raise ValueError(
138
- "This is a local dataset. Please pass the data_dir kwarg to load_dataset."
139
- )
140
-
141
  return [
142
  datasets.SplitGenerator(
143
  name=datasets.Split.TRAIN,
 
144
  gen_kwargs={
145
  "filepath": os.path.join(data_dir, "train.csv"),
146
  "split": "train",
@@ -163,6 +173,8 @@ class TestDataset(datasets.GeneratorBasedBuilder):
163
  ]
164
 
165
  def _generate_examples(self, filepath, split: str) -> Tuple[int, Dict]:
 
 
166
  with open(filepath, "r") as f:
167
  test = csv.DictReader(f)
168
  for row in test:
 
14
  # limitations under the License.
15
 
16
  """
17
+ This implementation specifies a custom synapse download function that allows
18
+ users to leverage the HF datasets API to download files through the Synapse API given
19
+ Synapse authentication
 
 
 
 
 
20
  """
21
 
22
  import csv
 
40
 
41
  _CITATION = ""
42
 
43
+ _HOMEPAGE = "https://www.synapse.org/#!Synapse:syn51520471"
44
 
45
  _LICENSE = "other"
46
 
 
49
  _SOURCE_VERSION = "1.0.0"
50
  _BIGBIO_VERSION = "1.0.0"
51
 
52
+ # This must be a synapse id of a synapse folder or project
53
  _SYN_ID = "syn51520473"
54
 
55
+ # @dataclass
56
+ # class BigBioConfig(datasets.BuilderConfig):
57
+ # """BuilderConfig for BigBio."""
58
 
59
+ # name: str = None
60
+ # version: datasets.Version = None
61
+ # description: str = None
62
+ # schema: str = None
63
+ # subset_id: str = None
64
 
65
 
66
+ def download_from_synapse(syn_id: str, path: str):
67
  """Download files from a Synapse folder or project containing test, train, dev csv files
68
 
69
  Args:
 
75
  import synapseutils
76
  except ModuleNotFoundError as e:
77
  raise ModuleNotFoundError("synapseclient must be installed. pip install synapseclient")
78
+
79
+ try:
80
+ syn = synapseclient.login()
81
+ except Exception:
82
+ raise Exception("Please create a Synapse personal access token and either set up ~/.synapseConfig or `export SYNAPSE_AUTH_TOKEN=<PAT>`")
83
+
84
  synapseutils.syncFromSynapse(syn, syn_id, path=path)
85
  # syn.get(entity=syn_id, downloadLocation=path)
86
 
87
 
88
  class TestDataset(datasets.GeneratorBasedBuilder):
89
+ """Test Dataset"""
90
 
91
  SOURCE_VERSION = datasets.Version(_SOURCE_VERSION)
92
+ # BIGBIO_VERSION = datasets.Version(_BIGBIO_VERSION)
93
 
94
+ # TODO: Add in builder configs
95
  # BUILDER_CONFIGS = [
96
  # BigBioConfig(
97
  # name="default",
 
112
  DEFAULT_CONFIG_NAME = "testdataset_source"
113
 
114
  def _info(self) -> datasets.DatasetInfo:
115
+ # TODO: This method specifies the datasets.DatasetInfo object which contains information and typings for the dataset
116
  features = datasets.Features(
117
  {
118
  "id": datasets.Value("string"),
 
125
  )
126
 
127
  return datasets.DatasetInfo(
128
+ # This is the description that will appear on the datasets page.
129
  description=_DESCRIPTION,
130
+ # This defines the different columns of the dataset and their types
131
  features=features,
132
+ # Here we define them above because they are different between the two configurations
133
+ # If there's a common (input, target) tuple from the features, uncomment supervised_keys line below and
134
+ # specify them. They'll be used if as_supervised=True in builder.as_dataset.
135
+ # supervised_keys=("sentence", "label"),
136
+ # Homepage of the dataset for documentation
137
  homepage=_HOMEPAGE,
138
+ # License for the dataset if available
139
  license=str(_LICENSE),
140
  citation=_CITATION,
141
  )
142
 
143
  def _split_generators(self, dl_manager) -> List[datasets.SplitGenerator]:
144
+ # TODO: This method is tasked with downloading/extracting the data and defining the splits depending on the configuration
145
+ # If several configurations are possible (listed in BUILDER_CONFIGS), the configuration selected by the user is in self.config.name
146
+
147
  # This is where we can call the custom download function for Synapse
148
+ # https://huggingface.co/docs/datasets/v2.12.0/en/package_reference/builder_classes#datasets.DownloadManager.download_custom
149
  data_dir = dl_manager.download_custom(_SYN_ID, download_from_synapse)
 
 
 
 
 
 
150
  return [
151
  datasets.SplitGenerator(
152
  name=datasets.Split.TRAIN,
153
+ # These kwargs will be passed to _generate_examples
154
  gen_kwargs={
155
  "filepath": os.path.join(data_dir, "train.csv"),
156
  "split": "train",
 
173
  ]
174
 
175
  def _generate_examples(self, filepath, split: str) -> Tuple[int, Dict]:
176
+ # TODO: This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
177
+ # The `row['id']` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
178
  with open(filepath, "r") as f:
179
  test = csv.DictReader(f)
180
  for row in test: