yangwang825 committed on
Commit
f6b2bff
·
verified ·
1 Parent(s): e429e40

Update fsdkaggle2019-script.py

Browse files
Files changed (1) hide show
  1. fsdkaggle2019-script.py +82 -3
fsdkaggle2019-script.py CHANGED
@@ -4,6 +4,7 @@
4
 
5
 
6
  import os
 
7
  import textwrap
8
  import datasets
9
  import itertools
@@ -11,9 +12,12 @@ import pandas as pd
11
  import typing as tp
12
  from pathlib import Path
13
  from copy import deepcopy
 
14
 
15
  from ._fsd2019 import CLASSES
16
 
 
 
17
  SAMPLE_RATE = 44_100
18
 
19
  _TRAIN_CURATED_URL = "https://zenodo.org/records/3612637/files/FSDKaggle2019.audio_train_curated.zip"
@@ -29,12 +33,20 @@ _TRAIN_NOISY_URLS = [
29
  _TEST_URL = "https://zenodo.org/records/3612637/files/FSDKaggle2019.audio_test.zip"
30
  _METADATA_URL = "https://zenodo.org/records/3612637/files/FSDKaggle2019.meta.zip"
31
 
 
 
 
 
 
 
 
 
32
 
33
  class FSDKaggle2019Config(datasets.BuilderConfig):
34
  """BuilderConfig for FSDKaggle2019."""
35
 
36
  def __init__(self, features, **kwargs):
37
- super(FSDKaggle2019Config, self).__init__(version=datasets.Version("0.0.1", ""), **kwargs)
38
  self.features = features
39
 
40
 
@@ -82,8 +94,16 @@ class FSDKaggle2019(datasets.GeneratorBasedBuilder):
82
  if self.config.name == 'curated':
83
  train_archive_path = dl_manager.download_and_extract(_TRAIN_CURATED_URL)
84
  elif self.config.name == 'noisy':
85
- train_archive_paths = dl_manager.download_and_extract(_TRAIN_NOISY_URLS)
86
- assert False, f'{train_archive_paths}'
 
 
 
 
 
 
 
 
87
  test_archive_path = dl_manager.download_and_extract(_TEST_URL)
88
  metadata_archive_path = dl_manager.download_and_extract(_METADATA_URL)
89
 
@@ -148,3 +168,62 @@ def fast_scandir(path: str, exts: tp.List[str], recursive: bool = False):
148
  files.extend(f) # type: ignore
149
 
150
  return subfolders, files
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
 
5
 
6
  import os
7
+ import requests
8
  import textwrap
9
  import datasets
10
  import itertools
 
12
  import typing as tp
13
  from pathlib import Path
14
  from copy import deepcopy
15
+ from tqdm.auto import tqdm
16
 
17
  from ._fsd2019 import CLASSES
18
 
19
+ VERSION = "0.0.1"
20
+
21
  SAMPLE_RATE = 44_100
22
 
23
  _TRAIN_CURATED_URL = "https://zenodo.org/records/3612637/files/FSDKaggle2019.audio_train_curated.zip"
 
33
  _TEST_URL = "https://zenodo.org/records/3612637/files/FSDKaggle2019.audio_test.zip"
34
  _METADATA_URL = "https://zenodo.org/records/3612637/files/FSDKaggle2019.meta.zip"
35
 
36
+ # Cache location
37
+ DEFAULT_XDG_CACHE_HOME = "~/.cache"
38
+ XDG_CACHE_HOME = os.getenv("XDG_CACHE_HOME", DEFAULT_XDG_CACHE_HOME)
39
+ DEFAULT_HF_CACHE_HOME = os.path.join(XDG_CACHE_HOME, "huggingface")
40
+ HF_CACHE_HOME = os.path.expanduser(os.getenv("HF_HOME", DEFAULT_HF_CACHE_HOME))
41
+ DEFAULT_HF_DATASETS_CACHE = os.path.join(HF_CACHE_HOME, "datasets")
42
+ HF_DATASETS_CACHE = Path(os.getenv("HF_DATASETS_CACHE", DEFAULT_HF_DATASETS_CACHE))
43
+
44
 
45
class FSDKaggle2019Config(datasets.BuilderConfig):
    """BuilderConfig for FSDKaggle2019."""

    def __init__(self, features, **kwargs):
        """Store the feature spec and pin the config to the module VERSION.

        Arguments
        ---------
        features :
            The `datasets.Features` describing examples for this config.
        **kwargs :
            Forwarded to `datasets.BuilderConfig` (e.g. `name`, `description`).
        """
        super().__init__(version=datasets.Version(VERSION, ""), **kwargs)
        self.features = features
52
 
 
94
  if self.config.name == 'curated':
95
  train_archive_path = dl_manager.download_and_extract(_TRAIN_CURATED_URL)
96
  elif self.config.name == 'noisy':
97
+ for zip_file_url in _TRAIN_NOISY_URLS:
98
+ name = zip_file_url.split("/")[-1]
99
+ download_file(
100
+ zip_file_url,
101
+ os.path.join('confit___fsdkaggle2019-script/noisy', VERSION, name)
102
+ )
103
+ _input_file = os.path.join('confit___fsdkaggle2019-script/noisy', VERSION, 'FSDKaggle2019.audio_train_noisy.zip')
104
+ _output_file = os.path.join('confit___fsdkaggle2019-script/noisy', VERSION, 'FSDKaggle2019.audio_train_noisy.combine.zip')
105
+ os.system(f"zip -F {_input_file} --out {_output_file}")
106
+ train_archive_path = dl_manager.extract(_output_file)
107
  test_archive_path = dl_manager.download_and_extract(_TEST_URL)
108
  metadata_archive_path = dl_manager.download_and_extract(_METADATA_URL)
109
 
 
168
  files.extend(f) # type: ignore
169
 
170
  return subfolders, files
171
+
172
+
173
def download_file(
    download_url, download_path, split_name=None, filename=None, resume_byte_pos=None
):
    """
    Download a file from the given URL, with optional resume support.

    Arguments
    ---------
    download_url : str
        URL of the file being downloaded.
    download_path : str
        Full path of the file that is to be downloaded
        (or already downloaded).
    split_name : str, optional
        Split name of the file being downloaded, used only for logging
        (e.g. read_speech). Defaults to the parent directory name of
        ``download_path``, so callers may pass only URL + path.
    filename : str, optional
        Filename of the file being downloaded, used only for logging.
        Defaults to the last path component of ``download_url``.
    resume_byte_pos : int, optional
        Starting byte position for resuming the download.
        Default is None, which means a fresh download.

    Returns
    -------
    bool
        If True, the file need not be downloaded again.
        Else the download might have failed or is incomplete.
    """
    # Derive logging labels when omitted — the _split_generators call site
    # invokes this helper with only (url, path), which previously raised
    # TypeError against the four-positional-argument signature.
    if filename is None:
        filename = download_url.rsplit("/", 1)[-1]
    if split_name is None:
        split_name = os.path.basename(os.path.dirname(download_path)) or download_path
    print("Downloading:", split_name, "=>", filename)

    resume_header = (
        {"Range": f"bytes={resume_byte_pos}-"} if resume_byte_pos else None
    )
    response = requests.get(download_url, headers=resume_header, stream=True)
    if not response.ok:
        # Don't write an HTTP error page into the target archive; stay
        # best-effort like the validation branch below.
        print(f"Download failed with HTTP {response.status_code}. Moving on.")
        return False

    # Content-Length may be absent (e.g. chunked transfer encoding);
    # int(None) would raise, so fall back to an indeterminate progress bar.
    content_length = response.headers.get("Content-Length")
    file_size = int(content_length) if content_length is not None else None

    # Append when resuming from a byte offset, otherwise start fresh.
    mode = "ab" if resume_byte_pos else "wb"
    initial_pos = resume_byte_pos if resume_byte_pos else 0

    with open(download_path, mode) as f:
        with tqdm(
            total=file_size,
            unit="B",
            unit_scale=True,
            unit_divisor=1024,
            initial=initial_pos,
            miniters=1,
        ) as pbar:
            # 32 KiB chunks keep memory bounded while streaming.
            for chunk in response.iter_content(32 * 1024):
                f.write(chunk)
                pbar.update(len(chunk))

    # Validate downloaded file.
    # NOTE(review): validate_file is defined elsewhere in this module —
    # confirm it compares size/checksum against the remote resource.
    if validate_file(download_url, download_path):
        return True
    print("Download failed. Moving on.")
    return False