yangwang825 committed on
Commit
4c1ab6a
·
verified ·
1 Parent(s): e331655

Update arca23k.py

Browse files
Files changed (1) hide show
  1. arca23k.py +211 -3
arca23k.py CHANGED
@@ -21,6 +21,8 @@ from tqdm.auto import tqdm
21
  from rich.logging import RichHandler
22
  from huggingface_hub import hf_hub_download
23
 
 
 
24
  logger = logging.getLogger(__name__)
25
  logger.addHandler(RichHandler())
26
  logger.setLevel(logging.INFO)
@@ -29,8 +31,6 @@ VERSION = "0.0.1"
29
 
30
  SAMPLE_RATE = 44_100
31
 
32
- CLASSES = list(set(LABEL2ID.keys()))
33
-
34
  # Cache location
35
  DEFAULT_XDG_CACHE_HOME = "~/.cache"
36
  XDG_CACHE_HOME = os.getenv("XDG_CACHE_HOME", DEFAULT_XDG_CACHE_HOME)
@@ -45,4 +45,212 @@ class ARCA23KConfig(datasets.BuilderConfig):
45
 
46
  def __init__(self, features, **kwargs):
47
  super(ARCA23KConfig, self).__init__(version=datasets.Version(VERSION, ""), **kwargs)
48
- self.features = features
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  from rich.logging import RichHandler
22
  from huggingface_hub import hf_hub_download
23
 
24
+ from ._arca23k import CLASSES, COARSE_TO_FINE
25
+
26
  logger = logging.getLogger(__name__)
27
  logger.addHandler(RichHandler())
28
  logger.setLevel(logging.INFO)
 
31
 
32
  SAMPLE_RATE = 44_100
33
 
 
 
34
  # Cache location
35
  DEFAULT_XDG_CACHE_HOME = "~/.cache"
36
  XDG_CACHE_HOME = os.getenv("XDG_CACHE_HOME", DEFAULT_XDG_CACHE_HOME)
 
45
 
46
  def __init__(self, features, **kwargs):
47
  super(ARCA23KConfig, self).__init__(version=datasets.Version(VERSION, ""), **kwargs)
48
+ self.features = features
49
+
50
+
51
class ARCA23K(datasets.GeneratorBasedBuilder):
    """Builder for the ARCA23K dataset (labelled Freesound clips, 44.1 kHz).

    The audio is distributed on Zenodo as a split zip archive
    (``.zip`` + ``.z01``-``.z04``); the parts are downloaded individually
    and reassembled with the external ``zip -F`` command before extraction.
    """

    BUILDER_CONFIGS = [
        ARCA23KConfig(
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.Audio(sampling_rate=SAMPLE_RATE),
                    "sound": datasets.Value("string"),
                    "label": datasets.features.ClassLabel(names=CLASSES),
                }
            ),
            name="v1.0",
            description="",
        ),
    ]

    DEFAULT_CONFIG_NAME = "v1.0"

    def _info(self):
        """Returns the DatasetInfo describing features and provenance."""
        return datasets.DatasetInfo(
            description="",
            features=self.config.features,
            supervised_keys=None,
            homepage="https://zenodo.org/records/5117901",
            citation="",
            task_templates=None,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators for the train/validation/test splits."""
        cache_dir = os.path.join(HF_DATASETS_CACHE, 'confit___arca23k/v1.0', VERSION)

        # Download each part of the split zip archive into the same directory.
        for zip_type in ['zip', 'z01', 'z02', 'z03', 'z04']:
            _filename = f'ARCA23K.audio.{zip_type}'
            _zip_file_url = f'https://zenodo.org/records/5117901/files/ARCA23K.audio.{zip_type}'
            # BUG FIX: the previous code joined _filename onto a path that
            # already ended in _filename, so parts were saved to
            # .../ARCA23K.audio.zip/ARCA23K.audio.zip and never matched the
            # reassembly paths below.
            download_file(_zip_file_url, os.path.join(cache_dir, _filename))
            logger.info(f"`{_filename}` is downloaded to {cache_dir}")

        main_zip_filename = 'ARCA23K.audio.zip'
        concat_zip_filename = 'ARCA23K.audio.full.zip'
        _input_file = os.path.join(cache_dir, main_zip_filename)
        _output_file = os.path.join(cache_dir, concat_zip_filename)

        if not os.path.exists(_output_file):
            logger.info(f"Reassemble {_output_file} file")
            # NOTE(review): relies on the external `zip` binary being on PATH.
            os.system(f"zip -q -F {_input_file} --out {_output_file}")
        archive_path = dl_manager.extract(_output_file)
        logger.info(f"`{concat_zip_filename}` is downloaded to {archive_path}")

        metadata_path = dl_manager.download_and_extract("https://zenodo.org/records/5117901/files/ARCA23K-FSD.ground_truth.zip")
        train_df = pd.read_csv(os.path.join(metadata_path, 'train.csv'))
        validation_df = pd.read_csv(os.path.join(metadata_path, 'val.csv'))
        test_df = pd.read_csv(os.path.join(metadata_path, 'test.csv'))

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"archive_path": archive_path, "split": "train", "metadata": train_df}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION, gen_kwargs={"archive_path": archive_path, "split": "validation", "metadata": validation_df}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path, "split": "test", "metadata": test_df}
            ),
        ]

    def _generate_examples(self, archive_path, split=None, metadata=None):
        """Yields (key, example) pairs for one split.

        Arguments
        ---------
        archive_path : str
            Root of the extracted audio archive.
        split : str
            Split name (unused; `metadata` already selects the split).
        metadata : pandas.DataFrame
            Ground-truth frame with `fname` and `label` columns.
        """
        extensions = ['.wav']
        _, _walker = fast_scandir(archive_path, extensions, recursive=True)

        # BUG FIX: the mapping was created as `fileid2class` but written and
        # read as `fileid2index`, raising NameError on first use.
        # Keys are cast to str because Path.stem is always a str, while the
        # CSV `fname` column is presumably numeric — TODO confirm against the
        # ground-truth files.
        fileid2class = {}
        for _, row in metadata.iterrows():
            fileid2class[str(row['fname'])] = row['label']  # fname has no suffix

        for guid, audio_path in enumerate(_walker):
            fileid = Path(audio_path).stem
            # BUG FIX: dropped the extra "id" key, which is absent from the
            # declared features and would make example encoding fail.
            yield guid, {
                "file": audio_path,
                "audio": audio_path,
                "sound": fileid2class.get(fileid),
                "label": fileid2class.get(fileid),
            }
136
+
137
+
138
def fast_scandir(path: str, exts: tp.List[str], recursive: bool = False):
    """Scan *path* and collect files whose extension is in *exts*.

    Faster than glob for large trees.
    From github.com/drscotthawley/aeiou/blob/main/aeiou/core.py

    Arguments
    ---------
    path : str
        Directory to scan.
    exts : list of str
        Lower-case extensions to keep, including the dot (e.g. ['.wav']).
    recursive : bool
        If True, descend into subdirectories.

    Returns
    -------
    tuple of (subfolders, files), each a list of paths.
    """
    subfolders, files = [], []

    try:  # skip directories we cannot read ('permission denied' etc.)
        for entry in os.scandir(path):
            try:  # avoid 'too many levels of symbolic links' errors
                if entry.is_dir():
                    subfolders.append(entry.path)
                elif entry.is_file():
                    if os.path.splitext(entry.name)[1].lower() in exts:
                        files.append(entry.path)
            except OSError:  # narrowed from bare Exception: only OS errors expected here
                pass
    except OSError:
        pass

    if recursive:
        # BUG FIX: the loop variable previously shadowed the `path` parameter.
        # Iterate over a snapshot since the recursion extends `subfolders`.
        for subfolder in list(subfolders):
            sub_subfolders, sub_files = fast_scandir(subfolder, exts, recursive=recursive)
            subfolders.extend(sub_subfolders)
            files.extend(sub_files)

    return subfolders, files
163
+
164
+
165
def download_file(
    source,
    dest,
    unpack=False,
    dest_unpack=None,
    replace_existing=False,
    write_permissions=False,
):
    """Downloads the file from the given source and saves it in the given
    destination path.
    Arguments
    ---------
    source : path or url
        Path of the source file. If the source is an URL, it downloads it from
        the web.
    dest : path
        Destination path.
    unpack : bool
        If True, it unpacks the data in the dest folder.
    dest_unpack: path
        Path where to store the unpacked dataset
    replace_existing : bool
        If True, replaces the existing files.
    write_permissions: bool
        When set to True, all the files in the dest_unpack directory will be granted write permissions.
        This option is active only when unpack=True.
    """
    class DownloadProgressBar(tqdm):
        """tqdm subclass adapting urlretrieve's reporthook signature."""

        def update_to(self, b=1, bsize=1, tsize=None):
            """Needed to support multigpu training."""
            if tsize is not None:
                self.total = tsize
            self.update(b * bsize - self.n)

    # Create the destination directory if it doesn't exist
    dest_dir = pathlib.Path(dest).resolve().parent
    dest_dir.mkdir(parents=True, exist_ok=True)

    # BUG FIX: the previous check was `"http" not in source`, which treated
    # any LOCAL path containing the substring "http" as a URL. Test the
    # scheme prefix instead.
    if not str(source).startswith(("http://", "https://")):
        shutil.copyfile(source, dest)

    elif not os.path.isfile(dest) or replace_existing:
        print(f"Downloading {source} to {dest}")
        with DownloadProgressBar(
            unit="B",
            unit_scale=True,
            miniters=1,
            desc=source.split("/")[-1],
        ) as t:
            urllib.request.urlretrieve(
                source, filename=dest, reporthook=t.update_to
            )
    else:
        print(f"{dest} exists. Skipping download")

    # Unpack if necessary
    if unpack:
        if dest_unpack is None:
            dest_unpack = os.path.dirname(dest)
        print(f"Extracting {dest} to {dest_unpack}")
        # shutil unpack_archive does not work with tar.gz files
        if (
            source.endswith(".tar.gz")
            or source.endswith(".tgz")
            or source.endswith(".gz")
        ):
            # NOTE(review): for a ".tgz" source, replace(".gz", "") leaves
            # `out == dest`, so the file would be truncated while being read —
            # preserved as-is, but worth confirming with the callers.
            out = dest.replace(".gz", "")
            with gzip.open(dest, "rb") as f_in:
                with open(out, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
        else:
            shutil.unpack_archive(dest, dest_unpack)
        if write_permissions:
            set_writing_permissions(dest_unpack)
242
+
243
+
244
def set_writing_permissions(folder_path):
    """
    This function sets user writing permissions to all the files in the given folder.
    Arguments
    ---------
    folder_path : folder
        Folder whose files will be granted write permissions.
    """
    # Walk the tree and grant read/write (mode 0o666) to every regular file.
    for dirpath, _dirnames, filenames in os.walk(folder_path):
        for name in filenames:
            os.chmod(os.path.join(dirpath, name), 0o666)