vtsouval committed on
Commit
f4c7ca5
·
1 Parent(s): f1c541e

Initial upload of medley-solos-db dataset script

Browse files
Files changed (2) hide show
  1. README.md +20 -0
  2. medley-solos-db/medley-solos-db.py +251 -0
README.md ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Medley-Solos-DB
2
+
3
+ Medley-Solos-DB is a dataset of monophonic instrument solos across 8 musical classes:
4
+ - clarinet
5
+ - distorted electric guitar
6
+ - female singer
7
+ - flute
8
+ - piano
9
+ - tenor saxophone
10
+ - trumpet
11
+ - violin
12
+
13
+ Each sample is labeled and paired with audio data at 44.1 kHz sampling rate.
14
+
15
+ ## Source
16
+ Originally published on [Zenodo](https://zenodo.org/record/3464194).
17
+
18
+ ## License
19
+ Creative Commons Attribution 4.0 International (CC BY 4.0)
20
+
medley-solos-db/medley-solos-db.py ADDED
@@ -0,0 +1,251 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+
3
+ """Medley-Solos-DB dataset."""
4
+
5
+
6
+ import os
7
+ import gzip
8
+ import shutil
9
+ import pathlib
10
+ import datasets
11
+ import typing as tp
12
+ import pandas as pd
13
+ import urllib.request
14
+ from pathlib import Path
15
+ from rich import print
16
+ from tqdm.auto import tqdm
17
+
18
+
19
# All Medley-Solos-DB clips are distributed at this sampling rate.
SAMPLE_RATE = 44_100

# Cache location — mirrors the huggingface_hub cache-resolution chain:
# XDG_CACHE_HOME -> HF_HOME -> HF_DATASETS_CACHE, each overridable via env.
VERSION = "0.0.1"
DEFAULT_XDG_CACHE_HOME = "~/.cache"
XDG_CACHE_HOME = os.getenv("XDG_CACHE_HOME", DEFAULT_XDG_CACHE_HOME)
DEFAULT_HF_CACHE_HOME = os.path.join(XDG_CACHE_HOME, "huggingface")
HF_CACHE_HOME = os.path.expanduser(os.getenv("HF_HOME", DEFAULT_HF_CACHE_HOME))
DEFAULT_HF_DATASETS_CACHE = os.path.join(HF_CACHE_HOME, "datasets")
HF_DATASETS_CACHE = Path(os.getenv("HF_DATASETS_CACHE", DEFAULT_HF_DATASETS_CACHE))

# The eight instrument classes, in label-index order (index == instrument_id
# used by the upstream metadata CSV).
CLASSES = [
    'clarinet',
    'distorted electric guitar',
    'female singer',
    'flute',
    'piano',
    'tenor saxophone',
    'trumpet',
    'violin',
]
CLASS2INDEX = {name: index for index, name in enumerate(CLASSES)}
INDEX2CLASS = dict(enumerate(CLASSES))
35
+
36
+
37
class MedleySolosDBConfig(datasets.BuilderConfig):
    """BuilderConfig for Medley-Solos-DB.

    In addition to the standard ``BuilderConfig`` fields, this carries the
    ``datasets.Features`` schema that the builder exposes for the config.
    """

    def __init__(self, features, **kwargs):
        # Pin the builder version to the module-level VERSION; everything
        # else (name, description, ...) is forwarded to BuilderConfig.
        super().__init__(version=datasets.Version(VERSION, ""), **kwargs)
        self.features = features
43
+
44
+
45
class MedleySolosDB(datasets.GeneratorBasedBuilder):
    """Hugging Face dataset builder for Medley-Solos-DB v1.2.

    Downloads the audio archive and metadata CSV from Zenodo, matches each
    extracted .wav file to its metadata row via the file-name stem, and
    yields (audio, instrument, label) examples for the training /
    validation / test splits.
    """

    BUILDER_CONFIGS = [
        MedleySolosDBConfig(
            features=datasets.Features(
                {
                    "audio": datasets.Audio(sampling_rate=SAMPLE_RATE),
                    "instrument": datasets.Value("string"),
                    "label": datasets.features.ClassLabel(names=CLASSES),
                }
            ),
            name="v1.2",
            description="",
        ),
    ]

    DEFAULT_CONFIG_NAME = "v1.2"

    def _info(self):
        """Return the DatasetInfo describing the configured feature schema."""
        return datasets.DatasetInfo(
            description="",
            features=self.config.features,
            supervised_keys=None,
            homepage="",
            citation="",
        )

    def _split_generators(self, dl_manager):
        """Download/extract the Zenodo archive and return SplitGenerators."""
        zip_file_url = "https://zenodo.org/records/3464194/files/Medley-solos-DB.tar.gz"
        _filename = zip_file_url.split('/')[-1]
        # NOTE(review): the cache sub-path hard-codes the repo namespace
        # ("confit___medley-solos-db") — confirm it matches where this
        # script is actually hosted.
        _save_path = os.path.join(
            HF_DATASETS_CACHE, 'confit___medley-solos-db/v1.2', VERSION, _filename
        )
        download_file(zip_file_url, _save_path)
        print(f"`{_filename}` is downloaded to {_save_path}")
        archive_path = dl_manager.extract(_save_path)
        print(f"`{_filename}` is now extracted to {archive_path}")

        # The metadata CSV carries one row per clip with its subset,
        # instrument name/id and uuid4 — these compose the on-disk filename.
        metadata_df = pd.read_csv("https://zenodo.org/records/3464194/files/Medley-solos-DB_metadata.csv")
        train_df = metadata_df[metadata_df["subset"] == "training"].reset_index(drop=True)
        validation_df = metadata_df[metadata_df["subset"] == "validation"].reset_index(drop=True)
        test_df = metadata_df[metadata_df["subset"] == "test"].reset_index(drop=True)

        extensions = ['.wav']
        _, _walker = fast_scandir(archive_path, extensions, recursive=True)

        def get_fileid2class(df, split=None):
            """Map file stems "Medley-solos-DB_<split>-<cid>_<uuid>" -> instrument name."""
            fileid2class = {}
            for idx, row in df.iterrows():
                _fileid = row['uuid4']
                class_ = row['instrument']
                class_id = row['instrument_id']
                fileid = f"Medley-solos-DB_{split}-{class_id}_{_fileid}"
                fileid2class[fileid] = class_
            return fileid2class

        train_fileid2class = get_fileid2class(train_df, 'training')
        validation_fileid2class = get_fileid2class(validation_df, 'validation')
        test_fileid2class = get_fileid2class(test_df, 'test')

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepaths": _walker, "split": "train", "fileid2class": train_fileid2class}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION, gen_kwargs={"filepaths": _walker, "split": "validation", "fileid2class": validation_fileid2class}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"filepaths": _walker, "split": "test", "fileid2class": test_fileid2class}
            ),
        ]

    def _generate_examples(self, filepaths, split=None, fileid2class=None):
        """Yield (key, example) pairs for files belonging to this split.

        FIX: the original also yielded an "id" column that is absent from
        the declared Features schema ("audio"/"instrument"/"label" only);
        mismatched example keys make the datasets writer reject examples.
        """
        for guid, audio_path in enumerate(filepaths):
            fileid = Path(audio_path).stem
            # _walker contains files of all three splits; keep only those
            # whose stem is in this split's mapping.
            if fileid not in fileid2class:
                continue
            instrument = fileid2class[fileid]
            yield guid, {
                "audio": audio_path,
                "instrument": instrument,
                # ClassLabel encodes the instrument name into its index.
                "label": instrument,
            }
130
+
131
+
132
+
133
def fast_scandir(path: str, exts: tp.List[str], recursive: bool = False):
    """Scan *path* for files whose (lowercased) extension is in *exts*.

    Faster than glob for large trees (adapted from
    github.com/drscotthawley/aeiou/blob/main/aeiou/core.py).

    Arguments
    ---------
    path : str
        Directory to scan.
    exts : list of str
        Extensions to keep, including the dot (e.g. ['.wav']); matching is
        case-insensitive, so pass them in lowercase.
    recursive : bool
        If True, descend into subdirectories as well.

    Returns
    -------
    (subfolders, files) : tuple of lists of str
        All discovered directory paths and all matching file paths.
    """
    subfolders, files = [], []

    try:  # best-effort: skip directories we cannot read (permission denied)
        for entry in os.scandir(path):
            try:  # best-effort: skip entries with too many symlink levels etc.
                if entry.is_dir():
                    subfolders.append(entry.path)
                elif entry.is_file():
                    if os.path.splitext(entry.name)[1].lower() in exts:
                        files.append(entry.path)
            except Exception:
                pass
    except Exception:
        pass

    if recursive:
        # FIX: the original loop variable was named `path`, shadowing the
        # function parameter; renamed for clarity.
        for subfolder in list(subfolders):
            sub_dirs, sub_files = fast_scandir(subfolder, exts, recursive=recursive)
            subfolders.extend(sub_dirs)
            files.extend(sub_files)

    return subfolders, files
158
+
159
+
160
def download_file(
    source,
    dest,
    unpack=False,
    dest_unpack=None,
    replace_existing=False,
    write_permissions=False,
):
    """Downloads the file from the given source and saves it in the given
    destination path.
    Arguments
    ---------
    source : path or url
        Path of the source file. If the source is an URL, it downloads it from
        the web.
    dest : path
        Destination path.
    unpack : bool
        If True, it unpacks the data in the dest folder.
    dest_unpack: path
        Path where to store the unpacked dataset
    replace_existing : bool
        If True, replaces the existing files.
    write_permissions: bool
        When set to True, all the files in the dest_unpack directory will be granted write permissions.
        This option is active only when unpack=True.
    """
    # Create the destination directory if it doesn't exist
    dest_dir = pathlib.Path(dest).resolve().parent
    dest_dir.mkdir(parents=True, exist_ok=True)

    # FIX: the original test was `"http" not in source`, which mis-classified
    # any local path merely containing "http" as a URL.
    if not source.startswith(("http://", "https://")):
        shutil.copyfile(source, dest)

    elif not os.path.isfile(dest) or replace_existing:
        print(f"Downloading {source} to {dest}")

        class DownloadProgressBar(tqdm):
            """tqdm bar driven by urllib's reporthook callback."""

            def update_to(self, b=1, bsize=1, tsize=None):
                """Advance the bar to block-count * block-size bytes."""
                if tsize is not None:
                    self.total = tsize
                self.update(b * bsize - self.n)

        with DownloadProgressBar(
            unit="B",
            unit_scale=True,
            miniters=1,
            desc=source.split("/")[-1],
        ) as t:
            urllib.request.urlretrieve(
                source, filename=dest, reporthook=t.update_to
            )
    else:
        print(f"{dest} exists. Skipping download")

    # Unpack if necessary
    if unpack:
        if dest_unpack is None:
            dest_unpack = os.path.dirname(dest)
        print(f"Extracting {dest} to {dest_unpack}")
        if source.endswith((".tar.gz", ".tgz")):
            # FIX: the original only gunzipped these, leaving an unextracted
            # .tar file. shutil.unpack_archive does support gzipped tarballs
            # via the "gztar" format.
            shutil.unpack_archive(dest, dest_unpack, format="gztar")
        elif source.endswith(".gz"):
            # Plain (non-tar) gzip: decompress to the same name without .gz.
            out = dest.replace(".gz", "")
            with gzip.open(dest, "rb") as f_in:
                with open(out, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
        else:
            shutil.unpack_archive(dest, dest_unpack)
        if write_permissions:
            set_writing_permissions(dest_unpack)
237
+
238
+
239
def set_writing_permissions(folder_path):
    """
    This function sets user writing permissions to all the files in the given folder.
    Arguments
    ---------
    folder_path : folder
        Folder whose files will be granted write permissions.
    """
    for dirpath, _dirnames, filenames in os.walk(folder_path):
        for filename in filenames:
            # Set writing permissions (mode 0o666) to the file
            os.chmod(os.path.join(dirpath, filename), 0o666)