yangwang825 committed on
Commit
07c047e
·
verified ·
1 Parent(s): 362bc03

Create dcase2018-task3.py

Browse files
Files changed (1) hide show
  1. dcase2018-task3.py +256 -0
dcase2018-task3.py ADDED
@@ -0,0 +1,256 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+
3
+ """DCASE2018 Task3 Bird Audio Detection dataset."""
4
+
5
+
6
+ import os
7
+ import gzip
8
+ import shutil
9
+ import pathlib
10
+ import logging
11
+ import datasets
12
+ import typing as tp
13
+ import pandas as pd
14
+ import urllib.request
15
+ from pathlib import Path
16
+ from copy import deepcopy
17
+ from tqdm.auto import tqdm
18
+ from rich.logging import RichHandler
19
+
20
+ logger = logging.getLogger(__name__)
21
+ logger.addHandler(RichHandler())
22
+ logger.setLevel(logging.INFO)
23
+
24
+ VERSION = "0.0.1"
25
+
26
+ SAMPLE_RATE = 44_100
27
+
28
+ # Cache location
29
+ DEFAULT_XDG_CACHE_HOME = "~/.cache"
30
+ XDG_CACHE_HOME = os.getenv("XDG_CACHE_HOME", DEFAULT_XDG_CACHE_HOME)
31
+ DEFAULT_HF_CACHE_HOME = os.path.join(XDG_CACHE_HOME, "huggingface")
32
+ HF_CACHE_HOME = os.path.expanduser(os.getenv("HF_HOME", DEFAULT_HF_CACHE_HOME))
33
+ DEFAULT_HF_DATASETS_CACHE = os.path.join(HF_CACHE_HOME, "datasets")
34
+ HF_DATASETS_CACHE = Path(os.getenv("HF_DATASETS_CACHE", DEFAULT_HF_DATASETS_CACHE))
35
+
36
+
37
class DCASE2018Task3KConfig(datasets.BuilderConfig):
    """BuilderConfig for DCASE2018 Task3.

    Carries the feature schema for a named configuration in addition to the
    standard ``datasets.BuilderConfig`` fields.
    """

    def __init__(self, features, **kwargs):
        """Create the config, pinning the dataset version to the module VERSION."""
        super().__init__(version=datasets.Version(VERSION, ""), **kwargs)
        self.features = features
43
+
44
+
45
class DCASE2018Task3(datasets.GeneratorBasedBuilder):
    """Builder for the DCASE2018 Task3 Bird Audio Detection dataset.

    Downloads the development archives (ff1010bird, BirdVox-DCASE-20k) as the
    train split and warblrb10k as the test split, then labels each wav file as
    'presence' / 'absence' of a bird using the published metadata CSVs.
    """

    BUILDER_CONFIGS = [
        DCASE2018Task3KConfig(
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.Audio(sampling_rate=None),
                    "label": datasets.features.ClassLabel(names=['absence', 'presence']),
                }
            ),
            name="hidaka2022investigation",
            description="",
        ),
    ]

    DEFAULT_CONFIG_NAME = "hidaka2022investigation"

    def _info(self):
        return datasets.DatasetInfo(
            description="",
            features=self.config.features,
            supervised_keys=None,
            homepage="https://dcase.community/challenge2018/task-bird-audio-detection",
            citation="",
            task_templates=None,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators for the train (development) and test sets."""
        extensions = ['.wav']

        # Development sets (train split).
        # FIX: the accumulator was previously named `audio_paths` while the
        # SplitGenerator below referenced the undefined `train_audio_paths`,
        # raising NameError. The accumulator is now named consistently.
        train_audio_paths = []
        for _filename in ['ff1010bird_wav.zip', 'BirdVox-DCASE-20k.zip']:
            DEV_URL = f'https://huggingface.co/datasets/confit/dcase2018-task3/resolve/main/{_filename}'
            _dev_save_path = os.path.join(
                HF_DATASETS_CACHE, 'confit___dcase2018-task3/hidaka2022investigation', VERSION
            )
            download_file(DEV_URL, os.path.join(_dev_save_path, _filename))
            train_archive_path = dl_manager.extract(os.path.join(_dev_save_path, _filename))
            logger.info(f"`{_filename}` is extracted to {train_archive_path}")
            _, _walker = fast_scandir(train_archive_path, extensions, recursive=True)
            train_audio_paths.extend(_walker)

        # Evaluation set (test split)
        EVAL_URL = 'https://huggingface.co/datasets/confit/dcase2018-task3/resolve/main/warblrb10k_public_wav.zip'
        _eval_save_path = os.path.join(
            HF_DATASETS_CACHE, 'confit___dcase2018-task3/hidaka2022investigation', VERSION
        )
        _filename = 'warblrb10k_public_wav.zip'
        download_file(EVAL_URL, os.path.join(_eval_save_path, _filename))
        test_archive_path = dl_manager.extract(os.path.join(_eval_save_path, _filename))
        logger.info(f"`{_filename}` is extracted to {test_archive_path}")
        _, test_audio_paths = fast_scandir(test_archive_path, extensions, recursive=True)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"audio_paths": train_audio_paths, "split": "train"}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"audio_paths": test_audio_paths, "split": "test"}
            ),
        ]

    def _generate_examples(self, audio_paths, split=None):
        """Yield (key, example) pairs, labelling each wav via the metadata CSVs."""
        if split == 'train':
            _df1 = pd.read_csv('https://huggingface.co/datasets/confit/dcase2018-task3/raw/main/BirdVoxDCASE20k_csvpublic.csv')
            _df2 = pd.read_csv('https://huggingface.co/datasets/confit/dcase2018-task3/raw/main/ff1010bird_metadata_2018.csv')
            metadata_df = pd.concat([_df1, _df2]).reset_index(drop=True)
        elif split == 'test':
            metadata_df = pd.read_csv('https://huggingface.co/datasets/confit/dcase2018-task3/raw/main/warblrb10k_public_metadata_2018.csv')

        fileid2class = {}
        for idx, row in metadata_df.iterrows():
            has_bird = int(row['hasbird'])
            # FIX: previously a row with hasbird not in {0, 1} silently reused
            # the label from the previous row (or raised NameError on the
            # first row). Such rows are now skipped explicitly.
            if has_bird not in (0, 1):
                continue
            label = 'presence' if has_bird == 1 else 'absence'
            # itemid in the metadata has no extension; the audio files are .wav
            fileid2class[f"{row['itemid']}.wav"] = label

        for guid, audio_path in enumerate(audio_paths):
            fileid = Path(audio_path).name
            label = fileid2class.get(fileid)
            # FIX: the previous dict also yielded an "id" key, which is not
            # declared in the config features and makes `datasets` reject the
            # example. Only declared features are yielded now.
            yield guid, {
                "file": audio_path,
                "audio": audio_path,
                "label": label,
            }
136
+
137
+
138
def fast_scandir(path: str, exts: tp.List[str], recursive: bool = False):
    """Scan *path* for sub-directories and files whose extension is in *exts*.

    Faster than glob for large trees. Adapted from
    github.com/drscotthawley/aeiou/blob/main/aeiou/core.py.
    Returns a ``(subfolders, files)`` pair of path lists; extensions in *exts*
    are expected lowercase (file extensions are lowercased before matching).
    """
    subfolders: tp.List[str] = []
    files: tp.List[str] = []

    try:
        # Outer guard: os.scandir / iteration may fail (e.g. permission denied)
        for entry in os.scandir(path):
            try:
                # Inner guard: per-entry stat may fail (e.g. too many levels
                # of symbolic links)
                if entry.is_dir():
                    subfolders.append(entry.path)
                elif entry.is_file() and os.path.splitext(entry.name)[1].lower() in exts:
                    files.append(entry.path)
            except Exception:
                pass
    except Exception:
        pass

    if recursive:
        # Iterate over a snapshot so folders discovered by recursion
        # are not scanned twice.
        for folder in list(subfolders):
            inner_dirs, inner_files = fast_scandir(folder, exts, recursive=recursive)
            subfolders.extend(inner_dirs)
            files.extend(inner_files)  # type: ignore

    return subfolders, files
163
+
164
+
165
def download_file(
    source,
    dest,
    unpack=False,
    dest_unpack=None,
    replace_existing=False,
    write_permissions=False,
):
    """Downloads the file from the given source and saves it in the given
    destination path.
    Arguments
    ---------
    source : path or url
        Path of the source file. If the source is an URL, it downloads it from
        the web.
    dest : path
        Destination path.
    unpack : bool
        If True, it unpacks the data in the dest folder.
    dest_unpack: path
        Path where to store the unpacked dataset
    replace_existing : bool
        If True, replaces the existing files.
    write_permissions: bool
        When set to True, all the files in the dest_unpack directory will be granted write permissions.
        This option is active only when unpack=True.
    """
    class DownloadProgressBar(tqdm):
        """tqdm subclass adapting urlretrieve's reporthook to a progress bar."""

        def update_to(self, b=1, bsize=1, tsize=None):
            """Needed to support multigpu training."""
            if tsize is not None:
                self.total = tsize
            self.update(b * bsize - self.n)

    # Create the destination directory if it doesn't exist
    dest_dir = pathlib.Path(dest).resolve().parent
    dest_dir.mkdir(parents=True, exist_ok=True)
    # NOTE(review): substring test — a *local* path containing "http" would be
    # treated as a URL; source.startswith(("http://", "https://")) is safer.
    if "http" not in source:
        shutil.copyfile(source, dest)

    elif not os.path.isfile(dest) or (
        os.path.isfile(dest) and replace_existing
    ):
        logger.info(f"Downloading {source} to {dest}")
        with DownloadProgressBar(
            unit="B",
            unit_scale=True,
            miniters=1,
            desc=source.split("/")[-1],
        ) as t:
            urllib.request.urlretrieve(
                source, filename=dest, reporthook=t.update_to
            )
    else:
        logger.info(f"{dest} exists. Skipping download")

    # Unpack if necessary
    if unpack:
        if dest_unpack is None:
            dest_unpack = os.path.dirname(dest)
        logger.info(f"Extracting {dest} to {dest_unpack}")
        # NOTE(review): shutil.unpack_archive does handle .tar.gz; the gzip
        # branch below only gunzips, so a .tar.gz yields a .tar — confirm
        # callers expect that.
        if (
            source.endswith(".tar.gz")
            or source.endswith(".tgz")
            or source.endswith(".gz")
        ):
            # FIX: the previous `dest.replace(".gz", "")` removed *every*
            # ".gz" occurrence anywhere in the path (and turned "x.tgz" into
            # "x.t"). Strip only the trailing suffix instead.
            if dest.endswith(".tar.gz") or dest.endswith(".gz"):
                out = dest[: -len(".gz")]
            elif dest.endswith(".tgz"):
                out = dest[: -len(".tgz")] + ".tar"
            else:
                out = dest + ".out"
            with gzip.open(dest, "rb") as f_in:
                with open(out, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
        else:
            shutil.unpack_archive(dest, dest_unpack)
        if write_permissions:
            set_writing_permissions(dest_unpack)
242
+
243
+
244
def set_writing_permissions(folder_path):
    """
    This function sets user writing permissions to all the files in the given folder.
    Arguments
    ---------
    folder_path : folder
        Folder whose files will be granted write permissions.
    """
    for dirpath, _dirnames, filenames in os.walk(folder_path):
        for name in filenames:
            # Mode 0o666: read/write for owner, group, and others.
            os.chmod(os.path.join(dirpath, name), 0o666)