# PS_Alaska / PS_Alaska.py
# NOTE: Hugging Face Hub file-viewer residue removed from this header.
# Provenance: uploaded by Aaaapril via huggingface_hub, commit c091b48
# (verified), 6.39 kB.
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""P and S phase arrivals dataset for Alaska"""
import h5py
import csv
import os
import datasets
# Shared top-of-card description prepended to every configuration's
# description in _info() (currently empty — TODO: fill in).
_PSAlaska_DESCRIPTION = """
"""
# Citation for the manually picked subset. This is the placeholder from the
# HuggingFace dataset-script template — TODO: replace with the real reference.
_ManualPick_CITATION = """\
@InProceedings{huggingface:dataset,
title = {A great new dataset},
author={huggingface, Inc.
},
year={2020}
}
"""
_ManualPick_DESCRIPTION = """\
This dataset includes P and S phases recorded by the broadband stations in the Alaska Peninsula
"""
# Citation for the PhaseNet-TF iteration-1 picks (TODO: fill in).
_PNTFIter1_CITATION = """
"""
_PNTFIter1_DESCRIPTION = """
This dataset includes P and S phases predicted by the PhaseNet-TF using model trained by the manualpick dataset
"""
# Citation for the combined iteration-1 subset (TODO: fill in).
_PNTFIter1Combined_CITATION = """
"""
_PNTFIter1Combined_DESCRIPTION = """
This dataset includes all P and S phases from PNTFiter1 dataset and all false negative arrivals of manualpick dataset
"""
# Root directory holding one sub-directory per configuration plus a shared
# stations.csv. NOTE(review): this is a cluster-local path, so the script only
# works where that filesystem is mounted — consider a real URL for portability.
_Data_URL = "/mnt/scratch/jieyaqi/alaska/final/PS_Alaska"
class PSAlaskaConfig(datasets.BuilderConfig):
    """BuilderConfig for one subset of the PS_Alaska dataset."""

    def __init__(self, description, data_url, citation, **kwargs):
        """BuilderConfig for PS_Alaska.

        Args:
            description: `string`, free-text description of this configuration,
                appended to the dataset-level description in `_info()`.
            data_url: `string`, path (or url) of the directory holding this
                configuration's `catalogs.csv` and `waveform/` files.
            citation: `string`, citation for this configuration's data.
            **kwargs: keyword arguments forwarded to super (e.g. `name`).
        """
        # Every configuration shares the same schema, so the version is pinned
        # here rather than repeated in each BUILDER_CONFIGS entry.
        super().__init__(version=datasets.Version("1.0.0"), **kwargs)
        self.description = description
        self.data_url = data_url
        self.citation = citation
class PSAlaskaDataset(datasets.GeneratorBasedBuilder):
    """P and S phase arrivals dataset for Alaska."""

    VERSION = datasets.Version("1.0.0")

    BUILDER_CONFIGS = [
        PSAlaskaConfig(
            name="ManualPick",
            description=_ManualPick_DESCRIPTION,
            data_url=_Data_URL + "/ManualPick",
            citation=_ManualPick_CITATION,
        ),
        PSAlaskaConfig(
            name="PNTFIter1",
            description=_PNTFIter1_DESCRIPTION,
            data_url=_Data_URL + "/PNTFIter1",
            citation=_PNTFIter1_CITATION,
        ),
        PSAlaskaConfig(
            name="PNTFIter1_combined",
            description=_PNTFIter1Combined_DESCRIPTION,
            data_url=_Data_URL + "/PNTFIter1_combined",
            citation=_PNTFIter1Combined_CITATION,
        ),
    ]
    DEFAULT_CONFIG_NAME = "PNTFIter1_combined"

    def _info(self):
        """Describe the feature schema shared by every configuration."""
        return datasets.DatasetInfo(
            description=_PSAlaska_DESCRIPTION + self.config.description,
            features=datasets.Features(
                {
                    "begin_time": datasets.Value("string"),
                    "end_time": datasets.Value("string"),
                    "component": datasets.Sequence(datasets.Value("string")),
                    "dt_s": datasets.Value("float"),
                    "event_id": datasets.Value("string"),
                    "station": datasets.Value("string"),
                    "network": datasets.Value("string"),
                    "phase_index": datasets.Sequence(datasets.Value("int32")),
                    "phase_time": datasets.Sequence(datasets.Value("string")),
                    "phase_type": datasets.Sequence(datasets.Value("string")),
                    # 3 components x 24000 samples per trace.
                    "waveform": datasets.Array2D(shape=(3, 24000), dtype="float32"),
                }
            ),
            supervised_keys=("waveform", "phase_type"),
            citation=self.config.citation,
        )

    def _split_generators(self, dl_manager):
        """Resolve the station/catalog CSVs and map each event to its HDF5 file."""
        data_dir = dl_manager.download_and_extract(self.config.data_url)
        # BUG FIX: download_and_extract() takes a single url argument; passing
        # 'stations.csv' as a second positional argument raised a TypeError.
        # The file name must be joined onto the data root instead.
        stationf = dl_manager.download_and_extract(
            os.path.join(_Data_URL, "stations.csv")
        )

        stationl = []
        with open(stationf, newline="") as csvfile:
            reader = csv.reader(csvfile, delimiter=",")
            next(reader)  # skip header row
            for row in reader:
                # Last column is assumed to hold the station id — TODO confirm
                # against the stations.csv schema.
                stationl.append(row[-1])

        eventl = []
        with open(os.path.join(data_dir, "catalogs.csv"), newline="") as csvfile:
            reader = csv.reader(csvfile, delimiter=",")
            next(reader)  # skip header row
            for row in reader:
                # Column 3 is assumed to hold the event id — TODO confirm
                # against the catalogs.csv schema.
                eventl.append(row[3])

        # One HDF5 file per event under <data_dir>/waveform/<event_id>.h5.
        waveform_files = {
            e: os.path.join(data_dir, "waveform", f"{e}.h5") for e in eventl
        }

        return [
            datasets.SplitGenerator(
                name="full",
                gen_kwargs={
                    "stations": stationl,
                    "events": eventl,
                    "waveform_files": waveform_files,
                },
            ),
        ]

    def _generate_examples(self, stations, events, waveform_files):
        """Yield one (key, example) pair per (event, station) waveform.

        `stations` is accepted for interface compatibility with
        `_split_generators` but is not currently used for filtering.
        """
        for e in events:
            # Context manager guarantees the HDF5 handle is closed even if
            # reading a group raises (the original leaked the handle on error).
            with h5py.File(waveform_files[e], "r") as f:
                for sta in f[e].keys():
                    trace = f[e][sta]
                    meta = trace.attrs
                    yield f"{e}_{sta}", {
                        "begin_time": meta["begin_time"],
                        "end_time": meta["end_time"],
                        "component": meta["component"],
                        "dt_s": meta["dt_s"],
                        "event_id": meta["event_id"],
                        "station": meta["station"],
                        "network": meta["network"],
                        "phase_index": meta["phase_index"],
                        "phase_time": meta["phase_time"],
                        "phase_type": meta["phase_type"],
                        # Materialize the dataset into an in-memory array so
                        # the value does not depend on the file staying open.
                        "waveform": trace[()],
                    }