quakeflow_das / example.py
zhuwq0's picture
Restructure folders from data/{location} to {location}/data to match gs://quakeflow_das
3acb188
raw
history blame
2.09 kB
"""QuakeFlow DAS: Read DAS event waveforms from HuggingFace.
Files are downloaded on first access and cached locally.
"""
# %%
import functools
import h5py
import numpy as np
from huggingface_hub import hf_hub_download, list_repo_files
# torch is an optional dependency: when installed, DASDataset below can
# subclass torch.utils.data.Dataset (so it plugs into DataLoader); when
# missing, the module still imports and falls back to a plain object base.
try:
    import torch
    _TORCH_AVAILABLE = True
except ImportError:
    _TORCH_AVAILABLE = False

# HuggingFace dataset repository that holds the DAS event HDF5 files.
REPO_ID = "AI4EPS/quakeflow_das"
def read_event(filepath):
    """Load one DAS event from an HDF5 file into a plain dict.

    The returned dict has a "data" key holding the waveform array as
    float32, plus one entry per attribute of the "data" dataset
    (byte-string attributes are decoded to UTF-8 str).
    """
    event = {}
    with h5py.File(filepath, "r") as h5:
        dset = h5["data"]
        event["data"] = dset[:].astype(np.float32)
        # Copy dataset attributes as metadata; decode any raw bytes.
        for name, value in dset.attrs.items():
            if isinstance(value, bytes):
                value = value.decode("utf-8", errors="replace")
            event[name] = value
    return event
@functools.lru_cache(maxsize=None)
def list_h5(subset):
    """Return the sorted .h5 file paths under ``{subset}/data/`` in the repo.

    The repository listing is fetched over the network at most once per
    subset thanks to the lru_cache memoization.
    """
    wanted_prefix = f"{subset}/data/"
    repo_files = list_repo_files(REPO_ID, repo_type="dataset")
    matches = [
        path
        for path in repo_files
        if path.startswith(wanted_prefix) and path.endswith(".h5")
    ]
    return sorted(matches)
def download(repo_path):
    """Fetch one repo file into the current directory and return its local path.

    hf_hub_download caches the file, so repeat calls are free after the
    first download.
    """
    local_path = hf_hub_download(
        REPO_ID,
        repo_path,
        repo_type="dataset",
        local_dir=".",
    )
    return local_path
# Use torch's Dataset base when torch is available (enables DataLoader);
# otherwise a plain object keeps the class usable without torch.
_base_class = torch.utils.data.Dataset if _TORCH_AVAILABLE else object


class DASDataset(_base_class):
    """Map-style dataset of DAS events; each item is fetched lazily on access."""

    def __init__(self, subset, max_events=None):
        """List the subset's event files, optionally truncated to `max_events`."""
        files = list_h5(subset)
        self.files = files if max_events is None else files[:max_events]

    def __len__(self):
        """Number of events in this (possibly truncated) listing."""
        return len(self.files)

    def __getitem__(self, idx):
        """Download (cached after first use) and parse the idx-th event file."""
        local_path = download(self.files[idx])
        return read_event(local_path)
# %% Example: iterate over events
if __name__ == "__main__":
    # Demo: print a few events from two subsets, downloading on demand.
    for subset in ("ridgecrest_north", "arcata"):
        print(f"\n=== {subset} ===")
        dataset = DASDataset(subset, max_events=3)
        n_events = len(dataset)
        for idx in range(n_events):
            event = dataset[idx]
            print(f"  {event['event_id']}: shape={event['data'].shape}, mag={event.get('magnitude', 'N/A')}")
# %%