File size: 2,092 Bytes
4de3f1f
 
3acb188
4de3f1f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3acb188
4de3f1f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
"""QuakeFlow DAS: Read DAS event waveforms from HuggingFace.

Files are downloaded on first access and cached locally.
"""

# %%
import functools

import h5py
import numpy as np
from huggingface_hub import hf_hub_download, list_repo_files

try:
    import torch

    _TORCH_AVAILABLE = True
except ImportError:
    _TORCH_AVAILABLE = False

REPO_ID = "AI4EPS/quakeflow_das"


def read_event(filepath):
    """Load one DAS event from an HDF5 file into a plain dict.

    Returns a dict with key "data" holding the waveform array as float32,
    plus every attribute attached to the "data" dataset; bytes-valued
    attributes are decoded to str (UTF-8, undecodable bytes replaced).
    """
    with h5py.File(filepath, "r") as f:
        dataset = f["data"]
        event = {"data": dataset[:].astype(np.float32)}
        for name, value in dataset.attrs.items():
            if isinstance(value, bytes):
                value = value.decode("utf-8", errors="replace")
            event[name] = value
    return event


@functools.lru_cache(maxsize=1)
def _all_repo_files():
    """Fetch the dataset repo's full file listing once (single network call)."""
    return tuple(list_repo_files(REPO_ID, repo_type="dataset"))


@functools.lru_cache(maxsize=None)
def list_h5(subset):
    """List all .h5 files for a subset from the HuggingFace repo (cached).

    The repo-wide listing is fetched once via ``_all_repo_files`` and shared
    across subsets, so querying several subsets costs one network round trip
    instead of one per subset.
    """
    prefix = f"{subset}/data/"
    return sorted(f for f in _all_repo_files() if f.startswith(prefix) and f.endswith(".h5"))


def download(repo_path):
    """Download a file from HuggingFace (cached after first download)."""
    # hf_hub_download reuses the locally cached copy on repeat calls;
    # files are materialized under the current working directory.
    local_path = hf_hub_download(
        repo_id=REPO_ID,
        filename=repo_path,
        repo_type="dataset",
        local_dir=".",
    )
    return local_path


# Subclass torch's Dataset when torch is installed so DASDataset plugs into
# DataLoader; otherwise fall back to plain `object` (indexing still works).
_base_class = torch.utils.data.Dataset if _TORCH_AVAILABLE else object


class DASDataset(_base_class):
    """PyTorch Dataset for DAS events. Downloads files on first access."""

    def __init__(self, subset, max_events=None):
        # Resolve the (cached) remote file listing up front; optionally
        # keep only the first `max_events` entries.
        files = list_h5(subset)
        self.files = files if max_events is None else files[:max_events]

    def __len__(self):
        return len(self.files)

    def __getitem__(self, idx):
        # Download (or reuse the cached copy of) the file, then parse it.
        local_path = download(self.files[idx])
        return read_event(local_path)


# %% Example: iterate over events
if __name__ == "__main__":
    # Smoke-test both subsets with a small cap so the demo stays fast.
    for subset in ["ridgecrest_north", "arcata"]:
        print(f"\n=== {subset} ===")
        ds = DASDataset(subset, max_events=3)
        for idx in range(len(ds)):
            event = ds[idx]
            print(f"  {event['event_id']}: shape={event['data'].shape}, mag={event.get('magnitude', 'N/A')}")

# %%