Upload folder using huggingface_hub
Browse files
README.md
CHANGED
|
@@ -1,3 +1,55 @@
|
|
| 1 |
-
---
|
| 2 |
-
license: cc-by-4.0
|
| 3 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: cc-by-4.0
|
| 3 |
+
pretty_name: Kolmogorov flow
|
| 4 |
+
---
|
| 5 |
+
|
| 6 |
+
## Simulations of Kolmogorov flow at different Reynolds numbers
|
| 7 |
+
|
| 8 |
+
This directory contains HDF5 trajectory datasets for 2D Kolmogorov flow simulations across multiple Reynolds numbers.
|
| 9 |
+
|
| 10 |
+
Each simulation corresponds to 60,000 time steps of the Kolmogorov flow at fixed Reynolds number: 6 snapshots for each of 10,000 turnover times. The simulation time and cadence is thus scaled by the Reynolds number. Burn-in has already been removed from each set of snapshots. The integrator timestep is much smaller than the snapshot interval, and the trajectories have been validated for stationarity using the gamma statistic of Chandler and Kerswell (2013).
|
| 11 |
+
|
| 12 |
+
The trajectory fields store vorticity snapshots in a 3D array of shape (T, 512, 512), where T is the number of time steps. In two dimensions, the streamfunction can be uniquely identified from the vorticity using a Poisson solve, and so the raw vorticity fully characterizes the flow.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
### Data validation
|
| 16 |
+
|
| 17 |
+
To ensure the data is valid, run the bundled validation script:
|
| 18 |
+
|
| 19 |
+
```bash
|
| 20 |
+
python check_data.py
|
| 21 |
+
```
|
| 22 |
+
|
| 23 |
+
### Usage example
|
| 24 |
+
|
| 25 |
+
To stream each simulation, we can use the `h5py` library to read the trajectory in chunks. Recall that h5py does not load data into memory until it is explicitly materialized (for example, by slicing into a NumPy array), and so we can stream the data in chunks.
|
| 26 |
+
|
| 27 |
+
```python
|
| 28 |
+
import h5py
|
| 29 |
+
import numpy as np
|
| 30 |
+
|
| 31 |
+
h5_path = "./re40.h5"
|
| 32 |
+
|
| 33 |
+
all_obs = []
|
| 34 |
+
batch_size = 64 # number of time steps to read into memory at a time
|
| 35 |
+
|
| 36 |
+
with h5py.File(h5_path, "r") as f:
|
| 37 |
+
|
| 38 |
+
## Load the simulation metadata.
|
| 39 |
+
reynolds_number = float(f.attrs["reynolds_number"])
|
| 40 |
+
nu = 1.0 / reynolds_number # viscosity
|
| 41 |
+
domain_extent = float(f.attrs.get("domain_extent", 2 * np.pi))
|
| 42 |
+
injection_mode = int(f.attrs.get("injection_mode", f.attrs["forcing_frequency"]))
|
| 43 |
+
time_between_snapshots = float(f.attrs["time_between_snapshots"])
|
| 44 |
+
|
| 45 |
+
## Iterate over the trajectory in chunks. Avoid casting the trajectory itself to a
|
| 46 |
+
## numpy array, because it's too large to fit into memory.
|
| 47 |
+
traj = f["trajectory"]
|
| 48 |
+
n_steps = traj.shape[0]
|
| 49 |
+
first_batch = traj[:batch_size, 0, :, :]
|
| 50 |
+
for start in range(first_batch.shape[0], n_steps, batch_size):
|
| 51 |
+
print(f"Running {start} / {n_steps}", flush=True)
|
| 52 |
+
stop = min(start + batch_size, n_steps)
|
| 53 |
+
batch = traj[start:stop, 0, :, :]
|
| 54 |
+
## Can operate on the batch here.
|
| 55 |
+
```
|
check_data.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import argparse
|
| 4 |
+
import sys
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import Any
|
| 7 |
+
|
| 8 |
+
import h5py
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def build_parser() -> argparse.ArgumentParser:
    """Build the command-line parser for the HDF5 validation script.

    Returns:
        An ``argparse.ArgumentParser`` with a single ``--pattern`` option
        controlling which files in the script directory are checked.
    """
    description = (
        "Check every HDF5 file in the script directory and verify that all "
        "numeric datasets and numeric attributes are real-valued and finite."
    )
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "--pattern",
        default="*.h5",
        help="Glob pattern used inside the script directory. Default: %(default)s.",
    )
    return parser
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def iter_datasets(group: h5py.Group, prefix: str = ""):
    """Recursively yield ``(path, dataset)`` pairs for every dataset under *group*.

    Paths are slash-separated, relative to the group the walk started from.
    """
    for child_name, child in group.items():
        child_path = f"{prefix}/{child_name}" if prefix else child_name
        if isinstance(child, h5py.Dataset):
            yield child_path, child
        elif isinstance(child, h5py.Group):
            # Descend into sub-groups, extending the path prefix.
            yield from iter_datasets(child, child_path)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def validate_numeric_array(values: np.ndarray[Any, Any], *, label: str) -> None:
    """Raise ``ValueError`` if *values* is complex or contains non-finite entries.

    Non-numeric arrays (e.g. string data) pass through without any check.

    Args:
        values: Array to validate.
        label: Human-readable identifier used in error messages.
    """
    if np.iscomplexobj(values):
        raise ValueError(f"{label} contains complex values.")

    is_numeric = np.issubdtype(values.dtype, np.number)
    if is_numeric and not np.all(np.isfinite(values)):
        raise ValueError(f"{label} contains non-finite values.")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def validate_attr_value(value: Any, *, label: str) -> None:
    """Validate one HDF5 attribute value; non-numeric attributes are ignored.

    Args:
        value: Raw attribute value as returned by ``h5py``.
        label: Human-readable identifier used in error messages.
    """
    as_array = np.asarray(value)
    # Only boolean, integer, unsigned, float, and complex kinds are checked;
    # strings, objects, etc. are skipped silently.
    if as_array.dtype.kind in {"b", "i", "u", "f", "c"}:
        validate_numeric_array(as_array, label=label)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def validate_h5_file(h5_path: Path) -> None:
    """Validate a single HDF5 file.

    Checks every root-level attribute, every dataset, and every dataset
    attribute for complex or non-finite values, raising ``ValueError`` on
    the first violation.

    Args:
        h5_path: Path of the HDF5 file to open read-only.

    Raises:
        ValueError: If any numeric attribute or dataset fails validation.
    """
    print(f"Checking {h5_path}", flush=True)

    with h5py.File(h5_path, "r") as handle:
        for attr_name, attr_value in handle.attrs.items():
            validate_attr_value(attr_value, label=f"{h5_path} attr {attr_name!r}")

        for dataset_path, dataset in iter_datasets(handle):
            label = f"{h5_path}:{dataset_path}"
            if dataset.shape == ():
                # Scalar datasets are tiny; materialize directly.
                validate_numeric_array(np.asarray(dataset), label=label)
            else:
                # Stream along the leading axis instead of materializing the
                # whole dataset: trajectory files are tens of GB, and a single
                # np.asarray(dataset) would exhaust memory.
                chunk_len = 64
                for start in range(0, dataset.shape[0], chunk_len):
                    validate_numeric_array(
                        np.asarray(dataset[start : start + chunk_len]),
                        label=label,
                    )

            for attr_name, attr_value in dataset.attrs.items():
                validate_attr_value(
                    attr_value,
                    label=f"{h5_path}:{dataset_path} attr {attr_name!r}",
                )
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def main() -> int:
    """Validate every HDF5 file next to this script that matches the pattern.

    Returns:
        ``0`` when all matching files validate, ``1`` when no files match.
    """
    args = build_parser().parse_args()
    script_dir = Path(__file__).resolve().parent
    candidates = script_dir.glob(args.pattern)
    h5_paths = sorted(candidate for candidate in candidates if candidate.is_file())

    if not h5_paths:
        # Report the miss on stderr so pipelines can distinguish it from output.
        print(
            f"No HDF5 files found in {script_dir} matching {args.pattern!r}.",
            file=sys.stderr,
        )
        return 1

    for h5_path in h5_paths:
        validate_h5_file(h5_path)

    print(f"Validated {len(h5_paths)} file(s) in {script_dir}.", flush=True)
    return 0
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
if __name__ == "__main__":
    # Propagate the validator's status code as the process exit code.
    raise SystemExit(main())
|
re100.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3137ec5f66372f30015d7a391437323eb48bb172b5060773bb29f00eaf2e6ac3
|
| 3 |
+
size 39184876606
|
re120.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:be2f81bde9a09b4cc09aeec6b28c1fbe3b7db18f3ccd2ac487e3ab0e325228a8
|
| 3 |
+
size 43590560597
|
re140.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b106096d16b9d0abd5eb6ed9d7cc4b8b69e7501be55ab57482547f0e1827f674
|
| 3 |
+
size 37323064912
|
re160.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b5e03f94277d6ba9518b69e18dc07352cbe1eec28450c3d26bb4dcfd6b193d91
|
| 3 |
+
size 49133216158
|
re180.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:52347d468d5fc90d85038e9fede72ed9fa08e97f5968e6321df6fc76ec0baa6a
|
| 3 |
+
size 43675706951
|
re40.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:57b672c3a10dfdac6ce7c8e860c28b4175786696fbc050c5b3f0720fa8be4320
|
| 3 |
+
size 43701939578
|
re60.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:98b19cfa87e69d687fca667c7e51d080171f2c2370061170a755f4ae5d739865
|
| 3 |
+
size 43562339884
|
re80.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2ffbeb9e6f9add93e3d11868f69ab92591cdf889353fe24541a309a003928ae5
|
| 3 |
+
size 48977582175
|