---
license: cc-by-4.0
configs:
- config_name: default
  data_files:
  - split: all_samples
    path: data/all_samples-*
dataset_info:
  features:
  - name: Base_2_2/Zone/CellData/diffusion_coefficient
    list: float32
  - name: Base_2_2/Zone/CellData/flow
    list: float32
  - name: Global/forcing_magnitude
    list: float32
  splits:
  - name: all_samples
    num_bytes: 6554400000
    num_examples: 50000
  download_size: 3321884222
  dataset_size: 6554400000
---

Example of usage:

```python
# Third-party imports: torch + its DataLoader, and the PLAID hub bridge.
import torch
from torch.utils.data import DataLoader

from plaid.bridges import huggingface_bridge as hfb

def reshape_all(batch: dict[str, torch.Tensor]) -> dict[str, torch.Tensor]:
    """Reshape the flattened field tensors into (128, 128) images.

    Args:
        batch: Mapping from feature name to a batched tensor; the
            "diffusion_coefficient" and "flow" entries hold flattened
            128*128 fields.

    Returns:
        The same dict, with those two entries viewed as (-1, 128, 128).
    """
    for field in ("diffusion_coefficient", "flow"):
        batch[field] = batch[field].reshape(-1, 128, 128)
    return batch

# Fetch the Darcy-flow dataset from the Hugging Face hub.
dataset = hfb.load_dataset_from_hub(
    repo_id="Nionio/PDEBench_2D_DarcyFlow", split="all_samples"
)

# Shorten the CGNS-style feature names to plain identifiers.
dataset = dataset.rename_columns(
    {
        "Base_2_2/Zone/CellData/diffusion_coefficient": "diffusion_coefficient",
        "Base_2_2/Zone/CellData/flow": "flow",
        "Global/forcing_magnitude": "forcing",
    }
)

# Yield torch tensors instead of Python lists.
dataset = dataset.with_format("torch")

# Turn the flat fields into (128, 128) images.
dataset = dataset.map(reshape_all, batched=True)

# Iterate with a standard PyTorch DataLoader.
loader = DataLoader(dataset, batch_size=32, shuffle=True)
for batch in loader:
    for name, tensor in batch.items():
        print(name, tensor.shape)
    break
```