---
license: cc-by-4.0
configs:
- config_name: default
  data_files:
  - split: all_samples
    path: data/all_samples-*
dataset_info:
  features:
  - name: Base_2_2/Zone/CellData/diffusion_coefficient
    list: float32
  - name: Base_2_2/Zone/CellData/flow
    list: float32
  - name: Global/forcing_magnitude
    list: float32
  splits:
  - name: all_samples
    num_bytes: 6554400000
    num_examples: 50000
  download_size: 3321884222
  dataset_size: 6554400000
---

Example of usage:

```python
import torch
from plaid.bridges import huggingface_bridge as hfb
from torch.utils.data import DataLoader


def reshape_all(batch: dict[str, torch.Tensor]) -> dict[str, torch.Tensor]:
    """Reshape the flattened fields of *batch* into (128, 128) images.

    Mutates the batch dict in place and returns it, so it can be used
    directly as a ``datasets.Dataset.map`` function with ``batched=True``.
    """
    for field in ("diffusion_coefficient", "flow"):
        batch[field] = batch[field].reshape(-1, 128, 128)

    return batch


# Load the dataset from the hub
ds = hfb.load_dataset_from_hub(
    repo_id="Nionio/PDEBench_2D_DarcyFlow", split="all_samples"
)

# Rename the features to short, convenient column names
ds = ds.rename_columns(
    {
        "Base_2_2/Zone/CellData/diffusion_coefficient": "diffusion_coefficient",
        "Base_2_2/Zone/CellData/flow": "flow",
        "Global/forcing_magnitude": "forcing",
    }
)

# Convert to torch tensors
ds = ds.with_format("torch")

# Reshape the flattened fields into (128, 128) images
ds = ds.map(reshape_all, batched=True)

# Example of usage with a DataLoader: print the shape of every field
# in a single batch, then stop (no need to iterate the whole dataset).
dl = DataLoader(ds, batch_size=32, shuffle=True)
for batch in dl:
    for k, v in batch.items():
        print(k, v.shape)
    break
```