ajthor commited on
Commit
258a5b6
·
verified ·
1 Parent(s): aff1483

Upload folder using huggingface_hub

Browse files
.devcontainer/devcontainer.json ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // For format details, see https://aka.ms/devcontainer.json. For config options, see the
2
+ // README at: https://github.com/devcontainers/templates/tree/main/src/python
3
+ {
4
+ "name": "Python 3 - Dedalus",
5
+ // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
6
+ "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
7
+
8
+ // Features to add to the dev container. More info: https://containers.dev/features.
9
+ "features": {
10
+ "ghcr.io/rocker-org/devcontainer-features/miniforge:2": {},
11
+ "ghcr.io/devcontainers/features/node:1": {}
12
+ },
13
+
14
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
15
+ // "forwardPorts": [],
16
+
17
+ // Use 'postCreateCommand' to run commands after the container is created.
18
+ "postCreateCommand": "bash .devcontainer/setup.sh",
19
+ "containerEnv": {
20
+ "CONDA_DEFAULT_ENV": "dedalus3"
21
+ },
22
+
23
+ // Configure tool-specific properties.
24
+ "customizations": {
25
+ "vscode": {
26
+ "extensions":[
27
+ "ms-python.python",
28
+ "ms-python.vscode-pylance"
29
+ ],
30
+ "settings": {
31
+ "python.pythonPath": "/opt/conda/envs/dedalus3/bin/python"
32
+ }
33
+ }
34
+ }
35
+
36
+ // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
37
+ // "remoteUser": "root"
38
+ }
.devcontainer/setup.sh ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+ set -e
3
+
4
+ # Download the Dedalus install script to a temporary location
5
+ DEDALUS_INSTALL_SCRIPT="$(mktemp)"
6
+ curl -fsSL https://raw.githubusercontent.com/DedalusProject/dedalus_conda/master/conda_install_dedalus3.sh -o "$DEDALUS_INSTALL_SCRIPT"
7
+
8
+ # Ensure conda is initialized (adjust path if needed for your container)
9
+ if [ -f /opt/conda/etc/profile.d/conda.sh ]; then
10
+ source /opt/conda/etc/profile.d/conda.sh
11
+ elif [ -f "$HOME/miniconda3/etc/profile.d/conda.sh" ]; then
12
+ source "$HOME/miniconda3/etc/profile.d/conda.sh"
13
+ fi
14
+
15
+ # Install Dedalus only if the environment does not exist
16
+ if ! conda info --envs | grep -q dedalus3; then
17
+ conda activate base
18
+ bash "$DEDALUS_INSTALL_SCRIPT"
19
+ fi
20
+
21
+
22
+ conda activate dedalus3
23
+
24
+ conda env config vars set OMP_NUM_THREADS=1
25
+ conda env config vars set NUMEXPR_MAX_THREADS=1
26
+
27
+ # Install additional Python packages from requirements.txt
28
+ if [ -f requirements.txt ]; then
29
+ pip install -r requirements.txt
30
+ fi
31
+
32
+ # Clean up the temporary install script
33
+ rm -f "$DEDALUS_INSTALL_SCRIPT"
.gitignore ADDED
@@ -0,0 +1,250 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Created by https://www.toptal.com/developers/gitignore/api/linux,macos,windows,python
2
+ # Edit at https://www.toptal.com/developers/gitignore?templates=linux,macos,windows,python
3
+
4
+ ### Linux ###
5
+ *~
6
+
7
+ # temporary files which can be created if a process still has a handle open of a deleted file
8
+ .fuse_hidden*
9
+
10
+ # KDE directory preferences
11
+ .directory
12
+
13
+ # Linux trash folder which might appear on any partition or disk
14
+ .Trash-*
15
+
16
+ # .nfs files are created when an open file is removed but is still being accessed
17
+ .nfs*
18
+
19
+ ### macOS ###
20
+ # General
21
+ .DS_Store
22
+ .AppleDouble
23
+ .LSOverride
24
+
25
+ # Icon must end with two \r
26
+ Icon
27
+
28
+
29
+ # Thumbnails
30
+ ._*
31
+
32
+ # Files that might appear in the root of a volume
33
+ .DocumentRevisions-V100
34
+ .fseventsd
35
+ .Spotlight-V100
36
+ .TemporaryItems
37
+ .Trashes
38
+ .VolumeIcon.icns
39
+ .com.apple.timemachine.donotpresent
40
+
41
+ # Directories potentially created on remote AFP share
42
+ .AppleDB
43
+ .AppleDesktop
44
+ Network Trash Folder
45
+ Temporary Items
46
+ .apdisk
47
+
48
+ ### macOS Patch ###
49
+ # iCloud generated files
50
+ *.icloud
51
+
52
+ ### Python ###
53
+ # Byte-compiled / optimized / DLL files
54
+ __pycache__/
55
+ *.py[cod]
56
+ *$py.class
57
+
58
+ # C extensions
59
+ *.so
60
+
61
+ # Distribution / packaging
62
+ .Python
63
+ build/
64
+ develop-eggs/
65
+ dist/
66
+ downloads/
67
+ eggs/
68
+ .eggs/
69
+ lib/
70
+ lib64/
71
+ parts/
72
+ sdist/
73
+ var/
74
+ wheels/
75
+ share/python-wheels/
76
+ *.egg-info/
77
+ .installed.cfg
78
+ *.egg
79
+ MANIFEST
80
+
81
+ # PyInstaller
82
+ # Usually these files are written by a python script from a template
83
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
84
+ *.manifest
85
+ *.spec
86
+
87
+ # Installer logs
88
+ pip-log.txt
89
+ pip-delete-this-directory.txt
90
+
91
+ # Unit test / coverage reports
92
+ htmlcov/
93
+ .tox/
94
+ .nox/
95
+ .coverage
96
+ .coverage.*
97
+ .cache
98
+ nosetests.xml
99
+ coverage.xml
100
+ *.cover
101
+ *.py,cover
102
+ .hypothesis/
103
+ .pytest_cache/
104
+ cover/
105
+
106
+ # Translations
107
+ *.mo
108
+ *.pot
109
+
110
+ # Django stuff:
111
+ *.log
112
+ local_settings.py
113
+ db.sqlite3
114
+ db.sqlite3-journal
115
+
116
+ # Flask stuff:
117
+ instance/
118
+ .webassets-cache
119
+
120
+ # Scrapy stuff:
121
+ .scrapy
122
+
123
+ # Sphinx documentation
124
+ docs/_build/
125
+
126
+ # PyBuilder
127
+ .pybuilder/
128
+ target/
129
+
130
+ # Jupyter Notebook
131
+ .ipynb_checkpoints
132
+
133
+ # IPython
134
+ profile_default/
135
+ ipython_config.py
136
+
137
+ # pyenv
138
+ # For a library or package, you might want to ignore these files since the code is
139
+ # intended to run in multiple environments; otherwise, check them in:
140
+ # .python-version
141
+
142
+ # pipenv
143
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
144
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
145
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
146
+ # install all needed dependencies.
147
+ #Pipfile.lock
148
+
149
+ # poetry
150
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
151
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
152
+ # commonly ignored for libraries.
153
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
154
+ #poetry.lock
155
+
156
+ # pdm
157
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
158
+ #pdm.lock
159
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
160
+ # in version control.
161
+ # https://pdm.fming.dev/#use-with-ide
162
+ .pdm.toml
163
+
164
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
165
+ __pypackages__/
166
+
167
+ # Celery stuff
168
+ celerybeat-schedule
169
+ celerybeat.pid
170
+
171
+ # SageMath parsed files
172
+ *.sage.py
173
+
174
+ # Environments
175
+ .env
176
+ .venv
177
+ env/
178
+ venv/
179
+ ENV/
180
+ env.bak/
181
+ venv.bak/
182
+
183
+ # Spyder project settings
184
+ .spyderproject
185
+ .spyproject
186
+
187
+ # Rope project settings
188
+ .ropeproject
189
+
190
+ # mkdocs documentation
191
+ /site
192
+
193
+ # mypy
194
+ .mypy_cache/
195
+ .dmypy.json
196
+ dmypy.json
197
+
198
+ # Pyre type checker
199
+ .pyre/
200
+
201
+ # pytype static type analyzer
202
+ .pytype/
203
+
204
+ # Cython debug symbols
205
+ cython_debug/
206
+
207
+ # PyCharm
208
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
209
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
210
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
211
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
212
+ #.idea/
213
+
214
+ ### Python Patch ###
215
+ # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
216
+ poetry.toml
217
+
218
+ # ruff
219
+ .ruff_cache/
220
+
221
+ # LSP config files
222
+ pyrightconfig.json
223
+
224
+ ### Windows ###
225
+ # Windows thumbnail cache files
226
+ Thumbs.db
227
+ Thumbs.db:encryptable
228
+ ehthumbs.db
229
+ ehthumbs_vista.db
230
+
231
+ # Dump file
232
+ *.stackdump
233
+
234
+ # Folder config file
235
+ [Dd]esktop.ini
236
+
237
+ # Recycle Bin used on file shares
238
+ $RECYCLE.BIN/
239
+
240
+ # Windows Installer files
241
+ *.cab
242
+ *.msi
243
+ *.msix
244
+ *.msm
245
+ *.msp
246
+
247
+ # Windows shortcuts
248
+ *.lnk
249
+
250
+ # End of https://www.toptal.com/developers/gitignore/api/linux,macos,windows,python
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Adam Thorpe
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # 2D Poisson Equation Dataset
2
+
3
+ Numerical solutions to the 2D Poisson equation with mixed boundary conditions using Dedalus spectral methods.
4
+
5
+ ![Sample Plot](sample_plot.png)
6
+
7
+ ## Equation
8
+
9
+ The 2D Poisson equation boundary value problem:
10
+
11
+ **PDE**: ∇²u = f(x,y) in Ω = [0, Lx] × [0, Ly]
12
+
13
+ **Boundary Conditions**:
14
+ - u(x,0) = g(x) (Dirichlet on bottom)
15
+ - ∂u/∂y(x,Ly) = h(x) (Neumann on top)
16
+
17
+ ## Variables
18
+
19
+ The dataset returns a dictionary with the following fields:
20
+
21
+ ### Coordinates
22
+ - `spatial_coordinates`: (2, Nx, Ny) - Combined X,Y coordinate meshgrids
23
+
24
+ ### Solution Fields
25
+ - `solution_field`: (Nx, Ny) - Solution u(x,y)
26
+ - `forcing_function`: (Nx, Ny) - Random forcing function f(x,y)
27
+
28
+ ### Boundary Conditions
29
+ - `boundary_condition_bottom`: (Nx,) - Bottom Dirichlet BC g(x)
30
+ - `boundary_condition_top_gradient`: (Nx,) - Top Neumann BC h(x)
31
+
32
+ ## Dataset Parameters
33
+
34
+ - **Domain**: [0, 2π] × [0, π] (2D rectangular domain)
35
+ - **Grid points**: 256 × 128 (Nx × Ny)
36
+ - **Discretization**: Fourier(x) × Chebyshev(y) spectral methods
37
+ - **Solver**: Dedalus LBVP (Linear Boundary Value Problem)
38
+
39
+ ### Randomization
40
+ - **Forcing function**: Generated using Gaussian processes with random length scales
41
+ - **Boundary conditions**: Fixed sinusoidal bottom BC, zero top gradient BC
42
+ - **Amplitude**: Random amplitude scaling for forcing functions (0.5 to 3.0)
43
+
44
+ ## Physical Context
45
+
46
+ This dataset simulates steady-state physical systems governed by the 2D Poisson equation.
47
+ The equation models phenomena where the spatial distribution depends on source/sink terms, including:
48
+
49
+ **Applications**:
50
+ - Electrostatic potential in the presence of charge distributions
51
+ - Steady-state heat conduction with internal heat sources
52
+ - Fluid stream functions for incompressible flow
53
+ - Gravitational potential from mass distributions
54
+
55
+ ## Usage
56
+
57
+ ```python
58
+ from dataset import PoissonDataset
59
+
60
+ # Create dataset
61
+ dataset = PoissonDataset()
62
+
63
+ # Generate a sample
64
+ sample = next(iter(dataset))
65
+
66
+ # Access solution data
67
+ spatial_coords = sample["spatial_coordinates"] # X, Y meshgrids
68
+ solution = sample["solution_field"] # u(x,y)
69
+ forcing = sample["forcing_function"] # f(x,y)
70
+ ```
71
+
72
+ ## Visualization
73
+
74
+ Run the plotting script to visualize samples:
75
+
76
+ ```bash
77
+ python plot_sample.py # 2D visualization of forcing, solution, and BCs
78
+ ```
79
+
80
+ ## Data Generation
81
+
82
+ Generate the full dataset:
83
+
84
+ ```bash
85
+ python generate_data.py
86
+ ```
87
+
88
+ This creates train/test splits saved as chunked parquet files in the `data/` directory.
data/test-00000-of-00002.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a3c0a2e1edee224adbf9d1c34edb075968eb2240350202b04fd17d762a237156
3
+ size 53791742
data/test-00001-of-00002.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76413f5e46daa85491f117e33a77da79fc828e89da50e2f584e69a15d7a94baa
3
+ size 53791744
data/train-00000-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aface4e3c88e483b1b773aceaa91318ac599ff92275dc02dfc6dade8e315c279
3
+ size 53791742
data/train-00001-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a7784a15cfc7f5d334dcd7fb08c993483ee45ed5fd4c820db4ccce6942295695
3
+ size 53791745
data/train-00002-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:34156d9ca6c28b37221c9558d649d1ccee37c942696cba2bde104756ed855be9
3
+ size 53791742
data/train-00003-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:832bb074f5669e3163fdc210315187dff5bfe852892943c21df84803d0d24954
3
+ size 53791741
data/train-00004-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e7c47b7737fe8ea24655ac0a7cb4eb640ec92ffdc44f4f4d3bde7c4f4147d51b
3
+ size 53791741
data/train-00005-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:917b54b4d9c49a9f32f3ee3e230eafcf3414451a015690a61b2ce6251bc13302
3
+ size 53791741
data/train-00006-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cfdeffae75349cae238522008448b0e90f9339b8aa77dbade4ed53c41d831997
3
+ size 53791744
data/train-00007-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:73f1136531afdc9d94d0345e69101824ed4683b40146a13a289b85a156c2e286
3
+ size 53791744
data/train-00008-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a99866b15e5d1ee168c7c7c1b064853c8988211b5fbdb14c24132fb4d8c4f76
3
+ size 53791743
data/train-00009-of-00010.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:805fe96d18a08e56da9ff2c885a9ca66ae34c2916b95ded4ad24e41fa0bd0bc2
3
+ size 53791744
dataset.py ADDED
@@ -0,0 +1,217 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ 2D Poisson equation dataset with mixed boundary conditions.
3
+
4
+ Solves the boundary value problem:
5
+ ∇²u = f(x,y) in Ω = [0, Lx] × [0, Ly]
6
+ u(x,0) = g(x) on bottom boundary
7
+ ∂u/∂y(x,Ly) = h(x) on top boundary
8
+
9
+ Uses Dedalus spectral methods with Fourier(x) × Chebyshev(y) discretization.
10
+ The forcing function f(x,y) is randomly generated using Gaussian processes
11
+ to create diverse training samples.
12
+
13
+ Physical applications: electrostatic potential, steady-state heat conduction,
14
+ fluid stream functions.
15
+ """
16
+
17
+ import numpy as np
18
+ from torch.utils.data import IterableDataset
19
+
20
+ import dedalus.public as d3
21
+ from functools import partial
22
+ from sklearn.metrics.pairwise import rbf_kernel
23
+ import logging
24
+
25
+ logger = logging.getLogger(__name__)
26
+
27
+
28
def sample_gp_prior(kernel, X, n_samples=1):
    """
    Draw samples from a zero-mean Gaussian process prior.

    Used to generate smooth random fields over the input locations *X*.

    Args:
        kernel: Callable ``k(A, B)`` returning a covariance matrix.
        X: Input locations, shape (n,) or (n, d).
        n_samples: Number of independent samples to draw.

    Returns:
        Array of shape (n_samples, n) of prior samples.
    """
    # Kernel functions expect 2D inputs of shape (n_points, n_features)
    points = X.reshape(-1, 1) if X.ndim == 1 else X

    # Prior covariance evaluated at the requested locations
    cov = kernel(points, points)
    zero_mean = np.zeros(len(points))

    return np.random.multivariate_normal(mean=zero_mean, cov=cov, size=n_samples)
44
+
45
+
46
def sample_gp_posterior(kernel, X, y, xt, n_samples=1):
    """
    Sample from a Gaussian Process posterior conditioned on observations.

    Used here to generate random boundary conditions pinned to given values
    at the training locations.

    Args:
        kernel: Callable ``k(A, B)`` returning a covariance matrix.
        X: Training input locations, shape (n,) or (n, d).
        y: Observed values at X, shape (n,) or (n, 1).
        xt: Test input locations to sample at, shape (m,) or (m, d).
        n_samples: Number of posterior samples to draw.

    Returns:
        Array of shape (n_samples, m) of posterior samples at xt.
    """
    # Kernel functions expect 2D inputs of shape (n_points, n_features)
    if X.ndim == 1:
        X = X.reshape(-1, 1)
    if y.ndim == 1:
        y = y.reshape(-1, 1)
    if xt.ndim == 1:
        xt = xt.reshape(-1, 1)

    K = kernel(X, X)
    Kt = kernel(X, xt)
    Ktt = kernel(xt, xt)

    # Solve linear systems instead of forming K^-1 explicitly (the original
    # used np.linalg.inv): better conditioned for near-singular kernels and
    # numerically equivalent otherwise.
    alpha = np.linalg.solve(K, y)   # K^-1 @ y
    V = np.linalg.solve(K, Kt)      # K^-1 @ Kt

    mu = (Kt.T @ alpha).ravel()     # posterior mean as a 1D array
    cov = Ktt - Kt.T @ V            # posterior covariance

    posterior = np.random.multivariate_normal(
        mean=mu,
        cov=cov,
        size=n_samples,
    )
    return posterior
72
+
73
+
74
class PoissonDataset(IterableDataset):
    """
    Dataset for 2D Poisson equation with mixed boundary conditions.

    Generates random smooth forcing functions using Gaussian processes
    and solves: ∇²u = f(x,y) with u(x,0) = g(x) and ∂u/∂y(x,Ly) = h(x).
    """
    def __init__(
        self,
        Lx=2*np.pi,  # Domain length in x-direction
        Ly=np.pi,  # Domain length in y-direction
        Nx=256,  # Grid points in x-direction
        Ny=128,  # Grid points in y-direction
        dtype=np.float64,
    ):
        """
        Initialize 2D Poisson equation dataset.

        Args:
            Lx: Domain length in x-direction
            Ly: Domain length in y-direction
            Nx: Number of grid points in x-direction
            Ny: Number of grid points in y-direction
            dtype: Data type for computations
        """
        super().__init__()

        # Store domain and grid parameters
        self.Lx = Lx
        self.Ly = Ly
        self.Nx = Nx
        self.Ny = Ny
        self.dtype = dtype

        # Setup Dedalus solver components: periodic Fourier basis in x,
        # Chebyshev basis in y (non-periodic, so the y=0 and y=Ly boundary
        # conditions can be imposed there).
        self.coords = d3.CartesianCoordinates('x', 'y')
        self.dist = d3.Distributor(self.coords, dtype=dtype)
        self.xbasis = d3.RealFourier(self.coords['x'], size=Nx, bounds=(0, Lx))
        self.ybasis = d3.Chebyshev(self.coords['y'], size=Ny, bounds=(0, Ly))

        # Create coordinate grids
        self.x, self.y = self.dist.local_grids(self.xbasis, self.ybasis)

        # Setup fields: u is the unknown; tau_1/tau_2 are tau fields (one per
        # boundary condition, living on the x-basis only); f is the forcing;
        # g/h hold the Dirichlet/Neumann boundary data.
        self.u = self.dist.Field(name='u', bases=(self.xbasis, self.ybasis))
        self.tau_1 = self.dist.Field(name='tau_1', bases=self.xbasis)
        self.tau_2 = self.dist.Field(name='tau_2', bases=self.xbasis)
        self.f = self.dist.Field(bases=(self.xbasis, self.ybasis))
        self.g = self.dist.Field(bases=self.xbasis)
        self.h = self.dist.Field(bases=self.xbasis)

        # Setup operators and namespace for boundary value problem.
        # NOTE: the local names below (dy, lift, f, g, h, u, tau_1, tau_2, Ly)
        # are deliberate — they are captured via namespace=locals() and
        # referenced by name inside the equation strings. Do not rename them.
        dy = lambda A: d3.Differentiate(A, self.coords['y'])
        lift_basis = self.ybasis.derivative_basis(2)
        lift = lambda A, n: d3.Lift(A, lift_basis, n)
        f = self.f
        g = self.g
        h = self.h
        u = self.u
        tau_1 = self.tau_1
        tau_2 = self.tau_2
        Ly = self.Ly

        self.problem = d3.LBVP([self.u, self.tau_1, self.tau_2], namespace=locals())
        self.problem.add_equation("lap(u) + lift(tau_1,-1) + lift(tau_2,-2) = f")
        self.problem.add_equation("u(y=0) = g")
        self.problem.add_equation("dy(u)(y=Ly) = h")

    def __iter__(self):
        """Generate infinite samples from the dataset."""
        while True:
            # Generate random forcing function using filtered noise:
            # fill_random('g', ...) draws random values (presumably in grid
            # space, per Dedalus's 'g' layout convention — confirm), then a
            # low-pass filter with randomly chosen cutoffs varies smoothness.
            self.f.fill_random('g', seed=np.random.randint(0, 2**31))
            filter_x = np.random.randint(32, 128)
            filter_y = np.random.randint(16, 64)
            self.f.low_pass_filter(shape=(filter_x, filter_y))

            # Generate random Dirichlet boundary condition using GP
            # Get x-coordinates for bottom boundary (1D array)
            x_boundary = self.dist.local_grid(self.xbasis)
            sigma_g = 0.2 * self.Lx  # GP length scale for g(x)
            gamma_g = 1 / (2 * sigma_g**2)  # sklearn rbf_kernel gamma convention
            g_sample = sample_gp_posterior(
                kernel=partial(rbf_kernel, gamma=gamma_g),
                X=np.array([0, self.Lx]),  # pin g(x) to zero at both domain ends
                y=np.array([0, 0]),
                xt=x_boundary.ravel(),
                n_samples=1
            )[0]
            # reshape(-1, 1): grid data for an x-basis field appears to be
            # stored as a column — TODO confirm against Dedalus field layout
            self.g['g'] = (np.random.uniform(0.01, 0.1) * g_sample).reshape(-1, 1)

            # Generate random Neumann boundary condition using GP
            sigma_h = 0.3 * self.Lx
            gamma_h = 1 / (2 * sigma_h**2)
            h_sample = sample_gp_posterior(
                kernel=partial(rbf_kernel, gamma=gamma_h),
                X=np.array([0, self.Lx]),
                y=np.array([0, 0]),
                xt=x_boundary.ravel(),
                n_samples=1
            )[0]
            self.h['g'] = (np.random.uniform(0.005, 0.05) * h_sample).reshape(-1, 1)

            yield self.solve()

    def solve(self):
        """
        Solve the 2D Poisson boundary value problem.

        The forcing function and boundary conditions have already been set in __iter__.

        Returns:
            A dictionary containing the solution and related data.
        """

        # Build and solve the LBVP (rebuilt on every call since f/g/h changed)
        solver = self.problem.build_solver()
        solver.solve()

        # Gather global data for return; allgather_data presumably collects
        # the full field across processes — verify for MPI runs.
        x_global = self.xbasis.global_grid(self.dist, scale=1)
        y_global = self.ybasis.global_grid(self.dist, scale=1)
        u_global = self.u.allgather_data('g')
        f_global = self.f.allgather_data('g')
        g_global = self.g.allgather_data('g')
        h_global = self.h.allgather_data('g')

        # Create coordinate meshgrids ('ij' keeps the (Nx, Ny) axis order)
        X, Y = np.meshgrid(x_global.ravel(), y_global.ravel(), indexing='ij')

        return {
            # Combined spatial coordinates
            "spatial_coordinates": np.array([X, Y]),  # Shape: (2, Nx, Ny)

            # Solution field
            "solution_field": u_global,  # Shape: (Nx, Ny)

            # Forcing function
            "forcing_function": f_global,  # Shape: (Nx, Ny)

            # Boundary conditions
            "boundary_condition_bottom": g_global,  # Shape: (Nx,)
            "boundary_condition_top_gradient": h_global,  # Shape: (Nx,)
        }
generate_data.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Generate 2D Poisson equation dataset and save to parquet files in chunks.
4
+ """
5
+
6
+ import os
7
+ import numpy as np
8
+ import pyarrow as pa
9
+ import pyarrow.parquet as pq
10
+ from dataset import PoissonDataset
11
+
12
+
13
def generate_dataset_split(
    split_name="train", num_samples=1000, chunk_size=100, output_dir="data"
):
    """
    Generate a dataset split and save as chunked parquet files.

    Args:
        split_name: Prefix for the output files ("train" or "test").
        num_samples: Total number of samples to generate.
        chunk_size: Maximum samples per parquet file.
        output_dir: Directory the parquet files are written to.

    Returns:
        The number of samples generated.
    """
    os.makedirs(output_dir, exist_ok=True)

    # Infinite sample source; we draw exactly num_samples from it
    dataset = PoissonDataset()
    sample_stream = iter(dataset)

    # Ceiling division: the final chunk may hold fewer than chunk_size samples
    num_chunks = -(-num_samples // chunk_size)

    print(f"Generating {num_samples} {split_name} samples in {num_chunks} chunks...")

    buffer = {}
    for idx in range(num_samples):
        sample = next(sample_stream)

        # Accumulate each field of the sample into the per-chunk buffer
        for key, value in sample.items():
            buffer.setdefault(key, []).append(value)

        chunk_full = (idx + 1) % chunk_size == 0
        last_sample = idx == num_samples - 1
        if chunk_full or last_sample:
            chunk_idx = idx // chunk_size

            # PyArrow wants plain nested lists rather than numpy arrays
            table = pa.table(
                {key: [arr.tolist() for arr in values] for key, values in buffer.items()}
            )

            filename = f"{split_name}-{chunk_idx:05d}-of-{num_chunks:05d}.parquet"
            filepath = os.path.join(output_dir, filename)
            pq.write_table(table, filepath)

            print(f"Saved chunk {chunk_idx + 1}/{num_chunks}: {filepath}")

            # Start a fresh buffer for the next chunk
            buffer = {}

    print(f"Generated {num_samples} {split_name} samples")
    return num_samples
67
+
68
+
69
if __name__ == "__main__":
    # Fixed seed so the generated dataset is reproducible
    np.random.seed(42)

    # Produce both splits with the same chunking scheme
    for split, count in (("train", 1000), ("test", 200)):
        generate_dataset_split(split, num_samples=count, chunk_size=100)
plot_sample.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Plot a single sample from the 2D Poisson equation dataset.
4
+
5
+ Visualizes the forcing function, solution field, and boundary conditions
6
+ for a single randomly generated Poisson boundary value problem.
7
+ """
8
+
9
+ import numpy as np
10
+ import matplotlib.pyplot as plt
11
+ from dataset import PoissonDataset
12
+
13
+
14
def plot_poisson_sample(sample, save_path="sample_plot.png"):
    """
    Plot a single sample from the 2D Poisson equation dataset.

    Creates a 3-panel visualization showing:
    1. Forcing function f(x,y)
    2. Solution field u(x,y)
    3. Boundary conditions
    """
    fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(15, 5))

    # Unpack the fields produced by PoissonDataset
    X, Y = sample["spatial_coordinates"]  # Shape: (2, Nx, Ny)
    solution = sample["solution_field"]  # Shape: (Nx, Ny)
    forcing = sample["forcing_function"]  # Shape: (Nx, Ny)
    bc_bottom = sample["boundary_condition_bottom"]  # Shape: (Nx,)
    bc_top_grad = sample["boundary_condition_top_gradient"]  # Shape: (Nx,)

    # Panels 1 & 2: pseudocolor maps of the forcing and the solution
    field_panels = (
        (ax1, forcing, "RdBu_r", "Forcing Function f(x,y)", "f(x,y)"),
        (ax2, solution, "viridis", "Solution u(x,y)", "u(x,y)"),
    )
    for ax, field, cmap, title, cbar_label in field_panels:
        mesh = ax.pcolormesh(
            X, Y, field, cmap=cmap, shading="gouraud", rasterized=True
        )
        ax.set_xlabel("x")
        ax.set_ylabel("y")
        ax.set_title(title)
        ax.set_aspect("equal")
        plt.colorbar(mesh, ax=ax, label=cbar_label)

    # Panel 3: 1D boundary data along the bottom (y=0) and top (y=Ly) edges.
    # X has shape (Nx, Ny); take x-coordinates along each boundary.
    x_bottom = X[:, 0]
    x_top = X[:, -1]

    # np.ravel flattens (Nx, 1)-shaped BC arrays; 1D arrays pass through
    ax3.plot(x_bottom, np.ravel(bc_bottom), "b-", linewidth=2, label="u(x,0) = g(x)")
    ax3.plot(x_top, np.ravel(bc_top_grad), "r--", linewidth=2, label="∂u/∂y(x,Ly) = h(x)")
    ax3.set_xlabel("x")
    ax3.set_ylabel("Boundary values")
    ax3.set_title("Boundary Conditions")
    ax3.legend()
    ax3.grid(True, alpha=0.3)

    plt.tight_layout()
    plt.savefig(save_path, dpi=200, bbox_inches="tight")
    plt.close()

    print(f"Sample visualization saved to {save_path}")
77
+
78
+
79
if __name__ == "__main__":
    # Set random seed for reproducibility
    np.random.seed(42)

    # Create dataset instance
    dataset = PoissonDataset()

    # Generate a single sample. The original drew next() twice and discarded
    # the first result — each draw solves a full boundary value problem, so
    # one draw is sufficient and halves the work.
    sample = next(iter(dataset))

    # Summarize the sample contents before plotting
    print("Sample keys:", list(sample.keys()))
    for key, value in sample.items():
        if hasattr(value, "shape"):
            print(f"{key}: shape {value.shape}")
        else:
            print(f"{key}: {type(value)} - {value}")

    # Plot the sample
    plot_poisson_sample(sample)
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ numpy
2
+ torch
3
+ scikit-learn
4
+ matplotlib
5
+ pyarrow
6
+ pillow
sample_plot.png ADDED

Git LFS Details

  • SHA256: dabc4e7de482d54a75b6d88e5a9c645741ece49af1f802a078488a2d1738f211
  • Pointer size: 131 Bytes
  • Size of remote file: 703 kB