Datasets:

Modalities:
Tabular
Text
Size:
< 1K
DOI:
License:
shakxy42 committed on
Commit
0820ae1
·
verified ·
1 Parent(s): f3a51e0

Update BubbleML_2.py

Browse files
Files changed (1) hide show
  1. BubbleML_2.py +73 -42
BubbleML_2.py CHANGED
@@ -11,6 +11,7 @@ from datasets import (
11
  GeneratorBasedBuilder,
12
  DatasetInfo,
13
  Features,
 
14
  Sequence,
15
  Value,
16
  Split,
@@ -24,13 +25,16 @@ Pre-defined train/test splits across two directories.
24
  """
25
 
26
  class BubbleMLConfig(BuilderConfig):
27
- """BuilderConfig for BubbleML_2."""
28
  def __init__(
29
  self,
30
  *,
31
  name: str,
32
  description: str,
33
  data_files: dict,
 
 
 
34
  data_dir: str = "",
35
  version: Version = Version("1.0.0"),
36
  **kwargs,
@@ -38,6 +42,9 @@ class BubbleMLConfig(BuilderConfig):
38
  super().__init__(name=name, version=version, description=description, **kwargs)
39
  self.data_files = data_files
40
  self.data_dir = data_dir
 
 
 
41
 
42
  class BubbleMLDataset(GeneratorBasedBuilder):
43
  """BubbleML_2: combined single-bubble dataset."""
@@ -47,7 +54,10 @@ class BubbleMLDataset(GeneratorBasedBuilder):
47
  BubbleMLConfig(
48
  name="single-bubble",
49
  description="Single-bubble (FC72 & R515B) train/test split",
50
- data_dir="", # use repo root locally; if empty, remote download is used
 
 
 
51
  data_files={
52
  "train": [
53
  # FC72 train
@@ -84,11 +94,15 @@ class BubbleMLDataset(GeneratorBasedBuilder):
84
  ]
85
 
86
  def _info(self) -> DatasetInfo:
 
 
 
 
87
  features = Features({
88
- "input": Sequence(Sequence(Sequence(Sequence(Value("float32"))))),
89
- "output": Sequence(Sequence(Sequence(Sequence(Value("float32"))))),
90
  "fluid_params": Sequence(Value("float32")),
91
- "filename": Value("string"),
92
  })
93
  return DatasetInfo(
94
  description=_DESCRIPTION,
@@ -102,50 +116,54 @@ class BubbleMLDataset(GeneratorBasedBuilder):
102
  cfg = self.config
103
 
104
  if cfg.data_dir:
105
- # Local testing: use the provided data_dir
106
  base_dir = cfg.data_dir
107
  def resolve_local(split):
108
- paths = []
109
  for rel in cfg.data_files[split]:
110
- full = os.path.join(base_dir, rel)
111
- if not os.path.isfile(full):
112
- raise FileNotFoundError(f"Expected {full}, but it was not found.")
113
- paths.append((full, full.replace(".hdf5", ".json")))
114
- return paths
115
- train_pairs = resolve_local("train")
116
- test_pairs = resolve_local("test")
 
 
 
117
  else:
118
- # Remote download: fetch each .hdf5 and .json from the Hub
119
  base_url = "https://huggingface.co/datasets/hpcforge/BubbleML_2/resolve/main/"
120
- # build map of rel -> url
121
  url_map = {}
122
- for split in ["train", "test"]:
123
  for rel in cfg.data_files[split]:
124
  url_map[rel] = base_url + rel
125
- meta = rel.replace(".hdf5", ".json")
126
- url_map[meta] = base_url + meta
127
- # download all
128
- downloaded = dl_manager.download(url_map)
129
- # resolve into pairs
130
- train_pairs = []
131
- test_pairs = []
132
- for split in ["train", "test"]:
133
  for rel in cfg.data_files[split]:
134
- h5_path = downloaded[rel]
135
- json_path = downloaded[rel.replace(".hdf5", ".json")]
136
- train_pairs.append((h5_path, json_path)) if split == "train" else test_pairs.append((h5_path, json_path))
137
 
138
  return [
139
- datasets.SplitGenerator(name=Split.TRAIN, gen_kwargs={"file_pairs": train_pairs}),
140
- datasets.SplitGenerator(name=Split.TEST, gen_kwargs={"file_pairs": test_pairs}),
141
  ]
142
 
143
  def _generate_examples(self, file_pairs):
144
- """Yield examples from each (hdf5,json) pair."""
145
- for idx, (h5_path, json_path) in enumerate(file_pairs):
 
 
 
146
  with h5py.File(h5_path, "r") as h5f:
147
- inp = np.stack([h5f[k][:5] for k in ["dfun", "temperature", "velx", "vely"]])
148
- out = np.stack([h5f[k][5:10] for k in ["dfun", "temperature", "velx", "vely"]])
 
 
 
149
  with open(json_path, "r") as jf:
150
  p = json.load(jf)
151
  fluid_params = [
@@ -154,14 +172,27 @@ class BubbleMLDataset(GeneratorBasedBuilder):
154
  p["mugas"],
155
  p["rhogas"],
156
  p["thcogas"],
157
- p["stefan"],
158
  p["prandtl"],
159
  p["heater"]["nucWaitTime"],
160
- p["heater"]["wallTemp"],
161
  ]
162
- yield idx, {
163
- "input": inp.astype(np.float32).tolist(),
164
- "output": out.astype(np.float32).tolist(),
165
- "fluid_params": fluid_params,
166
- "filename": os.path.basename(h5_path),
167
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  GeneratorBasedBuilder,
12
  DatasetInfo,
13
  Features,
14
+ Array4D,
15
  Sequence,
16
  Value,
17
  Split,
 
25
  """
26
 
27
  class BubbleMLConfig(BuilderConfig):
28
+ """BuilderConfig for BubbleML_2 with fixed T, H, W."""
29
  def __init__(
30
  self,
31
  *,
32
  name: str,
33
  description: str,
34
  data_files: dict,
35
+ timesteps: int,
36
+ height: int,
37
+ width: int,
38
  data_dir: str = "",
39
  version: Version = Version("1.0.0"),
40
  **kwargs,
 
42
  super().__init__(name=name, version=version, description=description, **kwargs)
43
  self.data_files = data_files
44
  self.data_dir = data_dir
45
+ self.timesteps = timesteps
46
+ self.height = height
47
+ self.width = width
48
 
49
  class BubbleMLDataset(GeneratorBasedBuilder):
50
  """BubbleML_2: combined single-bubble dataset."""
 
54
  BubbleMLConfig(
55
  name="single-bubble",
56
  description="Single-bubble (FC72 & R515B) train/test split",
57
+ data_dir="", # local root; overridden by --data_dir
58
+ timesteps=5,
59
+ height=288,
60
+ width=192,
61
  data_files={
62
  "train": [
63
  # FC72 train
 
94
  ]
95
 
96
  def _info(self) -> DatasetInfo:
97
+ cfg = self.config
98
+ # C = number of fields
99
+ C = 4
100
+ T, H, W = cfg.timesteps, cfg.height, cfg.width
101
  features = Features({
102
+ "input": Array4D(dtype="float32", shape=(T, C, H, W)),
103
+ "output": Array4D(dtype="float32", shape=(T, C, H, W)),
104
  "fluid_params": Sequence(Value("float32")),
105
+ "filename": Value("string"),
106
  })
107
  return DatasetInfo(
108
  description=_DESCRIPTION,
 
116
  cfg = self.config
117
 
118
  if cfg.data_dir:
119
+ # Local: read files from disk
120
  base_dir = cfg.data_dir
121
  def resolve_local(split):
122
+ pairs = []
123
  for rel in cfg.data_files[split]:
124
+ h5p = os.path.join(base_dir, rel)
125
+ jp = h5p.replace(".hdf5", ".json")
126
+ if not os.path.isfile(h5p):
127
+ raise FileNotFoundError(f"{h5p} not found.")
128
+ if not os.path.isfile(jp):
129
+ raise FileNotFoundError(f"{jp} not found.")
130
+ pairs.append((h5p, jp))
131
+ return pairs
132
+ train = resolve_local("train")
133
+ test = resolve_local("test")
134
  else:
135
+ # Remote: download from Hub
136
  base_url = "https://huggingface.co/datasets/hpcforge/BubbleML_2/resolve/main/"
 
137
  url_map = {}
138
+ for split in ["train","test"]:
139
  for rel in cfg.data_files[split]:
140
  url_map[rel] = base_url + rel
141
+ url_map[rel.replace(".hdf5", ".json")] = base_url + rel.replace(".hdf5", ".json")
142
+ dl = dl_manager.download(url_map)
143
+ train, test = [], []
144
+ for split in ["train","test"]:
 
 
 
 
145
  for rel in cfg.data_files[split]:
146
+ h5p = dl[rel]
147
+ jp = dl[rel.replace(".hdf5", ".json")]
148
+ (train if split=="train" else test).append((h5p,jp))
149
 
150
  return [
151
+ datasets.SplitGenerator(name=Split.TRAIN, gen_kwargs={"file_pairs": train}),
152
+ datasets.SplitGenerator(name=Split.TEST, gen_kwargs={"file_pairs": test}),
153
  ]
154
 
155
  def _generate_examples(self, file_pairs):
156
+ """Yield sliding-window examples, yielding NumPy arrays directly."""
157
+ idx = 0
158
+ fields = ["dfun", "temperature", "velx", "vely"]
159
+ tw = self.config.timesteps
160
+ for h5_path, json_path in file_pairs:
161
  with h5py.File(h5_path, "r") as h5f:
162
+ arrays = {k: h5f[k][...] for k in fields} # (T, H, W)
163
+ T = arrays[fields[0]].shape[0]
164
+ max_start = T - 2*tw + 1
165
+
166
+ # metadata
167
  with open(json_path, "r") as jf:
168
  p = json.load(jf)
169
  fluid_params = [
 
172
  p["mugas"],
173
  p["rhogas"],
174
  p["thcogas"],
175
+ p.get("stefan", 0.0),
176
  p["prandtl"],
177
  p["heater"]["nucWaitTime"],
178
+ p["heater"].get("wallTemp", 0.0),
179
  ]
180
+
181
+ for start in range(max_start):
182
+ ei = start + tw
183
+ eo = ei + tw
184
+
185
+ inp_c_t_h_w = np.stack([arrays[k][start:ei] for k in fields], axis=0)
186
+ out_c_t_h_w = np.stack([arrays[k][ei:eo] for k in fields], axis=0)
187
+
188
+ # transpose (C,T,H,W) -> (T,C,H,W)
189
+ inp = inp_c_t_h_w.transpose(1,0,2,3).astype("float32")
190
+ out = out_c_t_h_w.transpose(1,0,2,3).astype("float32")
191
+
192
+ yield idx, {
193
+ "input": inp,
194
+ "output": out,
195
+ "fluid_params": fluid_params,
196
+ "filename": os.path.basename(h5_path),
197
+ }
198
+ idx += 1