ppak10 commited on
Commit
fb4cb6b
·
1 Parent(s): 53fa50c

Updates the dataset script to load parquet files.

Browse files
NIST-In-Situ-IN625-LPBF-Overhangs.py CHANGED
@@ -1,6 +1,8 @@
1
  import datasets
 
2
  import os
3
  import pickle
 
4
 
5
  _DESCRIPTION = """\
6
  In Situ Thermography During Laser Powder Bed Fusion of a Nickel Superalloy 625
@@ -18,6 +20,17 @@ LAYER_OVERHANG_WITH_SUPPORTS_URLS = [f"./layer/overhang_with_supports/{n}.pkl" f
18
  LAYER_BLOCK_URLS = [f"./layer/block/{n}.pkl" for n in range(281, 381, 1)]
19
  LAYER_OVERHANG_NO_SUPPORTS_URLS = [f"./layer/overhang_no_supports/{n}.pkl" for n in range(381, 560, 1)]
20
 
 
 
 
 
 
 
 
 
 
 
 
21
  _URLS = {
22
  "part_section": {
23
  "base": "./part_section/BASE.pkl",
@@ -30,6 +43,12 @@ _URLS = {
30
  "block": LAYER_BLOCK_URLS,
31
  "overhang_no_supports": LAYER_OVERHANG_NO_SUPPORTS_URLS,
32
  "overhang_with_supports": LAYER_OVERHANG_WITH_SUPPORTS_URLS,
 
 
 
 
 
 
33
  }
34
  }
35
 
@@ -47,9 +66,14 @@ class NISTInSituIN625LPBFOverhangsDataset(datasets.GeneratorBasedBuilder):
47
  description="Provides layer-wise attributes of entire dataset",
48
  version=VERSION,
49
  ),
 
 
 
 
 
50
  ]
51
 
52
- DEFAULT_CONFIG_NAME = "layer"
53
 
54
  def _info(self):
55
  features = datasets.Features({
@@ -128,3 +152,25 @@ class NISTInSituIN625LPBFOverhangsDataset(datasets.GeneratorBasedBuilder):
128
  with open(path, "rb") as f:
129
  layer = pickle.load(f)
130
  yield index, layer
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import datasets
2
+ import numpy as np
3
  import os
4
  import pickle
5
+ import pyarrow.parquet as pq
6
 
7
  _DESCRIPTION = """\
8
  In Situ Thermography During Laser Powder Bed Fusion of a Nickel Superalloy 625
 
20
  LAYER_BLOCK_URLS = [f"./layer/block/{n}.pkl" for n in range(281, 381, 1)]
21
  LAYER_OVERHANG_NO_SUPPORTS_URLS = [f"./layer/overhang_no_supports/{n}.pkl" for n in range(381, 560, 1)]
22
 
23
+ LAYER_TABLE_BASE_URLS = []
24
+
25
+ # Layers 1 to 99 inclusive without layer 22
26
+ for layer_number in range(1, 100, 1):
27
+ if layer_number != 22:
28
+ LAYER_TABLE_BASE_URLS.append(f"./layer/base/{layer_number}.parquet")
29
+
30
+ LAYER_TABLE_OVERHANG_WITH_SUPPORTS_URLS = [f"./layer/overhang_with_supports/{n}.parquet" for n in range(101, 281, 1)]
31
+ LAYER_TABLE_BLOCK_URLS = [f"./layer/block/{n}.parquet" for n in range(281, 381, 1)]
32
+ LAYER_TABLE_OVERHANG_NO_SUPPORTS_URLS = [f"./layer/overhang_no_supports/{n}.parquet" for n in range(381, 560, 1)]
33
+
34
  _URLS = {
35
  "part_section": {
36
  "base": "./part_section/BASE.pkl",
 
43
  "block": LAYER_BLOCK_URLS,
44
  "overhang_no_supports": LAYER_OVERHANG_NO_SUPPORTS_URLS,
45
  "overhang_with_supports": LAYER_OVERHANG_WITH_SUPPORTS_URLS,
46
+ },
47
+ "layer_table": {
48
+ "base": LAYER_TABLE_BASE_URLS,
49
+ "block": LAYER_TABLE_BLOCK_URLS,
50
+ "overhang_no_supports": LAYER_TABLE_OVERHANG_NO_SUPPORTS_URLS,
51
+ "overhang_with_supports": LAYER_TABLE_OVERHANG_WITH_SUPPORTS_URLS,
52
  }
53
  }
54
 
 
66
  description="Provides layer-wise attributes of entire dataset",
67
  version=VERSION,
68
  ),
69
+ datasets.BuilderConfig(
70
+ name="layer_table",
71
+ description="Provides parquet layer-wise attributes of entire dataset",
72
+ version=VERSION,
73
+ ),
74
  ]
75
 
76
+ DEFAULT_CONFIG_NAME = "layer_table"
77
 
78
  def _info(self):
79
  features = datasets.Features({
 
152
  with open(path, "rb") as f:
153
  layer = pickle.load(f)
154
  yield index, layer
155
+ elif self.config.name == "layer_table":
156
+ # layer config has multiple files in filepath variable.
157
+ for index, path in enumerate(filepath):
158
+ with open(path, "rb") as f:
159
+ table = pq.read_table(f)
160
+ layer = table.to_pydict()
161
+ non_array = [str, int, float]
162
+ converted_layer = {}
163
+ for key, value in layer.items():
164
+ layer_value = value[0]
165
+ # print(key, type(layer_value))
166
+ if (type(layer_value) in non_array):
167
+ print(key, layer_value)
168
+ converted_layer[key] = layer_value
169
+ elif(isinstance(value, list) and "shape" not in key):
170
+ shape = layer[f"{key}_shape"][0]
171
+ flattened_array = np.array(layer_value)
172
+ array = flattened_array.reshape(shape)
173
+ print(array.shape)
174
+ converted_layer[key] = array
175
+
176
+ yield index, converted_layer
layers_to_arrow_table.ipynb ADDED
@@ -0,0 +1,336 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "metadata": {},
6
+ "source": [
7
+ "# Part Section to Layers\n",
8
+ "Splits part section file into smaller layer files."
9
+ ]
10
+ },
11
+ {
12
+ "cell_type": "code",
13
+ "execution_count": 1,
14
+ "metadata": {},
15
+ "outputs": [],
16
+ "source": [
17
+ "import matplotlib.pyplot as plt\n",
18
+ "import numpy as np\n",
19
+ "import os\n",
20
+ "import pickle\n",
21
+ "import pyarrow as pa\n",
22
+ "import pyarrow.parquet as pq\n",
23
+ "\n",
24
+ "from tqdm import tqdm "
25
+ ]
26
+ },
27
+ {
28
+ "cell_type": "code",
29
+ "execution_count": 3,
30
+ "metadata": {},
31
+ "outputs": [],
32
+ "source": [
33
+ "CHECK_SHAPE = False\n",
34
+ "\n",
35
+ "layer_folder = \"layer\"\n",
36
+ "layer_table_folder = \"layer_table\"\n",
37
+ "\n",
38
+ "# config_folder = \"base\"\n",
39
+ "# config_folder = \"block\"\n",
40
+ "# config_folder = \"overhang_no_supports\"\n",
41
+ "config_folder = \"overhang_with_supports\""
42
+ ]
43
+ },
44
+ {
45
+ "cell_type": "code",
46
+ "execution_count": null,
47
+ "metadata": {},
48
+ "outputs": [],
49
+ "source": [
50
+ "non_array = [str, int, np.uint8, np.uint16, np.float64]\n",
51
+ "for layer_file in tqdm(os.listdir(f\"{layer_folder}/{config_folder}\")):\n",
52
+ " # print(layer_file)\n",
53
+ " with open(f\"{layer_folder}/{config_folder}/{layer_file}\", \"rb\") as f:\n",
54
+ " layer = pickle.load(f)\n",
55
+ "\n",
56
+ " pydict = {}\n",
57
+ " layer_number = layer[\"layer_number\"]\n",
58
+ " for key, value in layer.items():\n",
59
+ " if (type(value) in non_array):\n",
60
+ " pydict[key] = [value]\n",
61
+ " elif (isinstance(value, np.ndarray)):\n",
62
+ " # print(key, value.shape, type(value.shape))\n",
63
+ " pydict[f\"{key}_shape\"] = [value.shape]\n",
64
+ " pydict[key] = [value.flatten()]\n",
65
+ " else:\n",
66
+ " print(key, type(value))\n",
67
+ " # print(pydict)\n",
68
+ " table = pa.Table.from_pydict(pydict)\n",
69
+ " pq.write_table(table, f\"{layer_table_folder}/{config_folder}/{layer_number}.parquet\")\n",
70
+ " # print(table)\n",
71
+ "\n",
72
+ " if CHECK_SHAPE:\n",
73
+ " radiant_temps = layer[\"radiant_temp\"]\n",
74
+ " radiant_temps_shape = radiant_temps.shape\n",
75
+ " radiant_temps_flat = radiant_temps.flatten()\n",
76
+ " radiant_temps_arrow = pa.array(radiant_temps_flat)\n",
77
+ "\n",
78
+ " radiant_temps_reshaped = radiant_temps_flat.reshape(radiant_temps_shape)\n",
79
+ "\n",
80
+ " # print(radiant_temps_reshaped)\n",
81
+ " arrow_radiant_temps_flat = table[\"radiant_temp\"]\n",
82
+ " # print(\"called\")\n",
83
+ " # print(arrow_radiant_temps_flat[0])\n",
84
+ " arrow_radiant_temps_shape = table[\"radiant_temp_shape\"][0]\n",
85
+ "\n",
86
+ " # print(arrow_radiant_temps_flat)\n",
87
+ " arrow_radiant_temps_reshaped = arrow_radiant_temps_flat.reshape(arrow_radiant_temps_shape)\n",
88
+ "\n",
89
+ " plt.imshow(radiant_temps[100])\n",
90
+ " plt.show()\n",
91
+ "\n",
92
+ " plt.imshow(radiant_temps_reshaped[100])\n",
93
+ " plt.show()\n",
94
+ "\n",
95
+ " plt.imshow(arrow_radiant_temps_reshaped[100])\n",
96
+ " plt.show()\n"
97
+ ]
98
+ },
99
+ {
100
+ "cell_type": "code",
101
+ "execution_count": 4,
102
+ "metadata": {},
103
+ "outputs": [],
104
+ "source": [
105
+ "layer_number = 102\n",
106
+ "table = pq.read_table(f\"{layer_table_folder}/{config_folder}/{layer_number}.parquet\")"
107
+ ]
108
+ },
109
+ {
110
+ "cell_type": "code",
111
+ "execution_count": 12,
112
+ "metadata": {},
113
+ "outputs": [],
114
+ "source": [
115
+ "layer = table.to_pydict()"
116
+ ]
117
+ },
118
+ {
119
+ "cell_type": "markdown",
120
+ "metadata": {},
121
+ "source": []
122
+ },
123
+ {
124
+ "cell_type": "code",
125
+ "execution_count": 19,
126
+ "metadata": {},
127
+ "outputs": [
128
+ {
129
+ "name": "stdout",
130
+ "output_type": "stream",
131
+ "text": [
132
+ "folder_layer_range 101-110\n",
133
+ "part OverhangPart\n",
134
+ "part_section OVERHANG-05deg\n",
135
+ "process LPBFthermography\n",
136
+ "source NIST\n",
137
+ "supports wSup\n",
138
+ "layer_number 102\n",
139
+ "(3, 664)\n",
140
+ "contact_email jarred.heigel@nist.gov\n",
141
+ "file_name 20180801_OverhangStudy_Layer102.mat\n",
142
+ "hatch_spacing 100\n",
143
+ "laser_power 195\n",
144
+ "layer_thickness 20\n",
145
+ "material IN625\n",
146
+ "(664, 126, 360)\n",
147
+ "(1, 664)\n",
148
+ "(2, 1)\n",
149
+ "s_hvariable__a 2.655\n",
150
+ "s_hvariable__b -800.7\n",
151
+ "s_hvariable__c 1940000.0\n",
152
+ "scan_speed 800\n",
153
+ "website nist.gov/el/lpbf-thermography/3D-part-builds/OverhangPart-IN625\n"
154
+ ]
155
+ }
156
+ ],
157
+ "source": [
158
+ "non_array = [str, int, float]\n",
159
+ "converted_layer = {}\n",
160
+ "for key, value in layer.items():\n",
161
+ " layer_value = value[0]\n",
162
+ " # print(key, type(layer_value))\n",
163
+ " if (type(layer_value) in non_array):\n",
164
+ " print(key, layer_value)\n",
165
+ " converted_layer[key] = layer_value\n",
166
+ " elif(isinstance(value, list) and \"shape\" not in key):\n",
167
+ " shape = layer[f\"{key}_shape\"][0]\n",
168
+ " flattened_array = np.array(layer_value)\n",
169
+ " array = flattened_array.reshape(shape)\n",
170
+ " print(array.shape)\n",
171
+ " converted_layer[key] = array\n"
172
+ ]
173
+ },
174
+ {
175
+ "cell_type": "code",
176
+ "execution_count": 28,
177
+ "metadata": {},
178
+ "outputs": [
179
+ {
180
+ "name": "stdout",
181
+ "output_type": "stream",
182
+ "text": [
183
+ "folder_layer_range <class 'str'>\n",
184
+ "part <class 'str'>\n",
185
+ "part_section <class 'str'>\n",
186
+ "process <class 'str'>\n",
187
+ "source <class 'str'>\n",
188
+ "supports <class 'str'>\n",
189
+ "layer_number <class 'int'>\n",
190
+ "build_time <class 'numpy.ndarray'>\n",
191
+ "(3, 664)\n",
192
+ "contact_email <class 'str'>\n",
193
+ "file_name <class 'str'>\n",
194
+ "hatch_spacing <class 'int'>\n",
195
+ "laser_power <class 'int'>\n",
196
+ "layer_thickness <class 'int'>\n",
197
+ "material <class 'str'>\n",
198
+ "radiant_temp <class 'numpy.ndarray'>\n",
199
+ "(664, 126, 360)\n",
200
+ "raw_frame_number <class 'numpy.ndarray'>\n",
201
+ "(1, 664)\n",
202
+ "resolution <class 'numpy.ndarray'>\n",
203
+ "(2, 1)\n",
204
+ "s_hvariable__a <class 'float'>\n",
205
+ "s_hvariable__b <class 'float'>\n",
206
+ "s_hvariable__c <class 'float'>\n",
207
+ "scan_speed <class 'int'>\n",
208
+ "website <class 'str'>\n"
209
+ ]
210
+ }
211
+ ],
212
+ "source": [
213
+ "for key, value in converted_layer.items():\n",
214
+ " print(key, type(value))\n",
215
+ " if(isinstance(value, np.ndarray)):\n",
216
+ " print(value.shape)\n"
217
+ ]
218
+ },
219
+ {
220
+ "cell_type": "code",
221
+ "execution_count": null,
222
+ "metadata": {},
223
+ "outputs": [],
224
+ "source": [
225
+ "print(\"called\", table)\n",
226
+ "arrow_radiant_temps_flat = np.array(table[\"radiant_temp\"][0].as_py())"
227
+ ]
228
+ },
229
+ {
230
+ "cell_type": "code",
231
+ "execution_count": null,
232
+ "metadata": {},
233
+ "outputs": [],
234
+ "source": [
235
+ "# arrow_radiant_temps_shape = [int(size) for size in table[\"radiant_temp_shape\"][0]]\n",
236
+ "arrow_radiant_temps_shape = tuple(table[\"radiant_temp_shape\"][0].as_py())\n",
237
+ "print(arrow_radiant_temps_shape)"
238
+ ]
239
+ },
240
+ {
241
+ "cell_type": "code",
242
+ "execution_count": null,
243
+ "metadata": {},
244
+ "outputs": [],
245
+ "source": [
246
+ "arrow_radiant_temps_reshaped = arrow_radiant_temps_flat.reshape(arrow_radiant_temps_shape)\n",
247
+ "print(arrow_radiant_temps_reshaped[100])\n",
248
+ "plt.imshow(arrow_radiant_temps_reshaped[100])\n",
249
+ "plt.show()"
250
+ ]
251
+ },
252
+ {
253
+ "cell_type": "code",
254
+ "execution_count": null,
255
+ "metadata": {},
256
+ "outputs": [],
257
+ "source": [
258
+ "for layer in layers:\n",
259
+ " layer_number = layer[\"layer_number\"]\n",
260
+ " with open(f\"{layers_folder}/{config_folder}/{layer_number}.pkl\", \"wb\") as f:\n",
261
+ " pickle.dump(layer, f)"
262
+ ]
263
+ },
264
+ {
265
+ "cell_type": "code",
266
+ "execution_count": null,
267
+ "metadata": {},
268
+ "outputs": [],
269
+ "source": []
270
+ },
271
+ {
272
+ "cell_type": "code",
273
+ "execution_count": null,
274
+ "metadata": {},
275
+ "outputs": [],
276
+ "source": []
277
+ },
278
+ {
279
+ "cell_type": "code",
280
+ "execution_count": null,
281
+ "metadata": {},
282
+ "outputs": [],
283
+ "source": []
284
+ },
285
+ {
286
+ "cell_type": "code",
287
+ "execution_count": null,
288
+ "metadata": {},
289
+ "outputs": [],
290
+ "source": [
291
+ "layers[0]"
292
+ ]
293
+ },
294
+ {
295
+ "cell_type": "code",
296
+ "execution_count": null,
297
+ "metadata": {},
298
+ "outputs": [],
299
+ "source": [
300
+ "layer_number = 1\n",
301
+ "with open(f\"{layers_folder}/{config_folder}/{layer_number}.pkl\", \"rb\") as f:\n",
302
+ " layer = pickle.load(f)\n",
303
+ "\n",
304
+ "print(layer)"
305
+ ]
306
+ },
307
+ {
308
+ "cell_type": "code",
309
+ "execution_count": null,
310
+ "metadata": {},
311
+ "outputs": [],
312
+ "source": []
313
+ }
314
+ ],
315
+ "metadata": {
316
+ "kernelspec": {
317
+ "display_name": "venv",
318
+ "language": "python",
319
+ "name": "python3"
320
+ },
321
+ "language_info": {
322
+ "codemirror_mode": {
323
+ "name": "ipython",
324
+ "version": 3
325
+ },
326
+ "file_extension": ".py",
327
+ "mimetype": "text/x-python",
328
+ "name": "python",
329
+ "nbconvert_exporter": "python",
330
+ "pygments_lexer": "ipython3",
331
+ "version": "3.12.3"
332
+ }
333
+ },
334
+ "nbformat": 4,
335
+ "nbformat_minor": 2
336
+ }