ppak10 committed on
Commit
ffc4250
·
1 Parent(s): 389ba93

Adds `.gitignore` and dataset script.

Browse files
.gitignore ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ pip-wheel-metadata/
24
+ share/python-wheels/
25
+ *.egg-info/
26
+ .installed.cfg
27
+ *.egg
28
+ MANIFEST
29
+
30
+ # PyInstaller
31
+ # Usually these files are written by a python script from a template
32
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
33
+ *.manifest
34
+ *.spec
35
+
36
+ # Installer logs
37
+ pip-log.txt
38
+ pip-delete-this-directory.txt
39
+
40
+ # Unit test / coverage reports
41
+ htmlcov/
42
+ .tox/
43
+ .nox/
44
+ .coverage
45
+ .coverage.*
46
+ .cache
47
+ nosetests.xml
48
+ coverage.xml
49
+ *.cover
50
+ *.py,cover
51
+ .hypothesis/
52
+ .pytest_cache/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ target/
76
+
77
+ # Jupyter Notebook
78
+ .ipynb_checkpoints
79
+
80
+ # IPython
81
+ profile_default/
82
+ ipython_config.py
83
+
84
+ # pyenv
85
+ .python-version
86
+
87
+ # pipenv
88
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
90
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
91
+ # install all needed dependencies.
92
+ #Pipfile.lock
93
+
94
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95
+ __pypackages__/
96
+
97
+ # Celery stuff
98
+ celerybeat-schedule
99
+ celerybeat.pid
100
+
101
+ # SageMath parsed files
102
+ *.sage.py
103
+
104
+ # Environments
105
+ .env
106
+ .venv
107
+ env/
108
+ venv/
109
+ ENV/
110
+ env.bak/
111
+ venv.bak/
112
+
113
+ # Spyder project settings
114
+ .spyderproject
115
+ .spyproject
116
+
117
+ # Rope project settings
118
+ .ropeproject
119
+
120
+ # mkdocs documentation
121
+ /site
122
+
123
+ # mypy
124
+ .mypy_cache/
125
+ .dmypy.json
126
+ dmypy.json
127
+
128
+ # Pyre type checker
129
+ .pyre/
NIST-In-Situ-IN625-LPBF-Overhangs_dataset.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import datasets
2
+ import os
3
+ import pickle
4
+
5
+ _DESCRIPTION = """\
6
+ In Situ Thermography During Laser Powder Bed Fusion of a Nickel Superalloy 625
7
+ Artifact with Various Overhangs and Supports
8
+ """
9
+
10
+ _URLS = {
11
+ "base": "https://huggingface.co/datasets/ppak10/NIST-In-Situ-IN625-LPBF-Overhangs/resolve/main/data/BASE.pkl",
12
+ "block": "https://huggingface.co/datasets/ppak10/NIST-In-Situ-IN625-LPBF-Overhangs/resolve/main/data/BLOCK.pkl",
13
+ }
14
+
15
+ class NISTInSituIN625LPBFOverhangsDataset(datasets.GeneratorBasedBuilder):
16
+ VERSION = datasets.Version("0.0.1")
17
+
18
+ BUILDER_CONFIGS = [
19
+ datasets.BuilderConfig(
20
+ name="default",
21
+ description="Provides layer-wise attributes of entire dataset",
22
+ version=VERSION,
23
+ ),
24
+ # datasets.BuilderConfig(
25
+ # name="images",
26
+ # description="Provides layer-wise attributes of entire dataset",
27
+ # version=VERSION,
28
+ # ),
29
+ ]
30
+
31
+ DEFAULT_CONFIG_NAME = "default"
32
+
33
+ def _info(self):
34
+ if self.config.name == "default":
35
+ features = datasets.Features({
36
+ "folder_layer_range": datasets.Value("string"),
37
+ "part": datasets.Value("string"),
38
+ "part_section": datasets.Value("string"),
39
+ "process": datasets.Value("string"),
40
+ "source": datasets.Value("string"),
41
+ "layer_number": datasets.Value("string"),
42
+ "build_time": datasets.Sequence(datasets.Sequence(datasets.Value("float32"))),
43
+ "contact_email": datasets.Value("string"),
44
+ "file_name": datasets.Value("string"),
45
+ "hatch_spacing": datasets.Value("uint32"),
46
+ "laser_power": datasets.Value("uint32"),
47
+ "layer_thickness": datasets.Value("uint32"),
48
+ "material": datasets.Value("string"),
49
+ "radiant_temp": datasets.Sequence(datasets.Sequence(datasets.Sequence(datasets.Value("uint32")))),
50
+ "build_time": datasets.Sequence(datasets.Sequence(datasets.Value("float32"))),
51
+ "s_hvariable__a": datasets.Value("float32"),
52
+ "s_hvariable__b": datasets.Value("float32"),
53
+ "s_hvariable__c": datasets.Value("float32"),
54
+ "scan_speed": datasets.Value("uint32"),
55
+ "website": datasets.Value("string"),
56
+ })
57
+ return datasets.DatasetInfo(
58
+ description=_DESCRIPTION,
59
+ features=features,
60
+ )
61
+
62
+ def _split_generators(self, dl_manager):
63
+
64
+ urls = _URLS[self.config.name]
65
+ data_dir = dl_manager.download_and_extract(urls)
66
+ return [
67
+ datasets.SplitGenerator(
68
+ name="base",
69
+ gen_kwargs={
70
+ "filepath": os.path.join(data_dir, "BASE.pkl"),
71
+ "split": "base",
72
+ }
73
+ ),
74
+ datasets.SplitGenerator(
75
+ name="block",
76
+ gen_kwargs={
77
+ "filepath": os.path.join(data_dir, "BLOCK.pkl"),
78
+ "split": "block",
79
+ }
80
+ )
81
+ ]
82
+
83
+ def _generate_examples(self, filepath, split):
84
+ if self.config.name == "default":
85
+ with open(filepath, "rb") as f:
86
+ layers = pickle.load(f)
87
+ for index, layer in enumerate(layers):
88
+ yield index, layer