j-h-f committed on
Commit
cfaa196
·
verified ·
1 Parent(s): f44ecad

Upload WCv1LMDBReader.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. WCv1LMDBReader.py +258 -0
WCv1LMDBReader.py ADDED
@@ -0,0 +1,258 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import lmdb
4
+ import gzip
5
+ import torch
6
+ import logging
7
+ import numpy as np
8
+ from enum import Enum
9
+ import safetensors.torch
10
+ from torch.utils.data import Dataset
11
+ from flumapping.utils.Utils import one_hot_encode, count_classes
12
+
13
+ """
14
+ tensors = {
15
+ "center": torch.tensor(center),
16
+ "wcmap": map_data,
17
+ "B4": rgbnir_data[0, :, :],
18
+ "B3": rgbnir_data[1, :, :],
19
+ "B2": rgbnir_data[2, :, :],
20
+ "B8": rgbnir_data[3, :, :],
21
+ "B11": swir_data[0, :, :],
22
+ "B12": swir_data[1, :, :],
23
+ "S1VV": s1_data[0, :, :],
24
+ "S1VH": s1_data[1, :, :],
25
+ "classprops": torch.tensor(count_classes(map_data)),
26
+ }
27
+ """
28
+
29
+
30
class Bands(Enum):
    """Selectable output bands for WCv1LMDBReader.

    Single Sentinel-2 / Sentinel-1 bands plus composite selectors
    (RGBNIR, SWIR, S1, ALL) that expand to several bands at once.
    """

    # Sentinel-2 optical bands
    B2 = "B2"    # blue
    B3 = "B3"    # green
    B4 = "B4"    # red
    B8 = "B8"    # VNIR
    B11 = "B11"  # SWIR
    B12 = "B12"  # SWIR
    # Sentinel-1 SAR polarizations
    S1VV = "S1VV"
    S1VH = "S1VH"
    # Composite selectors (expanded by WCv1LMDBReader.configure_output)
    RGBNIR = "RGBNIR"
    SWIR = "SWIR"
    S1 = "S1"
    ALL = "ALL"
43
+
44
+
45
class WCv1LMDBReader(Dataset):
    """Torch ``Dataset`` that reads WorldCover-v1 samples from an LMDB store.

    Each LMDB value is a safetensors blob (optionally gzip-compressed)
    holding per-band rasters (e.g. "B2".."B12", "S1VV", "S1VH"), a "wcmap"
    label raster and, optionally, precomputed "classprops" — see the
    module-level layout note.
    """

    # ESA WorldCover class codes; position in this list is the label index
    # used for one-hot encoding in __getitem__.
    CLASS_VALUES = [10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 100]

    def __init__(self,
                 map_lmdb_file: os.PathLike,
                 split: str,
                 return_key=False,
                 readonly=True,
                 lmdb_size_limit=8 * 1024**3,
                 key_type=str,
                 max_len=-1,
                 return_type=tuple,
                 output_bands: list | None = None,
                 transforms=None
                 ):
        """Create a lazy LMDB-backed dataset.

        Args:
            map_lmdb_file: path of the LMDB environment on disk.
            split: named sub-database to read ('train', 'val' or 'test'),
                or None for the unnamed default database.
            return_key: if True, the record key is added to the sample dict.
            readonly: open the environment read-only (locking disabled).
            lmdb_size_limit: LMDB map_size in bytes (default 8 GiB).
            key_type: str or int; how raw byte keys are (de)serialized.
            max_len: if > 0, randomly subsample the key list to this length.
            return_type: dict to return the raw tensor dict; anything else
                returns an (image, label, class_props) tuple.
            output_bands: list of Bands stacked into the image tensor;
                defaults to [Bands.RGBNIR] (fixes the old mutable default).
            transforms: optional callable applied to the stacked image.

        Raises:
            ValueError: if ``split`` is not None and not a known split name.
        """
        super().__init__()
        self.map_lmdb_file = map_lmdb_file
        self.env: lmdb.Environment | None = None
        self.db = None
        self.return_key = return_key
        self.logger = logging.getLogger(__name__)
        self.readonly = readonly
        self._keys = None  # cached key list; populated lazily by keys()
        self.lmdb_size_limit = lmdb_size_limit
        self.key_type = key_type
        self.max_len = max_len
        self.return_type = return_type
        self.transforms = transforms  # was 'self. transforms' (stray space)
        self.split = split

        # Real exception instead of assert: asserts are stripped under -O.
        # Also fixes the "unrecignized" typo in the message.
        if split is not None and split not in ('train', 'val', 'test'):
            raise ValueError(
                f"unrecognized split type. Expected one of "
                f"['train', 'val', 'test'] but got {split}")

        self.configure_output(
            output_bands if output_bands is not None else [Bands.RGBNIR])

    def configure_output(self, output_bands):
        """Expand composite selectors (RGBNIR/SWIR/S1/ALL) into concrete bands.

        Sets ``self.output_bands`` to the flat, de-duplicated band list used
        by __getitem__ to stack the image tensor.
        """
        output = []
        for band in output_bands:
            if band == Bands.RGBNIR:
                output += [Bands.B2, Bands.B3, Bands.B4, Bands.B8]
            elif band == Bands.SWIR:
                output += [Bands.B11, Bands.B12]
            elif band == Bands.S1:
                output += [Bands.S1VV, Bands.S1VH]
            elif band == Bands.ALL:
                # ALL replaces anything selected so far (original behavior).
                output = [Bands.B2, Bands.B3, Bands.B4, Bands.B8,
                          Bands.B11, Bands.B12, Bands.S1VV, Bands.S1VH]
            elif band not in output:
                output.append(band)
        self.output_bands = output

    def set_transforms(self, transforms):
        """Replace the image transform callable (param name typo fixed)."""
        self.transforms = transforms

    def set_max_len(self, max_len):
        """Change the subsample size and force a key-list refresh.

        Fixed: the old call to keys() without update=True left a previously
        cached key list untouched, so the new max_len never took effect.
        """
        self.max_len = max_len
        self.keys(update=True)

    def open_env(self):
        """Lazily open the LMDB environment and the split sub-database."""
        if self.env is not None:
            return
        self.logger.info(
            f"Opening LMDB environment at {self.map_lmdb_file} ...")
        self.env = lmdb.open(
            self.map_lmdb_file,
            readonly=self.readonly,
            lock=not self.readonly,
            meminit=False,
            readahead=True,
            map_size=self.lmdb_size_limit,
            max_spare_txns=18,
            max_dbs=3,
        )
        if self.split is not None:
            self.db = self.env.open_db(self.split.encode())
        else:
            self.db = None

    def keys(self, update: bool = False):
        """Return the cached key list, (re-)reading it from LMDB if needed.

        Keys are decoded according to ``self.key_type`` and, when
        ``max_len > 0``, randomly subsampled.
        """
        self.open_env()

        if self._keys is None or update:
            self.logger.info("(Re-)Reading keys")  # was module-level logging
            with self.env.begin(db=self.db) as txn:
                self._keys = list(txn.cursor().iternext(values=False))
            if self.key_type == str:
                self._keys = [x.decode() for x in self._keys]
            elif self.key_type == int:
                self._keys = [int.from_bytes(x, 'big') for x in self._keys]
            if self.max_len > 0:
                # NOTE(review): np.random.choice samples WITH replacement, so
                # duplicates are possible — confirm intended before changing.
                idxs = np.random.choice(
                    np.arange(len(self._keys)), self.max_len)
                self._keys = np.asarray(self._keys)[idxs]
        return self._keys

    def _encode_key(self, key):
        """Serialize a key to the byte form stored in LMDB.

        NOTE(review): sys.getsizeof(key) is the Python *object* size (28+
        bytes for small ints), not the minimal byte width. It is kept because
        existing databases were written with this width; int.from_bytes in
        keys() tolerates the leading zeros, so the round-trip is consistent.
        Returns None for unsupported key types (original behavior).
        """
        if self.key_type == str:
            return key.encode()
        if self.key_type == int:
            return key.to_bytes(sys.getsizeof(key), 'big')
        return None

    def close_env(self):
        """Close the LMDB environment (reopened lazily on next access)."""
        if self.env is not None:
            self.env.close()
            self.env = None

    def update_key(self, key: str, updateData: dict, compress=False) -> bool:
        """Serialize ``updateData`` with safetensors and store it under ``key``.

        Returns the LMDB put() status (False if the put was refused).
        """
        try:
            if self.env is None:
                self.logger.info("LMDB not yet opened")
                self.logger.info("Open LMDB")
                self.open_env()
            with self.env.begin(write=True, db=self.db) as txn:
                byte_data = safetensors.torch.save(updateData)
                if compress:
                    byte_data = gzip.compress(byte_data)
                return txn.put(self._encode_key(key), byte_data)
        except Exception:
            # logger.exception keeps the traceback; bare raise preserves it
            # for the caller (old code printed and re-raised with 'raise ex').
            self.logger.exception("update_key failed for key %s", key)
            raise

    def delete_key(self, index: str | int):
        """Delete one record, addressed by position or by key.

        Fixed: refreshes the key cache with update=True afterwards (the old
        keys() call was a no-op once keys were cached) and loads keys lazily
        instead of indexing a possibly-None ``self._keys``.
        """
        if isinstance(index, int) and self.key_type != int:
            key = self.keys()[index]
        else:
            self.open_env()
            key = index
        with self.env.begin(write=True, buffers=True, db=self.db) as txn:
            status = txn.delete(self._encode_key(key))
        self.keys(update=True)
        return status

    def __len__(self):
        """Number of addressable records (max_len caps it when > 0)."""
        if self._keys is None:
            self.logger.info("keys are not loaded yet")
            self.logger.info("Loading keys")
            self.keys()
        if self.max_len > 0:
            return self.max_len
        return len(self._keys)

    def __getitem__(self, index: int | str):
        """Load one record by position (int) or by key (str).

        Returns the raw tensor dict when ``return_type`` is dict, otherwise
        an ``(image, label, class_props)`` tuple.

        Raises:
            TypeError: for unsupported index types (was an assert).
            KeyError: if the key does not exist, or a tuple return is
                requested for a record without a "wcmap" label.
        """
        if not isinstance(index, (int, str)):
            raise TypeError(
                f"index can only be of type int or str. Got {type(index)}")

        if self.env is None:
            self.logger.info("LMDB not yet opened")
            self.logger.info("Open LMDB")
        self.open_env()

        # Lazy-load keys so integer indexing works even before __len__.
        if isinstance(index, int) and self.key_type != int and self._keys is None:
            self.keys()

        with self.env.begin(write=False, buffers=True) as txn:
            if isinstance(index, int) and self.key_type != int:
                key = self._keys[index]
            else:
                key = index
            byte_data = txn.get(self._encode_key(key), db=self.db)

            # Fixed: a missing key used to surface as TypeError on
            # byte_data[0]; raise a clear KeyError instead.
            if byte_data is None:
                raise KeyError(f"key {key!r} not found in LMDB")

            # buffers=True means byte_data is only valid inside this
            # transaction, so deserialize before it closes.
            # gzip magic number marks compressed records.
            if bytes(byte_data[:2]) == b'\x1f\x8b':
                tensor_dict = safetensors.torch.load(
                    gzip.decompress(byte_data))
            else:
                tensor_dict = safetensors.torch.load(bytes(byte_data))

        label = None
        if "wcmap" in tensor_dict:
            # Map ESA class codes (10..100) to contiguous indices 0..10,
            # then one-hot encode to (11, H, W).
            # NOTE(review): nodata pixels (value 0) end up in index 0,
            # sharing it with class code 10 — preserved original behavior.
            label = tensor_dict['wcmap'].clone()
            for i, val in enumerate(self.CLASS_VALUES):
                label = torch.where(label == val, i, label)
            label = torch.nn.functional.one_hot(
                label.squeeze().type(torch.LongTensor), 11).permute(2, 0, 1)

        if self.return_key:
            tensor_dict['key'] = key
        if self.return_type == dict:
            return tensor_dict

        # Stack the configured bands into a float32 image tensor.
        bands = [tensor_dict[band.value] for band in self.output_bands]
        rs_image = torch.stack(bands, 0).type(torch.float32)
        image = self.transforms(rs_image) if self.transforms is not None else rs_image

        # Fixed: the old code hit NameError on 'label' for unlabeled records.
        if label is None:
            raise KeyError(
                "record has no 'wcmap' label; use return_type=dict for "
                "unlabeled records")

        if "classprops" in tensor_dict:
            class_props = tensor_dict['classprops']
        else:
            class_props = count_classes(tensor_dict['wcmap'])
        return (image, label, class_props)