Fahimeh Orvati Nia
committed on
Commit
·
08b06f5
1
Parent(s):
10bba96
update
Browse files
sorghum_pipeline/data/preprocessor.py
CHANGED
|
@@ -27,12 +27,21 @@ class ImagePreprocessor:
|
|
| 27 |
d = pil_img.size[0] // 2
|
| 28 |
boxes = [(j, i, j + d, i + d) for i, j in product(range(0, pil_img.height, d), range(0, pil_img.width, d))]
|
| 29 |
stack = np.stack([np.array(pil_img.crop(box), dtype=float) for box in boxes], axis=-1)
|
| 30 |
-
green, red, red_edge, nir = np.split(stack, 4, axis=-1)
|
| 31 |
|
| 32 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
composite_uint8 = self.convert_to_uint8(composite)
|
| 34 |
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
return composite_uint8, spectral_bands
|
| 37 |
|
| 38 |
def create_composites(self, plants: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
|
|
|
|
| 27 |
d = pil_img.size[0] // 2
|
| 28 |
boxes = [(j, i, j + d, i + d) for i, j in product(range(0, pil_img.height, d), range(0, pil_img.width, d))]
|
| 29 |
stack = np.stack([np.array(pil_img.crop(box), dtype=float) for box in boxes], axis=-1)
|
|
|
|
| 30 |
|
| 31 |
+
# Split into 4 bands and squeeze the extra dimension
|
| 32 |
+
green, red, red_edge, nir = [band.squeeze(-1) for band in np.split(stack, 4, axis=-1)]
|
| 33 |
+
|
| 34 |
+
# Stack into (H, W, 3) composite: green, red_edge, red
|
| 35 |
+
composite = np.stack([green, red_edge, red], axis=-1)
|
| 36 |
composite_uint8 = self.convert_to_uint8(composite)
|
| 37 |
|
| 38 |
+
# Keep spectral bands with single channel dimension for consistency
|
| 39 |
+
spectral_bands = {
|
| 40 |
+
"green": green[..., np.newaxis],
|
| 41 |
+
"red": red[..., np.newaxis],
|
| 42 |
+
"red_edge": red_edge[..., np.newaxis],
|
| 43 |
+
"nir": nir[..., np.newaxis]
|
| 44 |
+
}
|
| 45 |
return composite_uint8, spectral_bands
|
| 46 |
|
| 47 |
def create_composites(self, plants: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
|