Fahimeh Orvati Nia
committed on
Commit
·
7d932a4
1
Parent(s):
7c80781
update
Browse files- app.py +15 -2
- sorghum_pipeline/data/preprocessor.py +2 -1
app.py
CHANGED
|
@@ -18,9 +18,22 @@ def show_preview(image):
|
|
| 18 |
arr = np.array(image)
|
| 19 |
# RGBA → RGB
|
| 20 |
if arr.ndim == 3 and arr.shape[2] == 4:
|
| 21 |
-
|
| 22 |
-
|
|
|
|
| 23 |
if arr.ndim == 3 and arr.shape[2] == 3:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
return image
|
| 25 |
# Single-channel or higher bit-depth
|
| 26 |
if arr.ndim == 2 or (arr.ndim == 3 and arr.shape[2] == 1):
|
|
|
|
| 18 |
arr = np.array(image)
|
| 19 |
# RGBA → RGB
|
| 20 |
if arr.ndim == 3 and arr.shape[2] == 4:
|
| 21 |
+
image = image.convert("RGB")
|
| 22 |
+
arr = np.array(image)
|
| 23 |
+
# RGB
|
| 24 |
if arr.ndim == 3 and arr.shape[2] == 3:
|
| 25 |
+
# If high bit-depth or non-uint8, normalize per-channel for visualization
|
| 26 |
+
if arr.dtype != np.uint8 or np.max(arr) > 255:
|
| 27 |
+
a = np.nan_to_num(arr.astype(np.float64), nan=0.0, posinf=0.0, neginf=0.0)
|
| 28 |
+
vis = np.empty_like(a, dtype=np.float64)
|
| 29 |
+
for c in range(3):
|
| 30 |
+
vmin = np.percentile(a[..., c], 1.0)
|
| 31 |
+
vmax = np.percentile(a[..., c], 99.0)
|
| 32 |
+
if not np.isfinite(vmin) or not np.isfinite(vmax) or vmax <= vmin:
|
| 33 |
+
vmin, vmax = float(np.min(a[..., c])), float(np.max(a[..., c]))
|
| 34 |
+
denom = max(vmax - vmin, 1e-6)
|
| 35 |
+
vis[..., c] = np.clip((a[..., c] - vmin) / denom, 0.0, 1.0) * 255.0
|
| 36 |
+
return Image.fromarray(vis.astype(np.uint8), mode='RGB')
|
| 37 |
return image
|
| 38 |
# Single-channel or higher bit-depth
|
| 39 |
if arr.ndim == 2 or (arr.ndim == 3 and arr.shape[2] == 1):
|
sorghum_pipeline/data/preprocessor.py
CHANGED
|
@@ -31,7 +31,8 @@ class ImagePreprocessor:
|
|
| 31 |
boxes = [(j, i, j + d, i + d)
|
| 32 |
for i, j in product(range(0, pil_img.height, d),
|
| 33 |
range(0, pil_img.width, d))]
|
| 34 |
-
|
|
|
|
| 35 |
green, red, red_edge, nir = np.split(stack, 4, axis=-1)
|
| 36 |
|
| 37 |
# Build BGR composite so that displayed RGB = (red, red_edge, green)
|
|
|
|
| 31 |
boxes = [(j, i, j + d, i + d)
|
| 32 |
for i, j in product(range(0, pil_img.height, d),
|
| 33 |
range(0, pil_img.width, d))]
|
| 34 |
+
# Ensure each quadrant is single-channel (grayscale) so bands are 2D
|
| 35 |
+
stack = np.stack([np.array(pil_img.crop(b).convert('L'), float) for b in boxes], axis=-1)
|
| 36 |
green, red, red_edge, nir = np.split(stack, 4, axis=-1)
|
| 37 |
|
| 38 |
# Build BGR composite so that displayed RGB = (red, red_edge, green)
|