Commit 5f6c42c ("update") by Fahimeh Orvati Nia
Parent: 3c8af25

Files changed:
- app.py (+4 -2)
- sorghum_pipeline/data/preprocessor.py (+39 -37)
- sorghum_pipeline/output/manager.py (+11 -0)
- wrapper.py (+3 -0)
app.py
CHANGED
@@ -41,10 +41,11 @@ def process(image):
 
     overlay = load_pil(outputs.get('Overlay'))
     mask = load_pil(outputs.get('Mask'))
+    composite = load_pil(outputs.get('Composite'))
    order = ['NDVI', 'ARI', 'GNDVI']
    gallery_items = [load_pil(outputs[k]) for k in order if k in outputs]
    stats_text = outputs.get('StatsText', '')
-    return overlay, mask, gallery_items, stats_text
+    return composite, overlay, mask, gallery_items, stats_text
 
 with gr.Blocks() as demo:
     gr.Markdown("# 🌿 Sorghum Plant Analysis Demo")
@@ -58,6 +59,7 @@ with gr.Blocks() as demo:
     preview = gr.Image(type="pil", label="Uploaded Image Preview", interactive=False)
 
     with gr.Row():
+        composite_img = gr.Image(type="pil", label="Composite (Segmentation Input)", interactive=False)
         overlay_img = gr.Image(type="pil", label="Segmentation Overlay", interactive=False)
         mask_img = gr.Image(type="pil", label="Mask", interactive=False)
 
@@ -66,7 +68,7 @@ with gr.Blocks() as demo:
 
     # Update preview when image is uploaded
     inp.change(fn=show_preview, inputs=inp, outputs=preview)
-    run.click(process, inputs=inp, outputs=[overlay_img, mask_img, gallery, stats])
+    run.click(process, inputs=inp, outputs=[composite_img, overlay_img, mask_img, gallery, stats])
 
 if __name__ == "__main__":
     demo.launch()
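Note: process() turns the path strings returned by the wrapper into PIL images through a load_pil helper that already exists in app.py but is outside this diff. A minimal sketch of what such a helper could look like, purely an assumption for readers following along (not part of the commit):

from pathlib import Path
from typing import Optional

from PIL import Image


def load_pil(path: Optional[str]) -> Optional[Image.Image]:
    # Open a pipeline output path as an RGB PIL image; return None when missing.
    if path and Path(path).exists():
        return Image.open(path).convert("RGB")
    return None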
sorghum_pipeline/data/preprocessor.py
CHANGED
@@ -1,4 +1,4 @@
-"""Minimal image preprocessing."""
+"""Minimal image preprocessing following the requested composite/spectral logic."""
 
 import numpy as np
 from PIL import Image
@@ -13,47 +13,49 @@ class ImagePreprocessor:
         self.target_size = target_size
 
     def convert_to_uint8(self, arr: np.ndarray) -> np.ndarray:
-        """
-        ptp = np.ptp(
-        if ptp
-        return np.clip(normalized, 0, 255).astype(np.uint8)
+        """Normalize any array to 8-bit uint8 in the range [0, 255]."""
+        a = np.nan_to_num(arr, nan=0.0, posinf=0.0, neginf=0.0)
+        ptp = np.ptp(a)
+        if ptp == 0:
+            return np.zeros_like(a, dtype=np.uint8)
+        norm = (a - a.min()) / (ptp + 1e-6) * 255
+        return norm.astype(np.uint8)
 
     def process_raw_image(self, pil_img: Image.Image) -> Tuple[np.ndarray, Dict[str, np.ndarray]]:
-        """
+        """
+        Split a 4-band RAW mosaic image into tiles and build:
+          - composite: 8-bit BGR array arranged as (green, red_edge, red)
+          - spectral stack: dict with keys green, red, red_edge, nir (each HxWx1)
+        """
         d = pil_img.size[0] // 2
-        boxes = [(j, i, j + d, i + d)
-        composite_uint8 = self.convert_to_uint8(composite)
-        # Keep spectral bands with single channel dimension for consistency
-        spectral_bands = {
-            "green": green[..., np.newaxis],
-            "red": red[..., np.newaxis],
-            "red_edge": red_edge[..., np.newaxis],
-            "nir": nir[..., np.newaxis]
-        }
-        return composite_uint8, spectral_bands
+        boxes = [(j, i, j + d, i + d)
+                 for i, j in product(range(0, pil_img.height, d),
+                                     range(0, pil_img.width, d))]
+        stack = np.stack([np.array(pil_img.crop(b), float) for b in boxes], axis=-1)
+        green, red, red_edge, nir = np.split(stack, 4, axis=-1)
+
+        # Build BGR composite so that displayed RGB = (red, red_edge, green)
+        comp = np.concatenate([green, red_edge, red], axis=-1)
+        comp_uint8 = self.convert_to_uint8(comp)
+
+        spectral_bands = {"green": green, "red": red, "red_edge": red_edge, "nir": nir}
+        return comp_uint8, spectral_bands
 
     def create_composites(self, plants: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
-        """
+        """
+        For each item in `plants`, grab one raw image and attach:
+          - pdata["composite"]: 8-bit BGR numpy array
+          - pdata["spectral_stack"]: dict of bands returned by process_raw_image
+        """
         for key, pdata in plants.items():
             if "raw_image" in pdata:
-                pdata["
+                im, _ = pdata["raw_image"]
+            elif pdata.get("raw_images"):
+                im, _ = pdata["raw_images"][0]
+            else:
+                continue
+
+            comp, spec = self.process_raw_image(im)
+            pdata["composite"] = comp
+            pdata["spectral_stack"] = spec
         return plants
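Note: the new process_raw_image treats the raw frame as a 2x2 band mosaic whose quadrants unpack to green, red, red_edge, nir, and it relies on product from itertools, which this hunk does not import (assumed to be imported elsewhere in the module). A self-contained sketch of the tiling logic with made-up data, just to show the expected shapes:

from itertools import product

import numpy as np
from PIL import Image

d = 64  # tile size; the mosaic is 2*d pixels on each side
mosaic = Image.fromarray(np.random.randint(0, 255, (2 * d, 2 * d), dtype=np.uint8))
boxes = [(j, i, j + d, i + d)
         for i, j in product(range(0, mosaic.height, d), range(0, mosaic.width, d))]
stack = np.stack([np.array(mosaic.crop(b), float) for b in boxes], axis=-1)
green, red, red_edge, nir = np.split(stack, 4, axis=-1)
print(stack.shape, green.shape)  # (64, 64, 4) (64, 64, 1): each band stays HxWx1

Because convert_to_uint8 normalizes the whole 3-channel composite at once (a single min and ptp for all bands), the relative brightness between the green, red_edge, and red bands is preserved in the saved image.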
sorghum_pipeline/output/manager.py
CHANGED
@@ -69,6 +69,17 @@ class OutputManager:
         except Exception as e:
             logger.error(f"Failed to save overlay: {e}")
 
+        # 2b. Composite (input to segmentation)
+        try:
+            base_image = plant_data.get('composite')
+            if isinstance(base_image, np.ndarray):
+                # Ensure uint8
+                if base_image.dtype != np.uint8:
+                    base_image = self._normalize_to_uint8(base_image.astype(np.float64))
+                cv2.imwrite(str(results_dir / 'composite.png'), base_image)
+        except Exception as e:
+            logger.error(f"Failed to save composite: {e}")
+
         # 3-5. Vegetation indices (NDVI, ARI, GNDVI)
         try:
             veg = plant_data.get('vegetation_indices', {})
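Note: this hunk calls self._normalize_to_uint8, a helper that already exists on OutputManager and is not shown in the diff. A hedged sketch of what it presumably does, mirroring ImagePreprocessor.convert_to_uint8 (the real method may differ):

import numpy as np


def normalize_to_uint8(arr: np.ndarray) -> np.ndarray:
    # Assumed behaviour of OutputManager._normalize_to_uint8:
    # min-max scale any array into [0, 255] uint8 (all zeros if the array is flat).
    a = np.nan_to_num(arr, nan=0.0, posinf=0.0, neginf=0.0)
    ptp = np.ptp(a)
    if ptp == 0:
        return np.zeros_like(a, dtype=np.uint8)
    return ((a - a.min()) / ptp * 255.0).astype(np.uint8)

Since cv2.imwrite interprets 3-channel arrays as BGR, writing the composite unchanged keeps the mapping set up in preprocessor.py: the red, red_edge, and green bands land on the R, G, and B channels of composite.png.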
wrapper.py
CHANGED
@@ -60,10 +60,13 @@ def run_pipeline_on_image(input_image_path: str, work_dir: str, save_artifacts:
     # Also include overlay and mask if present
     overlay_path = work / 'results/overlay.png'
     mask_path = work / 'results/mask.png'
+    composite_path = work / 'results/composite.png'
     if overlay_path.exists():
        outputs['Overlay'] = str(overlay_path)
    if mask_path.exists():
        outputs['Mask'] = str(mask_path)
+    if composite_path.exists():
+        outputs['Composite'] = str(composite_path)
 
    # Extract simple stats for display if present in pipeline results
    try:
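Note: with the extra key, the outputs mapping consumed by app.py can now carry 'Composite' alongside 'Overlay', 'Mask', the index images, and 'StatsText'. A hypothetical call, assuming run_pipeline_on_image returns that mapping and with placeholder arguments standing in for real paths:

from wrapper import run_pipeline_on_image

outputs = run_pipeline_on_image("raw_frame.png", work_dir="/tmp/sorghum_run", save_artifacts=True)
for key in ("Composite", "Overlay", "Mask", "NDVI", "ARI", "GNDVI", "StatsText"):
    print(key, "->", outputs.get(key))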