Fahimeh Orvati Nia committed
Commit 31ddfa7 · 1 Parent(s): 95a536e
__pycache__/wrapper.cpython-312.pyc ADDED
Binary file (3.49 kB).
 
app.py CHANGED
@@ -5,7 +5,6 @@ from wrapper import run_pipeline_on_image
 import numpy as np
 from PIL import Image
 from itertools import product
-import logging
 
 def process(image_path):
     if not image_path:
@@ -16,26 +15,8 @@ def process(image_path):
         ext = src.suffix.lstrip('.') or 'tif'
         img_path = Path(tmpdir) / f"input.{ext}"
         try:
-            try:
-                # Log original file stats
-                size_bytes = src.stat().st_size if src.exists() else 0
-                try:
-                    with Image.open(src) as im_src:
-                        frames = getattr(im_src, 'n_frames', 1)
-                        logging.info(f"Uploaded file: path={src}, size_bytes={size_bytes}, mode={im_src.mode}, size={im_src.size}, frames={frames}")
-                except Exception:
-                    logging.info(f"Uploaded file: path={src}, size_bytes={size_bytes} (PIL open failed)")
-            except Exception:
-                pass
             img_bytes = src.read_bytes()
             img_path.write_bytes(img_bytes)
-            # Log copied file as read by PIL
-            try:
-                with Image.open(img_path) as im_tmp:
-                    frames_tmp = getattr(im_tmp, 'n_frames', 1)
-                    logging.info(f"Temp input: path={img_path}, mode={im_tmp.mode}, size={im_tmp.size}, frames={frames_tmp}")
-            except Exception:
-                logging.info(f"Temp input: path={img_path} (PIL open failed)")
         except Exception:
             # Fallback: save via PIL if direct copy fails
             Image.open(src).save(img_path)
@@ -46,7 +27,7 @@ def process(image_path):
     if not path_str:
         return None
     im = Image.open(path_str)
-    im = im.convert('RGB')
+    # im = im.convert('RGB')
     copied = im.copy()
     im.close()
     return copied
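
A minimal, self-contained sketch of the staging step that process() wraps, assuming the upload may be a 16-bit or multi-band TIFF whose encoding should survive the copy; stage_input and the example paths are illustrative names, not part of app.py:

import tempfile
from pathlib import Path

from PIL import Image


def stage_input(image_path: str, tmpdir: str) -> Path:
    """Copy the uploaded file into tmpdir, preserving its original encoding."""
    src = Path(image_path)
    ext = src.suffix.lstrip('.') or 'tif'
    img_path = Path(tmpdir) / f"input.{ext}"
    try:
        # Byte-for-byte copy keeps mode, bit depth, and any extra frames intact.
        img_path.write_bytes(src.read_bytes())
    except Exception:
        # Fallback: re-save via PIL if the direct copy fails (may re-encode).
        Image.open(src).save(img_path)
    return img_path


# Usage sketch:
# with tempfile.TemporaryDirectory() as tmpdir:
#     staged = stage_input("/path/to/upload.tif", tmpdir)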
sorghum_pipeline/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (461 Bytes).
 
sorghum_pipeline/__pycache__/config.cpython-312.pyc ADDED
Binary file (2.55 kB).
 
sorghum_pipeline/__pycache__/pipeline.cpython-312.pyc ADDED
Binary file (9.78 kB).
 
sorghum_pipeline/data/__pycache__/preprocessor.cpython-312.pyc CHANGED
Binary files a/sorghum_pipeline/data/__pycache__/preprocessor.cpython-312.pyc and b/sorghum_pipeline/data/__pycache__/preprocessor.cpython-312.pyc differ
 
sorghum_pipeline/data/preprocessor.py CHANGED
@@ -27,10 +27,7 @@ class ImagePreprocessor:
         boxes = [(j, i, j + d, i + d)
                  for i, j in product(range(0, pil_img.height, d),
                                      range(0, pil_img.width, d))]
-        # Convert each quadrant to grayscale so each band is single-channel
-        stack = np.stack([
-            np.array(pil_img.crop(b).convert('L'), dtype=float)
-            for b in boxes], axis=-1)
+        stack = np.stack([np.array(pil_img.crop(b), float) for b in boxes], axis=-1)
         # bands come in order: [green, red, red_edge, nir]
         green, red, red_edge, nir = np.split(stack, 4, axis=-1)
         # build composite using bands: Red, Red-Edge, Green
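
For context, a self-contained sketch of the quadrant split this hunk edits, assuming the frame is a 2x2 tiling of d x d single-channel bands; split_bands is an illustrative name, not the preprocessor's API:

import numpy as np
from PIL import Image
from itertools import product


def split_bands(pil_img: Image.Image, d: int) -> np.ndarray:
    """Crop d x d tiles in row-major order and stack them along the last axis."""
    boxes = [(j, i, j + d, i + d)
             for i, j in product(range(0, pil_img.height, d),
                                 range(0, pil_img.width, d))]
    # Without .convert('L'), an RGB source yields (d, d, 3) crops and the stack
    # becomes 4-D; keeping the raw crop assumes each tile is already single-channel.
    return np.stack([np.array(pil_img.crop(b), dtype=float) for b in boxes], axis=-1)


# With a 2*d x 2*d frame this gives shape (d, d, 4), bands ordered
# [green, red, red_edge, nir] as in the comment above.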
sorghum_pipeline/features/__pycache__/morphology.cpython-312.pyc CHANGED
Binary files a/sorghum_pipeline/features/__pycache__/morphology.cpython-312.pyc and b/sorghum_pipeline/features/__pycache__/morphology.cpython-312.pyc differ
 
sorghum_pipeline/pipeline.py CHANGED
@@ -13,7 +13,6 @@ from .data import ImagePreprocessor, MaskHandler
 from .features import TextureExtractor, VegetationIndexExtractor, MorphologyExtractor
 from .output import OutputManager
 from .segmentation import SegmentationManager
-from .features.morphology import MorphologyExtractor
 
 logger = logging.getLogger(__name__)
 
@@ -55,11 +54,6 @@ class SorghumPipeline:
 
         # Load image
         img = Image.open(single_image_path)
-        try:
-            frames = getattr(img, 'n_frames', 1)
-        except Exception:
-            frames = 1
-        logger.info(f"Loaded input: path={single_image_path}, mode={img.mode}, size={img.size}, frames={frames}")
         plants = {
             "demo": {
                 "raw_image": (img, Path(single_image_path).name),
@@ -104,14 +98,7 @@ class SorghumPipeline:
         green_band = None
         spectral = pdata.get('spectral_stack', {})
        if 'green' in spectral:
-            gb = spectral['green']
-            gb = np.asarray(gb)
-            # robustly collapse to 2D if it arrived as 3-channel
-            if gb.ndim == 3 and gb.shape[2] > 1:
-                gb = gb[..., 0]
-            elif gb.ndim == 3 and gb.shape[2] == 1:
-                gb = gb.squeeze(-1)
-            green_band = gb.astype(np.float64)
+            green_band = spectral['green'].squeeze(-1).astype(np.float64)
             if mask is not None:
                 valid = np.where(mask > 0, green_band, np.nan)
             else:
@@ -132,13 +119,9 @@ class SorghumPipeline:
         else:
             pdata['vegetation_indices'] = {}
 
-        # Morphology: compute size analysis image via internal extractor
-        try:
-            pdata['morphology_features'] = self.morphology_extractor.extract_morphology_features(
-                cv2.cvtColor(composite, cv2.COLOR_BGR2RGB), mask
-            )
-        except Exception:
-            pdata['morphology_features'] = {}
+        # # Morphology: PlantCV size analysis (COMMENTED OUT)
+        # pdata['morphology_features'] = self.morphology_extractor.extract_morphology_features(composite, mask)
+        pdata['morphology_features'] = {}
 
         return plants
 
@@ -150,14 +133,7 @@ class SorghumPipeline:
            if not all(b in spectral for b in bands):
                continue
 
-           arrays = []
-           for b in bands:
-               arr = np.asarray(spectral[b])
-               if arr.ndim == 3 and arr.shape[2] > 1:
-                   arr = arr[..., 0]
-               elif arr.ndim == 3 and arr.shape[2] == 1:
-                   arr = arr.squeeze(-1)
-               arrays.append(arr.astype(np.float64))
+           arrays = [np.asarray(spectral[b].squeeze(-1), dtype=np.float64) for b in bands]
            values = self.vegetation_extractor.index_formulas[name](*arrays).astype(np.float64)
            binary_mask = (mask > 0)
            masked_values = np.where(binary_mask, values, np.nan)
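
A short illustrative sketch of the masked-index pattern used above, with a stand-in NDVI formula rather than one taken from index_formulas; masked_index_mean is a hypothetical helper name:

import numpy as np


def masked_index_mean(nir: np.ndarray, red: np.ndarray, mask: np.ndarray) -> float:
    """Evaluate a stand-in NDVI over plant pixels only and return its mean."""
    nir = np.asarray(nir, dtype=np.float64).squeeze()
    red = np.asarray(red, dtype=np.float64).squeeze()
    values = (nir - red) / (nir + red + 1e-9)            # stand-in formula, not from the pipeline
    masked_values = np.where(mask > 0, values, np.nan)   # background becomes NaN
    return float(np.nanmean(masked_values))              # ignore NaNs when summarizing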
sorghum_pipeline/segmentation/manager.py CHANGED
@@ -48,15 +48,22 @@ class SegmentationManager:
     def segment_image_soft(self, image: np.ndarray) -> np.ndarray:
         """Segment image and return soft mask [0,1]."""
         try:
+            logger.info(f"Segmentation: input image shape={image.shape}, dtype={image.dtype}")
             rgb_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
             pil_image = Image.fromarray(rgb_image)
             input_tensor = self.transform(pil_image).unsqueeze(0).to(self.device)
+            try:
+                logger.info(f"Segmentation: tensor shape={input_tensor.shape}, device={self.device}")
+            except Exception:
+                pass
 
             with torch.no_grad():
                 preds = self.model(input_tensor)[-1].sigmoid().cpu()[0].squeeze(0).numpy()
+            logger.info(f"Segmentation: raw preds shape={preds.shape}, dtype={preds.dtype}")
 
             original_size = (image.shape[1], image.shape[0])
             soft_mask = cv2.resize(preds.astype(np.float32), original_size, interpolation=cv2.INTER_LINEAR)
+            logger.info(f"Segmentation: resized soft_mask shape={soft_mask.shape}, dtype={soft_mask.dtype}")
             return np.clip(soft_mask, 0.0, 1.0)
         except Exception as e:
             logger.error(f"Segmentation failed: {e}")
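
For reference, a sketch of the post-processing step the new log lines bracket, assuming preds is the model's H x W sigmoid output and image is the original BGR frame; to_soft_mask is an illustrative name:

import cv2
import numpy as np


def to_soft_mask(preds: np.ndarray, image: np.ndarray) -> np.ndarray:
    """Resize raw predictions back to the input frame and clamp to [0, 1]."""
    original_size = (image.shape[1], image.shape[0])  # cv2.resize takes (width, height)
    soft_mask = cv2.resize(preds.astype(np.float32), original_size,
                           interpolation=cv2.INTER_LINEAR)
    return np.clip(soft_mask, 0.0, 1.0)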
wrapper.py CHANGED
@@ -39,6 +39,15 @@ def run_pipeline_on_image(input_image_path: str, work_dir: str, save_artifacts:
 
     # Collect outputs
     outputs: Dict[str, str] = {}
+    try:
+        # Log immediate output directory contents for debugging
+        for sub in ['results', 'Vegetation_indices_images', 'texture_output']:
+            p = work / sub
+            if p.exists():
+                files = sorted([str(x.name) for x in p.iterdir() if x.is_file()])
+                print(f"Artifacts in {sub}: {files}")
+    except Exception:
+        pass
 
     # Collect desired vegetation indices (replace ARI with SAVI)
     wanted = [
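
A hypothetical sketch of folding the same directory walk into the outputs mapping instead of printing it; the "<subdir>/<filename>" key format and the collect_artifacts name are assumptions, not the wrapper's actual convention:

from pathlib import Path
from typing import Dict, Iterable


def collect_artifacts(work: Path,
                      subdirs: Iterable[str] = ('results',
                                                'Vegetation_indices_images',
                                                'texture_output')) -> Dict[str, str]:
    """Map '<subdir>/<filename>' to absolute paths for every file found."""
    outputs: Dict[str, str] = {}
    for sub in subdirs:
        p = work / sub
        if not p.exists():
            continue
        for f in sorted(x for x in p.iterdir() if x.is_file()):
            outputs[f"{sub}/{f.name}"] = str(f)
    return outputs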