AI-Manith commited on
Commit
f04d7cf
·
verified ·
1 Parent(s): b8973e6

Upload 3 files

Browse files
src/app.py ADDED
@@ -0,0 +1,869 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import cv2
2
+ import numpy as np
3
+ from matplotlib import pyplot as plt
4
+
5
class FruitDiseaseDetector:
    """Detects and quantifies disease regions on mango fruit images.

    Pipeline: background removal -> disease-specific color/texture masks ->
    severity percentage -> annotated visualization. Results are cached on the
    instance by process_image() for the save_* helpers.
    """

    def __init__(self):
        # All result attributes start as None and are populated by
        # process_image(). Initializing them here (instead of only the first
        # three) means save_results()'s hasattr() guards always pass and the
        # `is not None` checks do the real work.
        self.disease_mask = None
        self.healthy_mask = None
        self.processed_image = None
        self.fruit_mask = None
        self.image_no_bg = None
        self.disease_info = None
11
def remove_background(self, image):
    """
    Remove background from fruit image using multiple segmentation techniques
    Returns the fruit mask and the image with background removed
    """
    # Run three independent segmenters and let them vote on each pixel.
    candidate_masks = [
        self._grabcut_segmentation(image),      # GrabCut (most effective for fruits)
        self._color_based_segmentation(image),  # color-based
        self._edge_based_segmentation(image),   # edge-based
    ]

    voted_mask = self._combine_masks(candidate_masks)
    final_mask = self._post_process_mask(voted_mask)

    # Black out everything the final mask rejects.
    result_image = image.copy()
    result_image[final_mask == 0] = [0, 0, 0]

    return final_mask, result_image
37
def _grabcut_segmentation(self, image):
    """Use GrabCut algorithm for mango foreground/background separation"""
    height, width = image.shape[:2]

    # Seed rectangle around the mango: narrow side margins (mango width),
    # slightly larger top/bottom margins (mango length).
    margin_x = int(width * 0.10)
    margin_y = int(height * 0.12)
    seed_rect = (margin_x, margin_y, width - 2 * margin_x, height - 2 * margin_y)

    gc_mask = np.zeros((height, width), np.uint8)
    bgd_model = np.zeros((1, 65), np.float64)
    fgd_model = np.zeros((1, 65), np.float64)

    # 8 iterations gives a tighter mango boundary than the default.
    cv2.grabCut(image, gc_mask, seed_rect, bgd_model, fgd_model, 8, cv2.GC_INIT_WITH_RECT)

    # GrabCut marks 0/2 as (probable) background; everything else is fruit.
    return np.where((gc_mask == 2) | (gc_mask == 0), 0, 255).astype(np.uint8)
62
def _color_based_segmentation(self, image):
    """Segment mango using mango-specific color characteristics"""
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)

    # (lower, upper) HSV windows covering the mango ripeness spectrum.
    color_windows = [
        (np.array([35, 25, 25]), np.array([85, 255, 255])),    # green (unripe)
        (np.array([10, 30, 30]), np.array([35, 255, 255])),    # yellow/orange (ripe)
        (np.array([0, 30, 30]), np.array([15, 255, 255])),     # red/orange, low hue end
        (np.array([165, 30, 30]), np.array([180, 255, 255])),  # red/orange, hue wrap-around
        (np.array([25, 20, 40]), np.array([45, 255, 255])),    # yellowish-green (semi-ripe)
    ]

    # OR all windows together (order does not matter for bitwise OR).
    fruit_mask = cv2.inRange(hsv, *color_windows[0])
    for lower, upper in color_windows[1:]:
        fruit_mask = cv2.bitwise_or(fruit_mask, cv2.inRange(hsv, lower, upper))

    # Pale mango skin: moderate saturation AND reasonable brightness.
    _, moderate_sat_mask = cv2.threshold(hsv[:, :, 1], 30, 255, cv2.THRESH_BINARY)
    _, bright_mask = cv2.threshold(hsv[:, :, 2], 50, 255, cv2.THRESH_BINARY)
    mango_skin_mask = cv2.bitwise_and(moderate_sat_mask, bright_mask)

    return cv2.bitwise_or(fruit_mask, mango_skin_mask)
115
def _edge_based_segmentation(self, image):
    """Use edge detection and contour analysis for segmentation"""
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    edges = cv2.Canny(cv2.GaussianBlur(gray, (5, 5), 0), 50, 150)

    contours, _ = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    mask = np.zeros(gray.shape, np.uint8)

    if not contours:
        return mask

    # Keep only the biggest contour, and only when it plausibly covers a
    # fruit (>10% of the frame); otherwise the mask stays empty.
    biggest = max(contours, key=cv2.contourArea)
    if cv2.contourArea(biggest) > (gray.shape[0] * gray.shape[1] * 0.1):
        cv2.fillPoly(mask, [biggest], 255)

    return mask
141
+ def _combine_masks(self, masks):
142
+ """Combine multiple masks using majority voting"""
143
+ height, width = masks[0].shape
144
+ combined = np.zeros((height, width), dtype=np.uint8)
145
+
146
+ # Convert masks to binary (0 or 1)
147
+ binary_masks = [(mask > 127).astype(np.uint8) for mask in masks]
148
+
149
+ # Sum all masks
150
+ mask_sum = np.sum(binary_masks, axis=0)
151
+
152
+ # Use majority voting (at least 2 out of 3 methods agree)
153
+ combined[mask_sum >= 2] = 255
154
+
155
+ return combined
156
+
157
def _post_process_mask(self, mask):
    """Clean up the mask using morphological operations"""
    # Open with a small ellipse to drop speckle noise, then close with a
    # larger one to fill pinholes inside the fruit region.
    opened = cv2.morphologyEx(
        mask, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5)))
    cleaned = cv2.morphologyEx(
        opened, cv2.MORPH_CLOSE, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (15, 15)))

    # Keep only the largest connected component (the fruit itself).
    num_labels, labels, stats, _ = cv2.connectedComponentsWithStats(cleaned, connectivity=8)
    if num_labels <= 1:
        # Nothing but background — return the cleaned mask as-is.
        return cleaned

    # Label 0 is the background, so offset the argmax over the rest.
    largest_label = 1 + np.argmax(stats[1:, cv2.CC_STAT_AREA])
    final_mask = np.zeros_like(cleaned)
    final_mask[labels == largest_label] = 255
    return final_mask
182
def preprocess_image(self, image):
    """Preprocess the input image for disease detection"""
    # Denoised BGR for texture/edge analysis, plus HSV and LAB conversions
    # (of the ORIGINAL image) for the color-space detectors.
    denoised = cv2.GaussianBlur(image, (5, 5), 0)
    return (
        denoised,
        cv2.cvtColor(image, cv2.COLOR_BGR2HSV),
        cv2.cvtColor(image, cv2.COLOR_BGR2LAB),
    )
193
def detect_diseased_areas(self, image, fruit_mask=None):
    """
    Detect mango diseases using color-based segmentation and texture analysis
    Calibrated for: Alternaria, Anthracnose, Aspergillus (Black Mould), Lasiodiplodia (Stem Rot)
    """
    blurred, hsv, lab = self.preprocess_image(image)

    # High-confidence, disease-specific detectors, OR-combined.
    primary_mask = self._detect_anthracnose(hsv, lab)
    for detector in (self._detect_alternaria,
                     self._detect_aspergillus,
                     self._detect_lasiodiplodia):
        primary_mask = cv2.bitwise_or(primary_mask, detector(hsv, lab))

    # Lower-confidence cues: texture AND edge evidence must both agree.
    secondary_mask = cv2.bitwise_and(
        self._detect_texture_anomalies(blurred),
        self._detect_irregular_edges(blurred))

    combined_mask = cv2.bitwise_or(primary_mask, secondary_mask)

    # Restrict detections to the mango itself when a fruit mask is supplied.
    if fruit_mask is not None:
        combined_mask = cv2.bitwise_and(combined_mask, fruit_mask)

    # Aggressive opening (3x3 then 5x5) strips speckle noise; a gentle 3x3
    # closing fills pinholes without merging neighboring lesions.
    kernel_3 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    kernel_5 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
    combined_mask = cv2.morphologyEx(combined_mask, cv2.MORPH_OPEN, kernel_3)
    combined_mask = cv2.morphologyEx(combined_mask, cv2.MORPH_OPEN, kernel_5)
    combined_mask = cv2.morphologyEx(combined_mask, cv2.MORPH_CLOSE, kernel_3)

    # Drop regions under 75 px — too small to be a credible lesion.
    contours, _ = cv2.findContours(combined_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    filtered_mask = np.zeros_like(combined_mask)
    min_area = 75
    for contour in contours:
        if cv2.contourArea(contour) >= min_area:
            cv2.fillPoly(filtered_mask, [contour], 255)

    return filtered_mask
256
def _detect_anthracnose(self, hsv, lab):
    """Detect Anthracnose disease - dark circular spots with orange/pink halos"""
    # HSV cue 1: generally dark pixels (low value, some saturation).
    dark_mask = cv2.inRange(hsv, np.array([0, 40, 0]), np.array([180, 255, 80]))

    # HSV cue 2: reddish-brown lesion tones.
    reddish_mask = cv2.inRange(hsv, np.array([0, 50, 15]), np.array([25, 255, 140]))

    # LAB cue: dark (low L) pixels that also lean red (high a*).
    _, dark_lab = cv2.threshold(lab[:, :, 0], 85, 255, cv2.THRESH_BINARY_INV)
    _, red_lab = cv2.threshold(lab[:, :, 1], 135, 255, cv2.THRESH_BINARY)
    lab_anthracnose = cv2.bitwise_and(dark_lab, red_lab)

    # OR the cues for sensitivity: any single cue flags the pixel.
    return cv2.bitwise_or(cv2.bitwise_or(dark_mask, reddish_mask), lab_anthracnose)
287
def _detect_alternaria(self, hsv, lab):
    """Detect Alternaria disease - brown/black irregular spots with concentric rings"""
    # (lower, upper) HSV windows for alternaria lesion tones.
    hsv_windows = [
        (np.array([5, 60, 5]), np.array([25, 255, 100])),   # brown lesions
        (np.array([0, 40, 3]), np.array([20, 255, 80])),    # darker brown
        (np.array([0, 30, 0]), np.array([180, 255, 50])),   # near-black (advanced)
    ]
    alternaria_mask = cv2.inRange(hsv, *hsv_windows[0])
    for lower, upper in hsv_windows[1:]:
        alternaria_mask = cv2.bitwise_or(alternaria_mask, cv2.inRange(hsv, lower, upper))

    # LAB cue: low lightness marks brown/black lesions.
    _, dark_lab = cv2.threshold(lab[:, :, 0], 70, 255, cv2.THRESH_BINARY_INV)

    return cv2.bitwise_or(alternaria_mask, dark_lab)
317
def _detect_aspergillus(self, hsv, lab):
    """Detect Aspergillus (Black Mould Rot) - dark, fuzzy patches with irregular borders"""
    # (lower, upper) HSV windows for black-mould tones.
    hsv_windows = [
        (np.array([0, 20, 0]), np.array([180, 255, 40])),    # very dark patches
        (np.array([40, 40, 3]), np.array([80, 255, 60])),    # dark greenish tint
        (np.array([85, 30, 5]), np.array([120, 255, 80])),   # dark bluish-green
    ]
    aspergillus_mask = cv2.inRange(hsv, *hsv_windows[0])
    for lower, upper in hsv_windows[1:]:
        aspergillus_mask = cv2.bitwise_or(aspergillus_mask, cv2.inRange(hsv, lower, upper))

    # LAB cues: very low lightness, optionally combined with a blue tint
    # (low b*), which is characteristic of aspergillus.
    _, very_dark_lab = cv2.threshold(lab[:, :, 0], 50, 255, cv2.THRESH_BINARY_INV)
    _, blue_tint = cv2.threshold(lab[:, :, 2], 110, 255, cv2.THRESH_BINARY_INV)
    lab_aspergillus = cv2.bitwise_and(very_dark_lab, blue_tint)

    # OR everything together (OR is commutative, so grouping is irrelevant).
    aspergillus_mask = cv2.bitwise_or(aspergillus_mask, very_dark_lab)
    return cv2.bitwise_or(aspergillus_mask, lab_aspergillus)
355
def _detect_lasiodiplodia(self, hsv, lab):
    """Detect Lasiodiplodia (Stem and Rot disease) - soft, dark areas with brown/black coloration"""
    # (lower, upper) HSV windows for rot tones.
    hsv_windows = [
        (np.array([8, 50, 10]), np.array([30, 255, 120])),   # brown rot areas
        (np.array([0, 60, 5]), np.array([20, 255, 100])),    # saturated active rot
        (np.array([0, 30, 0]), np.array([30, 255, 60])),     # very dark (advanced)
    ]
    lasiodiplodia_mask = cv2.inRange(hsv, *hsv_windows[0])
    for lower, upper in hsv_windows[1:]:
        lasiodiplodia_mask = cv2.bitwise_or(lasiodiplodia_mask, cv2.inRange(hsv, lower, upper))

    # LAB cue: dark pixels (low L) with a reddish cast (high a*).
    _, dark_rot = cv2.threshold(lab[:, :, 0], 70, 255, cv2.THRESH_BINARY_INV)
    _, red_tint = cv2.threshold(lab[:, :, 1], 130, 255, cv2.THRESH_BINARY)

    return cv2.bitwise_or(lasiodiplodia_mask, cv2.bitwise_and(dark_rot, red_tint))
390
def _detect_black_mould_rot(self, hsv, lab):
    """Detect Black Mould Rot - dark, fuzzy patches with irregular borders (legacy method)"""
    # Thin compatibility shim: delegates to the newer Aspergillus detector
    # so existing callers keep working.
    return self._detect_aspergillus(hsv, lab)
395
def _detect_stem_rot(self, hsv, lab):
    """Detect Stem and Rot disease - soft, dark areas typically near stem end (legacy method)"""
    # Thin compatibility shim: delegates to the newer Lasiodiplodia detector
    # so existing callers keep working.
    return self._detect_lasiodiplodia(hsv, lab)
400
def _detect_brown_spots(self, hsv):
    """Detect brown/dark spots typical of fruit diseases (legacy method)"""
    # Two hue windows: brownish tones at the low end of the hue circle, and
    # reddish-brown tones at the high end (hue wrap-around).
    low_hue_mask = cv2.inRange(hsv, np.array([0, 20, 20]), np.array([30, 255, 120]))
    high_hue_mask = cv2.inRange(hsv, np.array([150, 20, 20]), np.array([180, 255, 120]))
    return cv2.bitwise_or(low_hue_mask, high_hue_mask)
420
def _detect_dark_spots_lab(self, lab):
    """Detect dark spots using LAB color space"""
    # Flag pixels that are BOTH dark (low L) and reddish (high a*).
    _, dark_mask = cv2.threshold(lab[:, :, 0], 80, 255, cv2.THRESH_BINARY_INV)
    _, red_mask = cv2.threshold(lab[:, :, 1], 140, 255, cv2.THRESH_BINARY)
    return cv2.bitwise_and(dark_mask, red_mask)
436
def _detect_texture_anomalies(self, image):
    """Detect texture anomalies using local binary patterns concept (calibrated for mango)

    Combines a local-standard-deviation map with a Sobel gradient map and
    requires BOTH to fire (AND), keeping the detector conservative.
    """
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    # Local std-dev over a 7x7 window: E[x^2] - (E[x])^2.
    kernel = np.ones((7, 7), np.float32) / 49
    gray_float = gray.astype(np.float32)
    mean = cv2.filter2D(gray_float, -1, kernel)
    sqr_mean = cv2.filter2D(gray_float**2, -1, kernel)

    # Clamp tiny negative variances caused by float rounding, then sanitize.
    variance = np.maximum(sqr_mean - mean**2, 0)
    std_dev = np.nan_to_num(np.sqrt(variance), nan=0.0, posinf=255.0, neginf=0.0)

    # Normalize to 0-255 (skip division when the image is perfectly flat).
    if std_dev.max() > 0:
        std_dev = (std_dev / std_dev.max() * 255).astype(np.uint8)
    else:
        std_dev = std_dev.astype(np.uint8)

    # Threshold high enough to ignore normal mango skin texture.
    _, texture_mask = cv2.threshold(std_dev, 30, 255, cv2.THRESH_BINARY)

    # Sobel gradient magnitude as a second, independent roughness cue.
    sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=3)
    sobely = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=3)
    sobel_magnitude = np.sqrt(sobelx**2 + sobely**2)

    # BUG FIX: guard against division by zero on a uniform image — the
    # original divided by max() unconditionally, producing NaNs (the std-dev
    # path above already had this guard; the Sobel path did not).
    sobel_max = sobel_magnitude.max()
    if sobel_max > 0:
        sobel_magnitude = np.uint8(sobel_magnitude / sobel_max * 255)
    else:
        sobel_magnitude = np.zeros_like(gray)

    # Higher threshold = less sensitive to benign roughness.
    _, rough_mask = cv2.threshold(sobel_magnitude, 50, 255, cv2.THRESH_BINARY)

    # Require both cues to agree (conservative AND).
    return cv2.bitwise_and(texture_mask, rough_mask)
479
def _detect_irregular_edges(self, image):
    """Detect irregular edges that might indicate disease boundaries"""
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    canny_edges = cv2.Canny(gray, 50, 150)

    # Thicken the edges into small regions so they can intersect with the
    # texture cues in detect_diseased_areas().
    ellipse = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    return cv2.dilate(canny_edges, ellipse, iterations=2)
492
def calculate_disease_severity(self, image, disease_mask, fruit_mask):
    """Calculate disease severity as percentage of affected area.

    Returns the diseased fraction of the fruit region, capped to [0, 100].
    Behavior is unchanged from the original; the redundant second
    `fruit_mask is not None` check (always true after the fallback branch
    assigned one, making its `else` unreachable) has been removed.
    """
    if fruit_mask is None:
        # Fallback: derive a coarse fruit mask from brightness (assumes the
        # fruit occupies most of the frame on a dark background).
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        _, fruit_mask = cv2.threshold(gray, 30, 255, cv2.THRESH_BINARY)

    fruit_area = cv2.countNonZero(fruit_mask)
    if fruit_area == 0:
        return 0  # no fruit found; avoid division by zero

    # Count only disease pixels that fall inside the fruit region.
    disease_area = cv2.countNonZero(cv2.bitwise_and(disease_mask, fruit_mask))

    severity_percentage = (disease_area / fruit_area) * 100
    return min(severity_percentage, 100)  # cap at 100%
516
def classify_disease_level(self, severity_percentage):
    """Map a mango severity percentage to a (label, BGR color) pair."""
    # (exclusive upper bound, label, BGR draw color); thresholds are
    # calibrated for mango fruit — very strict for "Healthy".
    bands = [
        (2, "Healthy", (0, 255, 0)),             # green
        (8, "Early Disease", (0, 255, 255)),     # yellow
        (20, "Moderate Disease", (0, 165, 255)), # orange
        (40, "Severe Disease", (0, 100, 255)),   # red-orange
    ]
    for upper_bound, label, bgr in bands:
        if severity_percentage < upper_bound:
            return label, bgr
    # 40% or more: critical, unmarketable.
    return "Critical Disease", (0, 0, 255)
529
def process_image(self, image_path):
    """Main processing function with background removal (calibrated for mango diseases)

    Runs the full pipeline on the image at *image_path* and returns a dict
    with the severity, level, masks, annotated image, and per-region info.
    Raises ValueError if the image cannot be read.
    """
    image = cv2.imread(image_path)
    if image is None:
        raise ValueError("Could not load image")

    print("Step 1: Removing background (mango-optimized)...")
    fruit_mask, image_no_bg = self.remove_background(image)

    print("Step 2: Detecting mango diseases (Alternaria, Anthracnose, Aspergillus, Lasiodiplodia)...")
    disease_mask = self.detect_diseased_areas(image_no_bg, fruit_mask)

    print("Step 3: Calculating mango disease severity...")
    severity = self.calculate_disease_severity(image_no_bg, disease_mask, fruit_mask)
    disease_level, color = self.classify_disease_level(severity)

    print("Step 4: Creating mango disease visualization...")
    output_image, disease_info = self._create_output_visualization(
        image, disease_mask, severity, disease_level, color, fruit_mask)

    # Cache everything on the instance for save_results() / save_detailed_report().
    self.disease_mask = disease_mask
    self.processed_image = output_image
    self.fruit_mask = fruit_mask
    self.image_no_bg = image_no_bg
    self.disease_info = disease_info

    return {
        'severity_percentage': severity,
        'disease_level': disease_level,
        'disease_mask': disease_mask,
        'output_image': output_image,
        'fruit_mask': fruit_mask,
        'image_no_bg': image_no_bg,
        'disease_info': disease_info,
        'num_diseased_regions': len(disease_info),
    }
571
def _create_output_visualization(self, original, mask, severity, level, color, fruit_mask=None):
    """Create visualization showing detected diseased areas with bounding boxes"""
    annotated = original.copy()

    # Black out the background wherever the fruit mask rejects a pixel.
    if fruit_mask is not None:
        annotated = np.where(fruit_mask[..., np.newaxis] == 0,
                             np.zeros_like(original), annotated)

    disease_info = []

    if np.any(mask > 0):
        # Tint diseased pixels with the severity color (70/30 blend).
        overlay = annotated.copy()
        overlay[mask > 0] = color
        annotated = cv2.addWeighted(annotated, 0.7, overlay, 0.3, 0)

        # One labelled bounding box per sufficiently-large diseased region
        # (regions under 50 px are treated as noise).
        contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        valid_contours = [cnt for cnt in contours if cv2.contourArea(cnt) >= 50]

        for i, contour in enumerate(valid_contours):
            cv2.drawContours(annotated, [contour], -1, color, 2)

            x, y, w, h = cv2.boundingRect(contour)
            cv2.rectangle(annotated, (x, y), (x + w, y + h), color, 3)

            disease_info.append({
                'id': i + 1,
                'bbox': (x, y, w, h),
                'area': cv2.contourArea(contour),
                'center': (x + w//2, y + h//2)
            })

            # Label "D<n>" above the box, or below it when too close to the top edge.
            label = f"D{i+1}"
            label_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.6, 2)[0]
            label_x = x
            label_y = y - 10 if y - 10 > 20 else y + h + 25

            # Filled rectangle behind the text for readability.
            cv2.rectangle(annotated,
                          (label_x - 2, label_y - label_size[1] - 2),
                          (label_x + label_size[0] + 2, label_y + 2),
                          color, -1)
            cv2.putText(annotated, label, (label_x, label_y),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 2)

    # Overall status banner in the top-left corner.
    cv2.putText(annotated, f"{level}: {severity:.1f}%", (10, 30),
                cv2.FONT_HERSHEY_SIMPLEX, 1, color, 2)
    cv2.putText(annotated, f"Diseased Regions: {len(disease_info)}", (10, 70),
                cv2.FONT_HERSHEY_SIMPLEX, 0.8, color, 2)

    # Outline the fruit boundary in white.
    if fruit_mask is not None:
        fruit_contours, _ = cv2.findContours(fruit_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        cv2.drawContours(annotated, fruit_contours, -1, (255, 255, 255), 2)

    return annotated, disease_info
653
def save_detailed_report(self, output_path, results):
    """Save detailed report of disease detection results.

    Writes a plain-text summary next to *output_path* (with a `_report.txt`
    suffix) and returns the report path.
    """
    report_path = output_path.replace('.jpg', '_report.txt').replace('.png', '_report.txt')

    # Assemble the whole report in memory, then write it in one pass.
    lines = []
    lines.append("=== FRUIT DISEASE DETECTION REPORT ===\n")
    lines.append(f"Image processed: {output_path}\n")
    lines.append(f"Overall Disease Level: {results['disease_level']}\n")
    lines.append(f"Overall Severity: {results['severity_percentage']:.2f}%\n")
    lines.append(f"Number of Diseased Regions: {results['num_diseased_regions']}\n")
    lines.append("\n=== INDIVIDUAL DISEASE REGIONS ===\n")

    if results['disease_info']:
        for disease in results['disease_info']:
            lines.append(f"\nDisease Region D{disease['id']}:\n")
            lines.append(f" - Bounding Box: x={disease['bbox'][0]}, y={disease['bbox'][1]}, ")
            lines.append(f"width={disease['bbox'][2]}, height={disease['bbox'][3]}\n")
            lines.append(f" - Area: {disease['area']:.0f} pixels\n")
            lines.append(f" - Center: ({disease['center'][0]}, {disease['center'][1]})\n")
    else:
        lines.append("No diseased regions detected.\n")

    with open(report_path, 'w') as f:
        f.writelines(lines)

    print(f"Detailed report saved: {report_path}")
    return report_path
678
def save_results(self, output_path, include_mask=True, include_background_removed=True):
    """Save processing results (annotated image plus optional intermediate artifacts)."""
    if self.processed_image is not None:
        cv2.imwrite(output_path, self.processed_image)
        print(f"Main result saved: {output_path}")

    # (enabled flag, attribute name, filename suffix, log prefix) per artifact.
    artifacts = [
        (include_mask, 'disease_mask', '_disease_mask.', 'Disease mask saved'),
        (include_background_removed, 'image_no_bg', '_no_background.', 'Background removed image saved'),
        (True, 'fruit_mask', '_fruit_mask.', 'Fruit mask saved'),
    ]
    for enabled, attr, suffix, message in artifacts:
        payload = getattr(self, attr, None)
        if enabled and payload is not None:
            path = output_path.replace('.', suffix)
            cv2.imwrite(path, payload)
            print(f"{message}: {path}")
699
# Example usage and testing function
def demonstrate_disease_detection():
    """Demonstrate the mango disease detection algorithm"""
    detector = FruitDiseaseDetector()

    # Printed one line at a time; the empty string reproduces the blank
    # separator line between the two sections.
    intro_lines = [
        "Mango Disease Detection Algorithm with Background Removal & Bounding Boxes",
        "========================================================================",
        "This algorithm detects mango diseases using:",
        "1. Mango-optimized background removal (GrabCut + Color + Edge detection)",
        "2. Anthracnose detection (dark circular spots with orange/pink halos)",
        "3. Alternaria detection (brown/black irregular spots with concentric rings)",
        "4. Aspergillus detection (black mould with greenish tints)",
        "5. Lasiodiplodia detection (stem rot with brown/black soft areas)",
        "6. Mango-calibrated texture analysis for rough surfaces",
        "7. Edge detection for irregular disease boundaries",
        "8. Individual disease region bounding boxes",
        "",
        "Algorithm Features for Mango:",
        "- Multi-method background removal optimized for mango shape",
        "- Disease-specific detection for common mango diseases",
        "- Individual region labeling (D1, D2, D3, etc.)",
        "- Mango-specific severity assessment (Healthy < 2%, Early < 8%, etc.)",
        "- Comprehensive reporting for mango disease management",
        "- Calibrated for mango color variations (green, yellow, orange, red)",
        "- Specialized detection for Aspergillus (Black Mould) and Lasiodiplodia (Stem Rot)",
    ]
    for line in intro_lines:
        print(line)

    return detector
728
+ # Batch testing function for algorithm validation
729
def test_mango_detection_algorithm():
    """Validate the detection algorithm on labeled mango sample images.

    Runs every test case, prints per-case results, saves each annotated
    output, and prints a pass/fail summary.

    Fix: the original summary status was a chained ternary that printed a
    checkmark for every "Should be Healthy" case regardless of the detected
    level, and also for ERROR results; the mark is now computed correctly.

    Returns:
        list[dict]: One summary record per test case.
    """
    detector = FruitDiseaseDetector()

    # Test cases with expected results - using correct file names
    test_cases = [
        ("SenMangoFruitDDS_bgremoved/Healthy/healthy_003.jpg", "Should be Healthy"),
        ("SenMangoFruitDDS_bgremoved/Healthy/healthy_010.jpg", "Should be Healthy"),
        ("SenMangoFruitDDS_bgremoved/Alternaria/Alternaria_005.jpg", "Should detect Alternaria"),
        ("SenMangoFruitDDS_bgremoved/Alternaria/Alternaria_010.jpg", "Should detect Alternaria"),
        ("SenMangoFruitDDS_bgremoved/Anthracnose/Anthracnose_002.jpg", "Should detect Anthracnose"),
        ("SenMangoFruitDDS_bgremoved/Anthracnose/Anthracnose_010.jpg", "Should detect Anthracnose"),
        ("SenMangoFruitDDS_bgremoved/Black Mould Rot/Aspergillus_001.jpg", "Should detect Aspergillus (Black Mould)"),
        ("SenMangoFruitDDS_bgremoved/Black Mould Rot/Aspergillus_010.jpg", "Should detect Aspergillus (Black Mould)"),
        ("SenMangoFruitDDS_bgremoved/Stem and Rot/Lasiodiplodia_001.jpg", "Should detect Lasiodiplodia (Stem Rot)"),
        ("SenMangoFruitDDS_bgremoved/Stem and Rot/Lasiodiplodia_012.jpg", "Should detect Lasiodiplodia (Stem Rot)"),
    ]

    print("=== MANGO DISEASE DETECTION ALGORITHM VALIDATION ===")
    print("Testing multiple samples to validate algorithm performance")
    print("Diseases: Alternaria, Anthracnose, Aspergillus (Black Mould), Lasiodiplodia (Stem Rot)")
    print("=" * 80)

    results_summary = []

    for i, (image_path, expected) in enumerate(test_cases, 1):
        print(f"\nTest {i}: {image_path}")
        print(f"Expected: {expected}")

        try:
            results = detector.process_image(image_path)
            output_path = f"test_result_{i}.jpg"

            result_info = {
                'test_id': i,
                'image_path': image_path,
                'expected': expected,
                'detected_level': results['disease_level'],
                'severity': results['severity_percentage'],
                'num_regions': results['num_diseased_regions']
            }
            results_summary.append(result_info)

            print(f"Result: {results['disease_level']} ({results['severity_percentage']:.2f}%)")
            print(f"Regions: {results['num_diseased_regions']}")

            # Save test result
            detector.save_results(output_path, include_mask=True)

        except Exception as e:
            print(f"Error: {e}")
            results_summary.append({
                'test_id': i,
                'image_path': image_path,
                'expected': expected,
                'detected_level': 'ERROR',
                'severity': 0.0,
                'num_regions': 0
            })

        print("-" * 50)

    def _status_mark(expected, detected_level):
        # A case passes when the detection outcome agrees with the expectation:
        # disease detected for "Should detect ..." cases, nothing detected for
        # "Should be Healthy" cases. Processing errors always fail.
        if detected_level == 'ERROR':
            return "✗"
        diseased = detected_level != 'Healthy'
        expect_disease = 'Should detect' in expected
        return "✓" if diseased == expect_disease else "✗"

    # Print summary
    print("\n" + "=" * 80)
    print("TESTING SUMMARY")
    print("=" * 80)
    for result in results_summary:
        status = _status_mark(result['expected'], result['detected_level'])
        print(f"Test {result['test_id']:2d}: {status} {result['detected_level']} ({result['severity']:.1f}%) - {result['expected']}")

    return results_summary
800
+
801
+ # Usage example:
802
if __name__ == "__main__":
    # Demo entry point: run the calibrated detector on one sample image,
    # print the findings, and save the annotated outputs plus a text report.

    # Initialize mango disease detector
    detector = FruitDiseaseDetector()

    # Test different disease types - uncomment to test specific diseases:
    # image_path = "SenMangoFruitDDS_bgremoved/Healthy/healthy_003.jpg"  # Should be Healthy
    # image_path = "SenMangoFruitDDS_bgremoved/Alternaria/Alternaria_005.jpg"  # Should detect Alternaria
    # image_path = "SenMangoFruitDDS_bgremoved/Anthracnose/Anthracnose_002.jpg"  # Should detect Anthracnose
    # image_path = "SenMangoFruitDDS_bgremoved/Black Mould Rot/Aspergillus_001.jpg"  # Should detect Aspergillus
    image_path = "SenMangoFruitDDS_bgremoved/Stem and Rot/Lasiodiplodia_012.jpg"  # Should detect Lasiodiplodia

    output_path = "mango_disease_detection_result.jpg"  # Output file name

    try:
        print("Processing mango image with calibrated algorithm...")
        print(f"Input: {image_path}")
        print(f"Output: {output_path}")
        print("-" * 60)

        # Full pipeline: background removal, disease segmentation, severity grading
        results = detector.process_image(image_path)

        print(f"\n=== MANGO DISEASE DETECTION RESULTS ===")
        print(f"Disease Level: {results['disease_level']}")
        print(f"Severity: {results['severity_percentage']:.2f}%")
        print(f"Number of Diseased Regions: {results['num_diseased_regions']}")

        # Print individual disease information
        if results['disease_info']:
            print(f"\n=== INDIVIDUAL DISEASE REGIONS ===")
            for disease in results['disease_info']:
                print(f"Disease D{disease['id']}: Area={disease['area']:.0f} pixels, "
                      f"Center=({disease['center'][0]}, {disease['center'][1]})")
        else:
            print("\n=== MANGO HEALTH STATUS ===")
            print("No significant disease regions detected - mango appears healthy!")

        # Save results (annotated image, disease mask, background-removed image, fruit mask)
        detector.save_results(output_path, include_mask=True)

        # Save detailed report (plain-text summary next to the image output)
        detector.save_detailed_report(output_path, results)

        print(f"\n=== FILES SAVED ===")
        print(f"Main result: {output_path}")
        print(f"Disease mask: {output_path.replace('.', '_disease_mask.')}")
        print(f"Background removed: {output_path.replace('.', '_no_background.')}")
        print(f"Fruit mask: {output_path.replace('.', '_fruit_mask.')}")
        print(f"Detailed report: {output_path.replace('.jpg', '_report.txt')}")

        print("\nMango disease detection completed successfully!")
        print("The algorithm has been calibrated for mango-specific diseases:")
        print("- Alternaria, Anthracnose, Aspergillus (Black Mould), Lasiodiplodia (Stem Rot)")

    except FileNotFoundError:
        print(f"Error: Could not find mango image file '{image_path}'")
        print("Please check the file path and make sure the image exists.")
        print("\nAvailable test images:")
        print("- SenMangoFruitDDS_bgremoved/Healthy/healthy_003.jpg")
        print("- SenMangoFruitDDS_bgremoved/Alternaria/Alternaria_005.jpg")
        print("- SenMangoFruitDDS_bgremoved/Anthracnose/Anthracnose_002.jpg")
    except Exception as e:
        # Broad catch is acceptable at this top-level script boundary
        print(f"Error processing mango image: {e}")

    # Uncomment below to see algorithm demonstration
    # demonstrate_disease_detection()

    # Uncomment below to run batch testing
    # test_mango_detection_algorithm()
src/mango_disease_ontology.py ADDED
@@ -0,0 +1,625 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Mango Disease Detection Semantic Web Ontology
3
+ Integrates OWL-RL reasoning with computer vision disease detection
4
+ """
5
+
6
+ import os
7
+ import json
8
+ from datetime import datetime
9
+ from typing import Dict, List, Tuple, Optional, Any
10
+ import cv2
11
+ import numpy as np
12
+
13
+ # RDF and OWL libraries
14
+ from rdflib import Graph, Namespace, Literal, URIRef, BNode
15
+ from rdflib.namespace import RDF, RDFS, OWL, XSD
16
+ import owlrl
17
+
18
+ # Import our disease detection algorithm
19
+ from app import FruitDiseaseDetector
20
+
21
+ class MangoOntologyManager:
22
+ """
23
+ Semantic Web Ontology Manager for Mango Disease Detection
24
+ Uses OWL-RL reasoning to enhance disease detection with domain knowledge
25
+ """
26
+
27
    def __init__(self):
        """Build the ontology graph, bind namespaces, attach the CV detector,
        and materialize OWL-RL inferences."""
        # Initialize RDF graph and namespaces
        self.graph = Graph()

        # Define namespaces for our ontology
        self.MANGO = Namespace("http://spotradar.org/mango-disease#")
        self.DISEASE = Namespace("http://spotradar.org/disease#")
        self.DETECTION = Namespace("http://spotradar.org/detection#")
        self.VISUAL = Namespace("http://spotradar.org/visual#")
        self.AGRI = Namespace("http://spotradar.org/agriculture#")

        # Bind namespaces to graph so serializations use readable prefixes
        self.graph.bind("mango", self.MANGO)
        self.graph.bind("disease", self.DISEASE)
        self.graph.bind("detection", self.DETECTION)
        self.graph.bind("visual", self.VISUAL)
        self.graph.bind("agri", self.AGRI)
        self.graph.bind("owl", OWL)
        self.graph.bind("rdfs", RDFS)

        # Initialize disease detector (computer-vision pipeline from app.py)
        self.detector = FruitDiseaseDetector()

        # Build the ontology (classes, properties, individuals, relationships)
        self._build_ontology()

        # Apply OWL-RL reasoning (expands the graph with inferred triples)
        self._apply_reasoning()
55
+
56
+ def _build_ontology(self):
57
+ """Build the complete mango disease ontology"""
58
+ print("Building mango disease ontology...")
59
+
60
+ # 1. Define top-level classes
61
+ self._define_core_classes()
62
+
63
+ # 2. Define mango disease classes
64
+ self._define_disease_classes()
65
+
66
+ # 3. Define visual characteristics
67
+ self._define_visual_properties()
68
+
69
+ # 4. Define detection properties
70
+ self._define_detection_properties()
71
+
72
+ # 5. Define severity levels
73
+ self._define_severity_levels()
74
+
75
+ # 6. Define causal relationships
76
+ self._define_causal_relationships()
77
+
78
+ # 7. Define temporal aspects
79
+ self._define_temporal_aspects()
80
+
81
+ # 8. Define economic impact
82
+ self._define_economic_impact()
83
+
84
+ print("Ontology structure built successfully!")
85
+
86
    def _define_core_classes(self):
        """Define fundamental classes in the ontology.

        Declares each (URI, label) pair as an owl:Class with an rdfs:label.
        Taxonomy links between these classes are added by the later
        _define_* builders.
        """
        # Core classes
        classes = [
            (self.MANGO.Fruit, "Fruit"),
            (self.MANGO.MangoFruit, "Mango Fruit"),
            (self.DISEASE.Disease, "Disease"),
            (self.DISEASE.FungalDisease, "Fungal Disease"),
            (self.DISEASE.BacterialDisease, "Bacterial Disease"),
            (self.VISUAL.VisualCharacteristic, "Visual Characteristic"),
            (self.VISUAL.ColorCharacteristic, "Color Characteristic"),
            (self.VISUAL.TextureCharacteristic, "Texture Characteristic"),
            (self.VISUAL.ShapeCharacteristic, "Shape Characteristic"),
            (self.DETECTION.DetectionResult, "Detection Result"),
            (self.DETECTION.ImageAnalysis, "Image Analysis"),
            (self.AGRI.SeverityLevel, "Severity Level"),
            (self.AGRI.EconomicImpact, "Economic Impact"),
        ]

        for class_uri, label in classes:
            self.graph.add((class_uri, RDF.type, OWL.Class))
            self.graph.add((class_uri, RDFS.label, Literal(label)))
108
+
109
    def _define_disease_classes(self):
        """Define specific mango disease classes and their taxonomy links."""
        # Mango is a subclass of Fruit
        self.graph.add((self.MANGO.MangoFruit, RDFS.subClassOf, self.MANGO.Fruit))

        # Disease taxonomy: fungal and bacterial diseases under Disease
        self.graph.add((self.DISEASE.FungalDisease, RDFS.subClassOf, self.DISEASE.Disease))
        self.graph.add((self.DISEASE.BacterialDisease, RDFS.subClassOf, self.DISEASE.Disease))

        # Specific mango diseases: (URI, label, parent class) — all fungal here
        diseases = [
            (self.DISEASE.Alternaria, "Alternaria", self.DISEASE.FungalDisease),
            (self.DISEASE.Anthracnose, "Anthracnose", self.DISEASE.FungalDisease),
            (self.DISEASE.Aspergillus, "Aspergillus (Black Mould Rot)", self.DISEASE.FungalDisease),
            (self.DISEASE.Lasiodiplodia, "Lasiodiplodia (Stem and Rot)", self.DISEASE.FungalDisease),
        ]

        for disease_uri, label, parent_class in diseases:
            self.graph.add((disease_uri, RDF.type, OWL.Class))
            self.graph.add((disease_uri, RDFS.label, Literal(label)))
            self.graph.add((disease_uri, RDFS.subClassOf, parent_class))
130
+
131
    def _define_visual_properties(self):
        """Define visual characteristic classes (colors, textures, shapes).

        NOTE(review): self.VISUAL.Irregular appears in BOTH the texture and
        shape lists below, so that single URI ends up a subclass of both
        TextureCharacteristic and ShapeCharacteristic with two labels —
        confirm whether separate IrregularTexture/IrregularShape URIs were
        intended.
        """
        # Color properties
        color_chars = [
            (self.VISUAL.DarkBrown, "Dark Brown Color"),
            (self.VISUAL.Black, "Black Color"),
            (self.VISUAL.Orange, "Orange Color"),
            (self.VISUAL.Pink, "Pink Color"),
            (self.VISUAL.Green, "Green Color"),
            (self.VISUAL.Yellow, "Yellow Color"),
            (self.VISUAL.Red, "Red Color"),
        ]

        for color_uri, label in color_chars:
            self.graph.add((color_uri, RDF.type, OWL.Class))
            self.graph.add((color_uri, RDFS.label, Literal(label)))
            self.graph.add((color_uri, RDFS.subClassOf, self.VISUAL.ColorCharacteristic))

        # Texture properties
        texture_chars = [
            (self.VISUAL.Smooth, "Smooth Texture"),
            (self.VISUAL.Rough, "Rough Texture"),
            (self.VISUAL.Fuzzy, "Fuzzy Texture"),
            (self.VISUAL.Irregular, "Irregular Texture"),
            (self.VISUAL.Concentric, "Concentric Pattern"),
        ]

        for texture_uri, label in texture_chars:
            self.graph.add((texture_uri, RDF.type, OWL.Class))
            self.graph.add((texture_uri, RDFS.label, Literal(label)))
            self.graph.add((texture_uri, RDFS.subClassOf, self.VISUAL.TextureCharacteristic))

        # Shape properties
        shape_chars = [
            (self.VISUAL.Circular, "Circular Shape"),
            (self.VISUAL.Irregular, "Irregular Shape"),
            (self.VISUAL.Oval, "Oval Shape"),
            (self.VISUAL.Elongated, "Elongated Shape"),
        ]

        for shape_uri, label in shape_chars:
            self.graph.add((shape_uri, RDF.type, OWL.Class))
            self.graph.add((shape_uri, RDFS.label, Literal(label)))
            self.graph.add((shape_uri, RDFS.subClassOf, self.VISUAL.ShapeCharacteristic))
175
+
176
    def _define_detection_properties(self):
        """Define object and data properties used to annotate detections."""
        # Object properties (link individuals to individuals/classes)
        properties = [
            (self.DETECTION.hasDisease, "has disease"),
            (self.DETECTION.hasVisualCharacteristic, "has visual characteristic"),
            (self.DETECTION.hasSeverityLevel, "has severity level"),
            (self.DETECTION.detectedIn, "detected in"),
            (self.DETECTION.causedBy, "caused by"),
            (self.DETECTION.affects, "affects"),
            (self.DETECTION.hasSymptom, "has symptom"),
            (self.AGRI.hasEconomicImpact, "has economic impact"),
        ]

        for prop_uri, label in properties:
            self.graph.add((prop_uri, RDF.type, OWL.ObjectProperty))
            self.graph.add((prop_uri, RDFS.label, Literal(label)))

        # Data properties: (URI, label, literal range datatype)
        data_properties = [
            (self.DETECTION.severityPercentage, "severity percentage", XSD.float),
            (self.DETECTION.numberOfRegions, "number of regions", XSD.integer),
            (self.DETECTION.detectionConfidence, "detection confidence", XSD.float),
            (self.DETECTION.imageWidth, "image width", XSD.integer),
            (self.DETECTION.imageHeight, "image height", XSD.integer),
            (self.DETECTION.detectionTimestamp, "detection timestamp", XSD.dateTime),
            (self.VISUAL.hueValue, "hue value", XSD.integer),
            (self.VISUAL.saturationValue, "saturation value", XSD.integer),
            (self.VISUAL.brightnessValue, "brightness value", XSD.integer),
            (self.AGRI.marketabilityScore, "marketability score", XSD.float),
        ]

        for prop_uri, label, datatype in data_properties:
            self.graph.add((prop_uri, RDF.type, OWL.DatatypeProperty))
            self.graph.add((prop_uri, RDFS.label, Literal(label)))
            self.graph.add((prop_uri, RDFS.range, datatype))
212
+
213
    def _define_severity_levels(self):
        """Define severity level individuals with their percentage bounds.

        NOTE(review): both the lower and upper bound are asserted with the
        SAME property (severityPercentage), so a consumer of the graph
        cannot tell min from max — consider distinct minSeverity/maxSeverity
        properties.
        """
        # (URI, label, min %, max %) — ranges mirror the detector's grading
        severity_levels = [
            (self.AGRI.Healthy, "Healthy", 0, 2),
            (self.AGRI.EarlyDisease, "Early Disease", 2, 8),
            (self.AGRI.ModerateDisease, "Moderate Disease", 8, 20),
            (self.AGRI.SevereDisease, "Severe Disease", 20, 40),
            (self.AGRI.CriticalDisease, "Critical Disease", 40, 100),
        ]

        for level_uri, label, min_percent, max_percent in severity_levels:
            self.graph.add((level_uri, RDF.type, self.AGRI.SeverityLevel))
            self.graph.add((level_uri, RDFS.label, Literal(label)))
            self.graph.add((level_uri, self.DETECTION.severityPercentage, Literal(min_percent, datatype=XSD.float)))
            self.graph.add((level_uri, self.DETECTION.severityPercentage, Literal(max_percent, datatype=XSD.float)))
228
+
229
+ def _define_causal_relationships(self):
230
+ """Define disease characteristics and causal relationships"""
231
+ # Alternaria characteristics
232
+ alternaria_symptoms = [
233
+ (self.DISEASE.Alternaria, self.DETECTION.hasSymptom, self.VISUAL.DarkBrown),
234
+ (self.DISEASE.Alternaria, self.DETECTION.hasSymptom, self.VISUAL.Black),
235
+ (self.DISEASE.Alternaria, self.DETECTION.hasSymptom, self.VISUAL.Irregular),
236
+ (self.DISEASE.Alternaria, self.DETECTION.hasSymptom, self.VISUAL.Concentric),
237
+ ]
238
+
239
+ # Anthracnose characteristics
240
+ anthracnose_symptoms = [
241
+ (self.DISEASE.Anthracnose, self.DETECTION.hasSymptom, self.VISUAL.Black),
242
+ (self.DISEASE.Anthracnose, self.DETECTION.hasSymptom, self.VISUAL.Orange),
243
+ (self.DISEASE.Anthracnose, self.DETECTION.hasSymptom, self.VISUAL.Pink),
244
+ (self.DISEASE.Anthracnose, self.DETECTION.hasSymptom, self.VISUAL.Circular),
245
+ ]
246
+
247
+ # Aspergillus characteristics
248
+ aspergillus_symptoms = [
249
+ (self.DISEASE.Aspergillus, self.DETECTION.hasSymptom, self.VISUAL.Black),
250
+ (self.DISEASE.Aspergillus, self.DETECTION.hasSymptom, self.VISUAL.Green),
251
+ (self.DISEASE.Aspergillus, self.DETECTION.hasSymptom, self.VISUAL.Fuzzy),
252
+ (self.DISEASE.Aspergillus, self.DETECTION.hasSymptom, self.VISUAL.Irregular),
253
+ ]
254
+
255
+ # Lasiodiplodia characteristics
256
+ lasiodiplodia_symptoms = [
257
+ (self.DISEASE.Lasiodiplodia, self.DETECTION.hasSymptom, self.VISUAL.DarkBrown),
258
+ (self.DISEASE.Lasiodiplodia, self.DETECTION.hasSymptom, self.VISUAL.Black),
259
+ (self.DISEASE.Lasiodiplodia, self.DETECTION.hasSymptom, self.VISUAL.Irregular),
260
+ ]
261
+
262
+ all_symptoms = alternaria_symptoms + anthracnose_symptoms + aspergillus_symptoms + lasiodiplodia_symptoms
263
+
264
+ for subject, predicate, obj in all_symptoms:
265
+ self.graph.add((subject, predicate, obj))
266
+
267
+ def _define_temporal_aspects(self):
268
+ """Define temporal progression of diseases"""
269
+ # Disease progression stages
270
+ stages = [
271
+ (self.DISEASE.EarlyStage, "Early Stage"),
272
+ (self.DISEASE.DevelopingStage, "Developing Stage"),
273
+ (self.DISEASE.AdvancedStage, "Advanced Stage"),
274
+ (self.DISEASE.CriticalStage, "Critical Stage"),
275
+ ]
276
+
277
+ for stage_uri, label in stages:
278
+ self.graph.add((stage_uri, RDF.type, OWL.Class))
279
+ self.graph.add((stage_uri, RDFS.label, Literal(label)))
280
+ self.graph.add((stage_uri, RDFS.subClassOf, self.DISEASE.Disease))
281
+
282
    def _define_economic_impact(self):
        """Define economic impact individuals with marketability scores (0-100)."""
        # (URI, label, marketability score) — higher score = more sellable fruit
        impact_levels = [
            (self.AGRI.NoImpact, "No Economic Impact", 100),
            (self.AGRI.MinimalImpact, "Minimal Impact", 85),
            (self.AGRI.ModerateImpact, "Moderate Impact", 60),
            (self.AGRI.SevereImpact, "Severe Impact", 30),
            (self.AGRI.CriticalImpact, "Critical Impact", 0),
        ]

        for impact_uri, label, marketability in impact_levels:
            self.graph.add((impact_uri, RDF.type, self.AGRI.EconomicImpact))
            self.graph.add((impact_uri, RDFS.label, Literal(label)))
            self.graph.add((impact_uri, self.AGRI.marketabilityScore, Literal(marketability, datatype=XSD.float)))
296
+
297
    def _apply_reasoning(self):
        """Apply OWL-RL reasoning to the ontology.

        Expands the graph in place with all triples deducible under OWL-RL
        semantics (subclass closure, property inferences, etc.).
        """
        print("Applying OWL-RL reasoning...")

        # Apply OWL-RL inference rules (mutates self.graph in place)
        owlrl.DeductiveClosure(owlrl.OWLRL_Semantics).expand(self.graph)

        print(f"Ontology size after reasoning: {len(self.graph)} triples")
305
+
306
+ def create_detection_instance(self, image_path: str, detection_results: Dict) -> URIRef:
307
+ """Create a semantic instance of a disease detection result"""
308
+ # Create unique URI for this detection
309
+ detection_id = f"detection_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{hash(image_path) % 10000}"
310
+ detection_uri = self.DETECTION[detection_id]
311
+
312
+ # Add basic detection information
313
+ self.graph.add((detection_uri, RDF.type, self.DETECTION.DetectionResult))
314
+ self.graph.add((detection_uri, RDFS.label, Literal(f"Detection of {os.path.basename(image_path)}")))
315
+ self.graph.add((detection_uri, self.DETECTION.detectionTimestamp,
316
+ Literal(datetime.now(), datatype=XSD.dateTime)))
317
+
318
+ # Add image properties
319
+ if 'image_properties' in detection_results:
320
+ props = detection_results['image_properties']
321
+ if 'width' in props:
322
+ self.graph.add((detection_uri, self.DETECTION.imageWidth,
323
+ Literal(props['width'], datatype=XSD.integer)))
324
+ if 'height' in props:
325
+ self.graph.add((detection_uri, self.DETECTION.imageHeight,
326
+ Literal(props['height'], datatype=XSD.integer)))
327
+
328
+ # Add detection results
329
+ self.graph.add((detection_uri, self.DETECTION.severityPercentage,
330
+ Literal(detection_results['severity_percentage'], datatype=XSD.float)))
331
+ self.graph.add((detection_uri, self.DETECTION.numberOfRegions,
332
+ Literal(detection_results['num_diseased_regions'], datatype=XSD.integer)))
333
+
334
+ # Map disease level to semantic class
335
+ disease_level = detection_results['disease_level']
336
+ severity_uri = self._map_severity_to_uri(disease_level)
337
+ if severity_uri:
338
+ self.graph.add((detection_uri, self.DETECTION.hasSeverityLevel, severity_uri))
339
+
340
+ # Infer likely diseases based on detection patterns
341
+ inferred_diseases = self._infer_diseases(detection_results)
342
+ for disease_uri in inferred_diseases:
343
+ self.graph.add((detection_uri, self.DETECTION.hasDisease, disease_uri))
344
+
345
+ # Calculate economic impact
346
+ economic_impact = self._calculate_economic_impact(detection_results['severity_percentage'])
347
+ self.graph.add((detection_uri, self.AGRI.hasEconomicImpact, economic_impact))
348
+
349
+ return detection_uri
350
+
351
    def _map_severity_to_uri(self, disease_level: str) -> Optional[URIRef]:
        """Map a disease-level string to its severity individual.

        Returns None for unrecognized levels (caller skips the triple then).
        """
        mapping = {
            "Healthy": self.AGRI.Healthy,
            "Early Disease": self.AGRI.EarlyDisease,
            "Moderate Disease": self.AGRI.ModerateDisease,
            "Severe Disease": self.AGRI.SevereDisease,
            "Critical Disease": self.AGRI.CriticalDisease,
        }
        return mapping.get(disease_level)
361
+
362
    def _infer_diseases(self, detection_results: Dict) -> List[URIRef]:
        """Infer likely diseases based on detection characteristics.

        Heuristic only: buckets the severity percentage into candidate
        disease classes. NOTE(review): the 5/15/30 thresholds are fixed
        guesses and do not use color/texture/shape evidence — confirm
        before relying on the inferred disease classes downstream.
        """
        inferred = []
        severity = detection_results['severity_percentage']

        # Simple inference based on severity and patterns
        # In a real system, this would use more sophisticated visual analysis
        if severity > 5:  # If disease detected
            # For demonstration, we'll use basic heuristics
            # In practice, this would analyze color, texture, shape patterns
            if severity < 15:
                # Early stage diseases - could be multiple
                inferred.extend([self.DISEASE.Alternaria, self.DISEASE.Anthracnose])
            elif severity < 30:
                # Moderate stage - more specific inference needed
                inferred.append(self.DISEASE.Anthracnose)
            else:
                # Severe cases - possibly aggressive diseases
                inferred.extend([self.DISEASE.Aspergillus, self.DISEASE.Lasiodiplodia])

        return inferred
383
+
384
+ def _calculate_economic_impact(self, severity: float) -> URIRef:
385
+ """Calculate economic impact based on severity"""
386
+ if severity < 2:
387
+ return self.AGRI.NoImpact
388
+ elif severity < 8:
389
+ return self.AGRI.MinimalImpact
390
+ elif severity < 20:
391
+ return self.AGRI.ModerateImpact
392
+ elif severity < 40:
393
+ return self.AGRI.SevereImpact
394
+ else:
395
+ return self.AGRI.CriticalImpact
396
+
397
    def process_image_with_semantics(self, image_path: str) -> Dict[str, Any]:
        """Process image with disease detection and create semantic annotations.

        Runs the CV pipeline, records image dimensions, mints a semantic
        detection individual, and returns the CV results enriched with the
        semantic URI, queried semantic info, and ontology inferences.
        """
        print(f"Processing image with semantic analysis: {image_path}")

        # Run disease detection (computer-vision pipeline)
        detection_results = self.detector.process_image(image_path)

        # Add image properties; silently skipped if the image cannot be re-read
        image = cv2.imread(image_path)
        if image is not None:
            detection_results['image_properties'] = {
                'width': image.shape[1],
                'height': image.shape[0],
                'channels': image.shape[2] if len(image.shape) > 2 else 1
            }

        # Create semantic instance in the RDF graph
        detection_uri = self.create_detection_instance(image_path, detection_results)

        # Query related semantic information back out of the graph
        semantic_info = self.query_detection_semantics(detection_uri)

        # Combine CV results with the semantic annotations
        enhanced_results = {
            **detection_results,
            'semantic_uri': str(detection_uri),
            'semantic_info': semantic_info,
            'ontology_inferences': self._get_ontology_inferences(detection_uri)
        }

        return enhanced_results
428
+
429
    def query_detection_semantics(self, detection_uri: URIRef) -> Dict[str, Any]:
        """Query semantic information about a detection.

        Runs three SPARQL queries to collect the diseases, severity level,
        and economic impact linked to *detection_uri*.

        Returns:
            Dict with keys 'diseases', 'severity_level', 'economic_impact',
            'symptoms', and 'recommendations'. The last two stay empty here;
            _get_ontology_inferences fills the equivalent data separately.
        """
        semantic_info = {
            'diseases': [],
            'severity_level': None,
            'economic_impact': None,
            'symptoms': [],
            'recommendations': []
        }

        # Query diseases attached via detection:hasDisease
        query = f"""
        PREFIX detection: <{self.DETECTION}>
        PREFIX disease: <{self.DISEASE}>
        PREFIX rdfs: <{RDFS}>

        SELECT ?disease ?diseaseLabel WHERE {{
            <{detection_uri}> detection:hasDisease ?disease .
            ?disease rdfs:label ?diseaseLabel .
        }}
        """

        for row in self.graph.query(query):
            semantic_info['diseases'].append({
                'uri': str(row.disease),
                'label': str(row.diseaseLabel)
            })

        # Query severity level (only the first binding is kept — note the break)
        query = f"""
        PREFIX detection: <{self.DETECTION}>
        PREFIX agri: <{self.AGRI}>
        PREFIX rdfs: <{RDFS}>

        SELECT ?severityLevel ?severityLabel WHERE {{
            <{detection_uri}> detection:hasSeverityLevel ?severityLevel .
            ?severityLevel rdfs:label ?severityLabel .
        }}
        """

        for row in self.graph.query(query):
            semantic_info['severity_level'] = {
                'uri': str(row.severityLevel),
                'label': str(row.severityLabel)
            }
            break

        # Query economic impact (only the first binding is kept)
        query = f"""
        PREFIX agri: <{self.AGRI}>
        PREFIX rdfs: <{RDFS}>

        SELECT ?impact ?impactLabel ?marketability WHERE {{
            <{detection_uri}> agri:hasEconomicImpact ?impact .
            ?impact rdfs:label ?impactLabel .
            ?impact agri:marketabilityScore ?marketability .
        }}
        """

        for row in self.graph.query(query):
            semantic_info['economic_impact'] = {
                'uri': str(row.impact),
                'label': str(row.impactLabel),
                'marketability_score': float(row.marketability)
            }
            break

        return semantic_info
497
+
498
    def _get_ontology_inferences(self, detection_uri: URIRef) -> List[str]:
        """Get ontology-based inferences and recommendations.

        Collects the symptom labels of every disease attached to the
        detection, then appends treatment recommendations.
        """
        inferences = []

        # Query for related information using SPARQL: all symptoms of all
        # diseases linked to this detection
        query = f"""
        PREFIX detection: <{self.DETECTION}>
        PREFIX disease: <{self.DISEASE}>
        PREFIX visual: <{self.VISUAL}>
        PREFIX rdfs: <{RDFS}>

        SELECT ?disease ?symptom ?symptomLabel WHERE {{
            <{detection_uri}> detection:hasDisease ?disease .
            ?disease detection:hasSymptom ?symptom .
            ?symptom rdfs:label ?symptomLabel .
        }}
        """

        symptoms = []
        for row in self.graph.query(query):
            symptoms.append(str(row.symptomLabel))

        if symptoms:
            inferences.append(f"Detected visual symptoms: {', '.join(symptoms)}")

        # Add treatment recommendations based on ontology
        inferences.extend(self._get_treatment_recommendations(detection_uri))

        return inferences
527
+
528
+ def _get_treatment_recommendations(self, detection_uri: URIRef) -> List[str]:
529
+ """Get treatment recommendations based on detected diseases"""
530
+ recommendations = []
531
+
532
+ # Query detected diseases
533
+ query = f"""
534
+ PREFIX detection: <{self.DETECTION}>
535
+ PREFIX disease: <{self.DISEASE}>
536
+ PREFIX rdfs: <{RDFS}>
537
+
538
+ SELECT ?disease ?diseaseLabel WHERE {{
539
+ <{detection_uri}> detection:hasDisease ?disease .
540
+ ?disease rdfs:label ?diseaseLabel .
541
+ }}
542
+ """
543
+
544
+ disease_labels = []
545
+ for row in self.graph.query(query):
546
+ disease_labels.append(str(row.diseaseLabel))
547
+
548
+ # Provide recommendations based on diseases
549
+ if "Alternaria" in disease_labels:
550
+ recommendations.append("Apply copper-based fungicide for Alternaria control")
551
+ recommendations.append("Improve air circulation and reduce humidity")
552
+
553
+ if "Anthracnose" in disease_labels:
554
+ recommendations.append("Use preventive fungicide sprays for Anthracnose")
555
+ recommendations.append("Remove infected fruits and debris")
556
+
557
+ if "Aspergillus" in disease_labels:
558
+ recommendations.append("Improve storage conditions to prevent Aspergillus")
559
+ recommendations.append("Reduce moisture and temperature in storage")
560
+
561
+ if "Lasiodiplodia" in disease_labels:
562
+ recommendations.append("Improve field sanitation for Lasiodiplodia control")
563
+ recommendations.append("Avoid mechanical damage during harvest")
564
+
565
+ return recommendations
566
+
567
+ def export_ontology(self, output_path: str, format: str = "turtle"):
568
+ """Export the ontology to a file"""
569
+ print(f"Exporting ontology to {output_path} in {format} format...")
570
+
571
+ with open(output_path, 'w', encoding='utf-8') as f:
572
+ f.write(self.graph.serialize(format=format))
573
+
574
+ print(f"Ontology exported successfully!")
575
+
576
+ def get_ontology_statistics(self) -> Dict[str, int]:
577
+ """Get statistics about the ontology"""
578
+ stats = {
579
+ 'total_triples': len(self.graph),
580
+ 'classes': len(list(self.graph.subjects(RDF.type, OWL.Class))),
581
+ 'object_properties': len(list(self.graph.subjects(RDF.type, OWL.ObjectProperty))),
582
+ 'datatype_properties': len(list(self.graph.subjects(RDF.type, OWL.DatatypeProperty))),
583
+ 'individuals': len(list(self.graph.subjects(RDF.type, self.DETECTION.DetectionResult))),
584
+ }
585
+ return stats
586
+
587
    def query_ontology(self, sparql_query: str) -> List[Dict]:
        """Execute a SPARQL query and return rows as {variable: string} dicts."""
        results = []
        for row in self.graph.query(sparql_query):
            result_dict = {}
            # row.labels lists the variables bound in this result row
            for var in row.labels:
                result_dict[var] = str(row[var])
            results.append(result_dict)
        return results
596
+
597
def demonstrate_semantic_detection():
    """Showcase the semantic mango disease detection system.

    Returns:
        MangoOntologyManager: the initialized ontology manager.
    """
    divider = "=" * 80
    for header_line in (
        divider,
        "SEMANTIC WEB ONTOLOGY FOR MANGO DISEASE DETECTION",
        divider,
        "Features:",
        "- OWL-RL reasoning for enhanced disease inference",
        "- Semantic annotation of detection results",
        "- Economic impact assessment",
        "- Treatment recommendations",
        "- SPARQL queries for knowledge discovery",
        "",
    ):
        print(header_line)

    # Initialize semantic system (builds the ontology and runs reasoning)
    print("Initializing semantic ontology manager...")
    ontology_manager = MangoOntologyManager()

    # Show ontology statistics
    stats = ontology_manager.get_ontology_statistics()
    print(f"Ontology Statistics:")
    for stat_name, stat_value in stats.items():
        print(f"  {stat_name.replace('_', ' ').title()}: {stat_value}")
    print()

    return ontology_manager
622
+
623
if __name__ == "__main__":
    # Demonstrate the system (builds ontology, prints statistics)
    demonstrate_semantic_detection()
src/semantic_disease_analyzer.py ADDED
@@ -0,0 +1,371 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Semantic Mango Disease Analysis Integration
3
+ Combines computer vision detection with semantic web reasoning
4
+ """
5
+
6
+ import os
7
+ import json
8
+ from typing import Dict, List, Any
9
+ import cv2
10
+ import numpy as np
11
+ from datetime import datetime
12
+
13
+ # Import our ontology manager
14
+ from mango_disease_ontology import MangoOntologyManager
15
+
16
class SemanticDiseaseAnalyzer:
    """
    Enhanced disease analyzer that combines computer vision with semantic reasoning.

    Wraps a MangoOntologyManager: the manager performs the computer-vision
    detection and semantic annotation; this class adds console reporting,
    result persistence (JSON / image / Turtle), batch evaluation over the
    dataset, and SPARQL knowledge queries.
    """

    def __init__(self):
        # MangoOntologyManager builds the OWL ontology and runs OWL-RL reasoning.
        self.ontology_manager = MangoOntologyManager()
        print("Semantic Disease Analyzer initialized with OWL-RL reasoning!")

    def analyze_image_semantically(self, image_path: str, save_results: bool = True) -> Dict[str, Any]:
        """
        Perform semantic analysis of a mango disease image.

        Args:
            image_path: Path to the image file to analyze.
            save_results: When True, persist JSON report, annotated image,
                and ontology export alongside the console output.

        Returns:
            The combined detection + semantic results dict, or {} on error.
        """
        print(f"\n{'='*60}")
        print(f"SEMANTIC ANALYSIS: {os.path.basename(image_path)}")
        print(f"{'='*60}")

        try:
            # Process with semantic enhancement
            results = self.ontology_manager.process_image_with_semantics(image_path)

            # Print enhanced results
            self._print_semantic_results(results)

            if save_results:
                self._save_semantic_results(image_path, results)

            return results

        except FileNotFoundError:
            print(f"Error: Image file not found: {image_path}")
            return {}
        except Exception as e:
            # Best-effort: report the failure and return an empty result so a
            # batch run can continue with the next image.
            print(f"Error during semantic analysis: {e}")
            return {}

    def _print_semantic_results(self, results: Dict[str, Any]):
        """Print comprehensive semantic analysis results."""

        # Basic detection results. Use .get with defaults so a partial result
        # dict (e.g. from an upstream failure) cannot abort the report with a
        # KeyError -- consistent with how the semantic section reads results.
        print(f"COMPUTER VISION DETECTION:")
        print(f"  Disease Level: {results.get('disease_level', 'Unknown')}")
        print(f"  Severity: {results.get('severity_percentage', 0.0):.2f}%")
        print(f"  Diseased Regions: {results.get('num_diseased_regions', 0)}")

        # Semantic information inferred by the ontology
        semantic_info = results.get('semantic_info', {})

        print(f"\nSEMANTIC REASONING:")

        # Detected diseases from ontology
        diseases = semantic_info.get('diseases', [])
        if diseases:
            print(f"  Inferred Diseases:")
            for disease in diseases:
                print(f"    - {disease['label']}")
        else:
            print(f"  No diseases inferred from ontology")

        # Severity classification
        severity_level = semantic_info.get('severity_level')
        if severity_level:
            print(f"  Ontology Severity: {severity_level['label']}")

        # Economic impact
        economic_impact = semantic_info.get('economic_impact')
        if economic_impact:
            print(f"  Economic Impact: {economic_impact['label']}")
            print(f"  Marketability Score: {economic_impact['marketability_score']:.1f}%")

        # Ontology inferences
        inferences = results.get('ontology_inferences', [])
        if inferences:
            print(f"\nONTOLOGY INFERENCES:")
            for i, inference in enumerate(inferences, 1):
                print(f"  {i}. {inference}")

        # Treatment recommendations: inferences phrased as actions.
        action_keywords = ('Apply', 'Improve', 'Remove', 'Use', 'Avoid', 'Reduce')
        recommendations = [inf for inf in inferences
                           if any(keyword in inf for keyword in action_keywords)]
        if recommendations:
            print(f"\nTREATMENT RECOMMENDATIONS:")
            for i, rec in enumerate(recommendations, 1):
                print(f"  {i}. {rec}")

    def _save_semantic_results(self, image_path: str, results: Dict[str, Any]):
        """Save semantic analysis results (JSON, image, ontology) to files."""
        base_name = os.path.splitext(os.path.basename(image_path))[0]

        # Save JSON results
        json_path = f"semantic_analysis_{base_name}.json"
        with open(json_path, 'w') as f:
            # Make results JSON serializable (numpy scalars/arrays converted)
            json_results = self._make_json_serializable(results)
            json.dump(json_results, f, indent=2)

        print(f"\nRESULTS SAVED:")
        print(f"  JSON Report: {json_path}")

        # Save visual results
        if 'output_image' in results:
            output_path = f"semantic_detection_{base_name}.jpg"
            cv2.imwrite(output_path, results['output_image'])
            print(f"  Visual Result: {output_path}")

        # Save ontology visualization
        self._save_ontology_visualization(base_name, results)

    def _make_json_serializable(self, obj):
        """Recursively convert numpy containers/scalars into JSON-safe values.

        Arrays (e.g. images) are summarized by shape instead of dumped in full.
        """
        if isinstance(obj, dict):
            return {k: self._make_json_serializable(v) for k, v in obj.items()}
        elif isinstance(obj, (list, tuple)):
            # Tuples are normalized to lists (JSON has no tuple type).
            return [self._make_json_serializable(item) for item in obj]
        elif isinstance(obj, np.ndarray):
            return f"numpy_array_shape_{obj.shape}"
        elif isinstance(obj, np.bool_):
            # json.dump raises TypeError on numpy bools; convert explicitly.
            return bool(obj)
        elif isinstance(obj, np.integer):
            return int(obj)
        elif isinstance(obj, np.floating):
            return float(obj)
        else:
            return obj

    def _save_ontology_visualization(self, base_name: str, results: Dict[str, Any]):
        """Export the current ontology graph as Turtle next to the results."""
        turtle_path = f"ontology_{base_name}.ttl"
        self.ontology_manager.export_ontology(turtle_path, format="turtle")
        print(f"  Ontology Export: {turtle_path}")

    def batch_semantic_analysis(self, image_folder: str = "SenMangoFruitDDS_bgremoved"):
        """Perform batch semantic analysis on the dataset.

        Analyzes up to 3 sample images per disease category, saves a JSON
        summary, and prints an accuracy report.

        Returns:
            Dict mapping category name to a list of per-image result dicts,
            or None when the dataset folder is missing.
        """
        print(f"\n{'='*80}")
        print("BATCH SEMANTIC ANALYSIS OF MANGO DATASET")
        print(f"{'='*80}")

        if not os.path.exists(image_folder):
            print(f"Error: Dataset folder not found: {image_folder}")
            return

        # Disease categories in dataset
        categories = ["Healthy", "Alternaria", "Anthracnose", "Black Mould Rot", "Stem and Rot"]

        batch_results = {}

        for category in categories:
            category_path = os.path.join(image_folder, category)
            if not os.path.exists(category_path):
                print(f"Warning: Category folder not found: {category_path}")
                continue

            print(f"\nProcessing category: {category}")
            print("-" * 40)

            # Get sample images from category
            image_files = [f for f in os.listdir(category_path)
                           if f.lower().endswith(('.jpg', '.jpeg', '.png'))]

            # Process first 3 images from each category
            sample_images = image_files[:3]

            category_results = []

            for image_file in sample_images:
                image_path = os.path.join(category_path, image_file)
                print(f"\n  Analyzing: {image_file}")

                results = self.analyze_image_semantically(image_path, save_results=False)
                if results:
                    semantic_info = results.get('semantic_info', {})
                    # economic_impact may be present but None (see
                    # _print_semantic_results); "or {}" guards the .get chain.
                    economic_impact = semantic_info.get('economic_impact') or {}
                    category_results.append({
                        'filename': image_file,
                        'expected_category': category,
                        'detected_level': results.get('disease_level', 'Unknown'),
                        'severity': results.get('severity_percentage', 0),
                        'semantic_diseases': [d['label'] for d in semantic_info.get('diseases', [])],
                        'economic_impact': economic_impact.get('label', 'Unknown')
                    })

            batch_results[category] = category_results

        # Save batch results
        self._save_batch_results(batch_results)

        # Print summary
        self._print_batch_summary(batch_results)

        return batch_results

    def _save_batch_results(self, batch_results: Dict):
        """Save batch analysis results to a timestamped JSON file."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        json_path = f"batch_semantic_analysis_{timestamp}.json"

        with open(json_path, 'w') as f:
            json.dump(batch_results, f, indent=2)

        print(f"\nBatch results saved: {json_path}")

    def _print_batch_summary(self, batch_results: Dict):
        """Print per-category and overall accuracy of the batch analysis."""
        print(f"\n{'='*80}")
        print("BATCH SEMANTIC ANALYSIS SUMMARY")
        print(f"{'='*80}")

        total_images = 0
        correct_detections = 0

        for category, results in batch_results.items():
            print(f"\n{category.upper()}:")

            category_correct = 0

            for result in results:
                total_images += 1

                expected = result['expected_category']
                detected = result['detected_level']
                semantic_diseases = result['semantic_diseases']

                # A detection counts as correct when it agrees on the
                # healthy-vs-diseased distinction (category-level matching
                # is not attempted by the CV stage).
                is_correct = False
                if expected == "Healthy" and detected == "Healthy":
                    is_correct = True
                elif expected != "Healthy" and detected != "Healthy":
                    is_correct = True

                if is_correct:
                    correct_detections += 1
                    category_correct += 1

                status = "[CORRECT]" if is_correct else "[INCORRECT]"

                print(f"  {status} {result['filename']}: {detected} ({result['severity']:.1f}%)")
                if semantic_diseases:
                    print(f"    Semantic: {', '.join(semantic_diseases)}")
                print(f"    Economic: {result['economic_impact']}")

            accuracy = (category_correct / len(results) * 100) if results else 0
            print(f"  Category Accuracy: {accuracy:.1f}% ({category_correct}/{len(results)})")

        overall_accuracy = (correct_detections / total_images * 100) if total_images > 0 else 0
        print(f"\nOVERALL SEMANTIC DETECTION ACCURACY: {overall_accuracy:.1f}% ({correct_detections}/{total_images})")

    def query_ontology_knowledge(self):
        """Demonstrate ontology querying capabilities with three SPARQL queries."""
        print(f"\n{'='*80}")
        print("ONTOLOGY KNOWLEDGE QUERIES")
        print(f"{'='*80}")

        # Query 1: All diseases and their symptoms
        print("\nQuery 1: Diseases and their visual symptoms")
        query1 = """
        PREFIX disease: <http://spotradar.org/disease#>
        PREFIX detection: <http://spotradar.org/detection#>
        PREFIX visual: <http://spotradar.org/visual#>
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

        SELECT ?diseaseLabel ?symptomLabel WHERE {
            ?disease a disease:Disease .
            ?disease rdfs:label ?diseaseLabel .
            ?disease detection:hasSymptom ?symptom .
            ?symptom rdfs:label ?symptomLabel .
        }
        ORDER BY ?diseaseLabel
        """

        results1 = self.ontology_manager.query_ontology(query1)
        for result in results1:
            print(f"  {result['diseaseLabel']} → {result['symptomLabel']}")

        # Query 2: Economic impacts
        print("\nQuery 2: Economic impact levels")
        query2 = """
        PREFIX agri: <http://spotradar.org/agriculture#>
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

        SELECT ?impactLabel ?marketability WHERE {
            ?impact a agri:EconomicImpact .
            ?impact rdfs:label ?impactLabel .
            ?impact agri:marketabilityScore ?marketability .
        }
        ORDER BY DESC(?marketability)
        """

        results2 = self.ontology_manager.query_ontology(query2)
        for result in results2:
            print(f"  {result['impactLabel']}: {float(result['marketability']):.0f}% marketability")

        # Query 3: Severity levels
        print("\nQuery 3: Disease severity classification")
        query3 = """
        PREFIX agri: <http://spotradar.org/agriculture#>
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

        SELECT ?severityLabel WHERE {
            ?severity a agri:SeverityLevel .
            ?severity rdfs:label ?severityLabel .
        }
        """

        results3 = self.ontology_manager.query_ontology(query3)
        for result in results3:
            print(f"  • {result['severityLabel']}")

    def get_system_statistics(self):
        """Print and return ontology statistics from the manager."""
        stats = self.ontology_manager.get_ontology_statistics()

        print(f"\n{'='*60}")
        print("SEMANTIC SYSTEM STATISTICS")
        print(f"{'='*60}")

        print(f"Ontology Metrics:")
        for key, value in stats.items():
            print(f"  {key.replace('_', ' ').title()}: {value:,}")

        return stats
330
+
331
def main():
    """Main demonstration of semantic disease detection"""
    print("SEMANTIC MANGO DISEASE DETECTION SYSTEM")
    print("Integrating Computer Vision with OWL-RL Reasoning")

    # Set up the combined CV + ontology analyzer.
    semantic_analyzer = SemanticDiseaseAnalyzer()

    # Report ontology size, then show what the knowledge base can answer.
    semantic_analyzer.get_system_statistics()
    semantic_analyzer.query_ontology_knowledge()

    divider = "=" * 80
    print(f"\n{divider}")
    print("SINGLE IMAGE SEMANTIC ANALYSIS DEMO")
    print(divider)

    # One sample path per disease category; the first that exists is analyzed.
    candidate_images = (
        "SenMangoFruitDDS_bgremoved/Healthy/healthy_003.jpg",
        "SenMangoFruitDDS_bgremoved/Alternaria/Alternaria_005.jpg",
        "SenMangoFruitDDS_bgremoved/Anthracnose/Anthracnose_002.jpg",
        "SenMangoFruitDDS_bgremoved/Black Mould Rot/Aspergillus_001.jpg",
        "SenMangoFruitDDS_bgremoved/Stem and Rot/Lasiodiplodia_012.jpg",
    )
    for candidate in candidate_images:
        if os.path.exists(candidate):
            semantic_analyzer.analyze_image_semantically(candidate)
            break  # Just demo one for now

    # Uncomment for batch analysis
    # print(f"\n{'='*80}")
    # print("BATCH SEMANTIC ANALYSIS")
    # print(f"{'='*80}")
    # analyzer.batch_semantic_analysis()
369
+
370
+ if __name__ == "__main__":
371
+ main()