MissingBreath committed on
Commit
f20c39e
·
verified ·
1 Parent(s): 44dba3a

Update api.py

Browse files
Files changed (1) hide show
  1. api.py +78 -1
api.py CHANGED
@@ -8,6 +8,7 @@ import os
8
  import google.generativeai as genai
9
  from pydantic import BaseModel
10
  from ultralytics import YOLO
 
11
 
12
  from langchain.document_loaders import TextLoader # Or a custom loader for .docs
13
  from langchain.text_splitter import RecursiveCharacterTextSplitter
@@ -194,6 +195,72 @@ def predict_crop_disease(input_data):
194
 
195
  return prediction[0] #predicted_label[0]
196
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
197
  app = FastAPI()
198
 
199
  @app.post("/classify")
@@ -206,9 +273,19 @@ async def classify(image: UploadFile = File(...)):
206
  predictions = model.predict(img_array)
207
  predicted_class_idx = np.argmax(predictions)
208
  predicted_class_idx = int(predicted_class_idx)
209
- return {"prediction": predicted_class_idx}
 
 
 
 
 
 
 
 
 
210
  else:
211
  return {"error": "No image provided"}
 
212
 
213
  yolomodel = YOLO("yolo11m.pt")
214
  @app.post("/multiclassify")
 
8
  import google.generativeai as genai
9
  from pydantic import BaseModel
10
  from ultralytics import YOLO
11
+ import matplotlib.pyplot as plt
12
 
13
  from langchain.document_loaders import TextLoader # Or a custom loader for .docs
14
  from langchain.text_splitter import RecursiveCharacterTextSplitter
 
195
 
196
  return prediction[0] #predicted_label[0]
197
 
198
def make_gradcam_heatmap(img_array, model, last_conv_layer_name, pred_index=None):
    """Compute a Grad-CAM heatmap for a single model prediction.

    Args:
        img_array: Preprocessed input batch fed to the model
            (presumably batch size 1 -- TODO confirm against caller).
        model: A tf.keras model that contains `last_conv_layer_name`.
        last_conv_layer_name: Name of the convolutional layer to explain.
        pred_index: Class index to explain; defaults to the argmax class.

    Returns:
        A 2-D NumPy array of heatmap values normalized to [0, 1]
        (all zeros when the target class produces no positive activation).
    """
    # Auxiliary model mapping the input to (conv activations, predictions)
    # so both are available for gradient computation.
    grad_model = tf.keras.models.Model(
        [model.inputs], [model.get_layer(last_conv_layer_name).output, model.output]
    )

    with tf.GradientTape() as tape:
        last_conv_layer_output, preds = grad_model(img_array)
        if pred_index is None:
            pred_index = tf.argmax(preds[0])
        class_channel = preds[:, pred_index]

    # Gradient of the target class score w.r.t. the conv feature map,
    # averaged over batch and spatial axes to get per-channel weights.
    grads = tape.gradient(class_channel, last_conv_layer_output)
    pooled_grads = tf.reduce_mean(grads, axis=(0, 1, 2))

    # Weight the feature map channels by the pooled gradients.
    last_conv_layer_output = last_conv_layer_output[0]
    heatmap = last_conv_layer_output @ pooled_grads[..., tf.newaxis]
    heatmap = tf.squeeze(heatmap)

    # BUG FIX: the original divided by reduce_max unconditionally, which
    # yields NaNs (0/0) when the ReLU'd heatmap is all zeros. Normalize
    # only when there is a positive maximum.
    heatmap = tf.maximum(heatmap, 0)
    max_val = tf.math.reduce_max(heatmap)
    if max_val > 0:
        heatmap = heatmap / max_val
    return heatmap.numpy()
216
+
217
+
218
def display_gradcam(img, heatmap, alpha=0.4):
    """Overlay a Grad-CAM heatmap on an image and return it as base64 PNG.

    Args:
        img: Original image array of shape (H, W, C) -- assumes pixel values
            on a scale compatible with the jet overlay; TODO confirm caller.
        heatmap: 2-D heatmap with values in [0, 1] (output of
            make_gradcam_heatmap).
        alpha: Blending weight of the heatmap over the original image.

    Returns:
        The superimposed image encoded as a base64 PNG string.
    """
    # Quantize heatmap values to 0..255 and look up jet RGB colors.
    # FIX: plt.cm.get_cmap was deprecated in matplotlib 3.7 and removed in
    # 3.9; plt.get_cmap is the supported equivalent.
    heatmap = np.uint8(255 * heatmap)
    jet = plt.get_cmap("jet")
    jet_colors = jet(np.arange(256))[:, :3]
    jet_heatmap = jet_colors[heatmap]

    # Resize the colored heatmap to match the original image dimensions.
    jet_heatmap = tf.keras.preprocessing.image.array_to_img(jet_heatmap)
    jet_heatmap = jet_heatmap.resize((img.shape[1], img.shape[0]))
    jet_heatmap = tf.keras.preprocessing.image.img_to_array(jet_heatmap)

    # Superimpose the heatmap onto the original image.
    superimposed_img = jet_heatmap * alpha + img
    superimposed_img = tf.keras.preprocessing.image.array_to_img(superimposed_img)

    # Save the image to a BytesIO object instead of showing it with plt.imshow()
    img_byte_arr = BytesIO()
    superimposed_img.save(img_byte_arr, format='PNG')
    img_byte_arr = img_byte_arr.getvalue()

    # Return the image as base64
    return base64.b64encode(img_byte_arr).decode('utf-8')
239
+
240
+
241
def calculate_activation_ratio(heatmap, threshold=0.45):
    """Calculates the ratio of activated to non-activated pixels in a heatmap.

    Args:
        heatmap: A NumPy array representing the Grad-CAM heatmap.
        threshold: The threshold for classifying pixels as activated or not.

    Returns:
        The ratio of activated pixels to non-activated pixels.
    """
    mask = heatmap > threshold
    n_active = int(np.count_nonzero(mask))
    n_inactive = mask.size - n_active

    # When every pixel is activated, report 1.0 instead of dividing by zero.
    return 1.0 if n_inactive == 0 else n_active / n_inactive
259
+
260
+ # Example usage within the existing code (assuming heatmap is calculated as before):
261
+
262
+ last_conv_layer_name = "block3_conv2"
263
+
264
  app = FastAPI()
265
 
266
  @app.post("/classify")
 
273
  predictions = model.predict(img_array)
274
  predicted_class_idx = np.argmax(predictions)
275
  predicted_class_idx = int(predicted_class_idx)
276
+
277
+ heatmap = make_gradcam_heatmap(img_array, model, last_conv_layer_name)
278
+ base64_image = display_gradcam(img, heatmap, alpha)
279
+
280
+ # Return the base64 encoded image in the response
281
+
282
+ # Calculate and print the activation ratio
283
+ ratio = calculate_activation_ratio(heatmap)
284
+
285
+ return {"prediction": predicted_class_idx,"gradcam": base64_image,"ration":ratio}
286
  else:
287
  return {"error": "No image provided"}
288
+
289
 
290
  yolomodel = YOLO("yolo11m.pt")
291
  @app.post("/multiclassify")