rkonan commited on
Commit
2a4e3dd
·
1 Parent(s): 4aa79a1

ajout nouveau type cache

Browse files
Files changed (4) hide show
  1. .gitignore +1 -1
  2. Dockerfile +1 -1
  3. app/model.py +28 -33
  4. requirements.txt +1 -0
.gitignore CHANGED
@@ -19,4 +19,4 @@ app/__pycache__/
19
  *.log
20
 
21
  #cache des heatmaps
22
- cache_heatmaps/
 
19
  *.log
20
 
21
  #cache des heatmaps
22
+ cache/
Dockerfile CHANGED
@@ -13,7 +13,7 @@ ENV HOME=/home/user
13
  ENV PATH=/home/user/.local/bin:$PATH
14
 
15
  # Créer la structure de dossiers complète avec les bonnes permissions (MODIFIÉ)
16
- RUN mkdir -p /home/user/app/cache_heatmaps && \
17
  chown -R user:user /home/user/app && \
18
  chmod -R 755 /home/user/app
19
 
 
13
  ENV PATH=/home/user/.local/bin:$PATH
14
 
15
  # Créer la structure de dossiers complète avec les bonnes permissions (MODIFIÉ)
16
+ RUN mkdir -p /home/user/app/cache && \
17
  chown -R user:user /home/user/app && \
18
  chmod -R 755 /home/user/app
19
 
app/model.py CHANGED
@@ -21,13 +21,12 @@ from tf_keras_vis.utils import normalize
21
  import logging
22
  import time
23
  import os
24
-
25
  from typing import TypedDict, Callable, Any
26
 
27
- HEATMAP_CACHE = {}
28
- CACHE_DIR = "./cache_heatmaps"
29
  os.makedirs(CACHE_DIR, exist_ok=True)
30
-
31
 
32
  logging.basicConfig(
33
  level=logging.INFO, # ou logging.DEBUG
@@ -336,26 +335,17 @@ def hash_image_bytes(image_bytes):
336
  def get_heatmap(config, image_bytes: bytes, predicted_class_index):
337
  result = {}
338
  try:
339
- image_hash = hash_image_bytes(image_bytes)
340
- cache_key = f"{image_hash}_{predicted_class_index}"
341
 
342
- # Vérification cache mémoire d'abord
343
- if cache_key in HEATMAP_CACHE:
344
- logger.info(f"✅ Heatmap trouvée dans cache mémoire pour {cache_key}")
345
- result["heatmap"] = HEATMAP_CACHE[cache_key]
346
  return result
347
 
348
- # Vérification cache disque ensuite
349
- cache_file_path = os.path.join(CACHE_DIR, f"{cache_key}.pkl")
350
- if os.path.exists(cache_file_path):
351
- logger.info(f"✅ Heatmap trouvée sur disque pour {cache_key}")
352
- with open(cache_file_path, "rb") as f:
353
- cached_heatmap = pickle.load(f)
354
- result["heatmap"] = cached_heatmap
355
- # On remet aussi en mémoire pour accélérer prochaines requêtes
356
- HEATMAP_CACHE[cache_key] = cached_heatmap
357
- return result
358
 
 
359
  # Calcul si non trouvé dans le cache
360
  _, raw_input = preprocess_image(
361
  image_bytes, config["target_size"], config["preprocess_input"]
@@ -377,14 +367,7 @@ def get_heatmap(config, image_bytes: bytes, predicted_class_index):
377
  # Conversion en liste pour le JSON
378
  heatmap_list = heatmap.tolist()
379
  result["heatmap"] = heatmap_list
380
-
381
- # Sauvegarde dans cache mémoire
382
- HEATMAP_CACHE[cache_key] = heatmap_list
383
-
384
- # Sauvegarde sur disque
385
- with open(cache_file_path, "wb") as f:
386
- pickle.dump(heatmap_list, f)
387
-
388
  except Exception as e:
389
  logger.error(f"❌ Erreur lors de la génération de la heatmap: {e}")
390
  result["heatmap"] = []
@@ -418,10 +401,17 @@ def get_heatmap_old(config, image_bytes: bytes,predicted_class_index):
418
  result["heatmap"] = []
419
  return result
420
 
421
- def predict_with_model(config, image_bytes: bytes):
422
-
423
- #input_array,raw_input = preprocess_image(image_bytes,config["target_size"],config["preprocess_input"])
424
 
 
 
 
 
 
 
 
 
 
 
425
  try:
426
  logger.info("📤 Lecture des bytes et conversion en image PIL")
427
  image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
@@ -436,13 +426,18 @@ def predict_with_model(config, image_bytes: bytes):
436
  confidence = float(np.max(preds))
437
  entropy=float(compute_entropy_safe(preds))
438
  logger.info(f"✅ Prédiction : classe={predicted_class_index}, confiance={confidence:.4f},entropy={entropy:.4f}")
439
-
440
- return {
441
  "preds": preds.tolist(),
442
  "predicted_class": predicted_class_index,
443
  "confidence": confidence,
444
  "entropy":entropy
445
  }
 
 
 
 
 
 
446
 
447
 
448
  def predict_with_model_old(config, image_bytes: bytes):
 
21
  import logging
22
  import time
23
  import os
24
+ import diskcache as dc
25
  from typing import TypedDict, Callable, Any
26
 
27
+ CACHE_DIR = './cache'
 
28
  os.makedirs(CACHE_DIR, exist_ok=True)
29
+ cache = dc.Cache(CACHE_DIR)
30
 
31
  logging.basicConfig(
32
  level=logging.INFO, # ou logging.DEBUG
 
335
  def get_heatmap(config, image_bytes: bytes, predicted_class_index):
336
  result = {}
337
  try:
338
+ hash_key = hash_image_bytes(image_bytes)
339
+ heatmap_key = f"{hash_key}_heatmap"
340
 
341
+ # Vérification cache mémoire d'abord
342
+ if heatmap_key in cache:
343
+ logger.info(f"✅ Heatmap trouvée dans le cache {heatmap_key}")
344
+ result["heatmap"] = cache[heatmap_key]
345
  return result
346
 
 
 
 
 
 
 
 
 
 
 
347
 
348
+ #
349
  # Calcul si non trouvé dans le cache
350
  _, raw_input = preprocess_image(
351
  image_bytes, config["target_size"], config["preprocess_input"]
 
367
  # Conversion en liste pour le JSON
368
  heatmap_list = heatmap.tolist()
369
  result["heatmap"] = heatmap_list
370
+ cache[heatmap_key] = heatmap_list
 
 
 
 
 
 
 
371
  except Exception as e:
372
  logger.error(f"❌ Erreur lors de la génération de la heatmap: {e}")
373
  result["heatmap"] = []
 
401
  result["heatmap"] = []
402
  return result
403
 
 
 
 
404
 
405
+
406
+
407
+ def predict_with_cache(config, image_bytes: bytes):
408
+ hash_key = hash_image_bytes(image_bytes)
409
+ pred_key = f"{hash_key}_pred"
410
+
411
+ if pred_key in cache:
412
+ logger.info(f"✅ prédiction trouvée dans le cache {pred_key}")
413
+ return cache[pred_key]
414
+
415
  try:
416
  logger.info("📤 Lecture des bytes et conversion en image PIL")
417
  image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
 
426
  confidence = float(np.max(preds))
427
  entropy=float(compute_entropy_safe(preds))
428
  logger.info(f"✅ Prédiction : classe={predicted_class_index}, confiance={confidence:.4f},entropy={entropy:.4f}")
429
+ result={
 
430
  "preds": preds.tolist(),
431
  "predicted_class": predicted_class_index,
432
  "confidence": confidence,
433
  "entropy":entropy
434
  }
435
+ cache[pred_key] = result
436
+ return result
437
+
438
def predict_with_model(config, image_bytes: bytes):
    """Backward-compatible entry point kept for existing callers.

    All prediction logic — including the diskcache-backed memoization —
    lives in predict_with_cache; this wrapper only delegates to it.
    """
    prediction = predict_with_cache(config, image_bytes)
    return prediction
440
+
441
 
442
 
443
  def predict_with_model_old(config, image_bytes: bytes):
requirements.txt CHANGED
@@ -95,3 +95,4 @@ Werkzeug==3.1.3
95
  wheel==0.45.1
96
  wrapt==1.17.2
97
  yarl==1.20.1
 
 
95
  wheel==0.45.1
96
  wrapt==1.17.2
97
  yarl==1.20.1
98
+ diskcache