Spaces:
Paused
Paused
correction bug affichage gradcam online
Browse files
- app/__pycache__/main.cpython-312.pyc +0 -0
- app/__pycache__/model.cpython-312.pyc +0 -0
- app/__pycache__/voting.cpython-312.pyc +0 -0
- app/model.py +23 -9
- app/voting.py +2 -2
app/__pycache__/main.cpython-312.pyc
CHANGED
|
Binary files a/app/__pycache__/main.cpython-312.pyc and b/app/__pycache__/main.cpython-312.pyc differ
|
|
|
app/__pycache__/model.cpython-312.pyc
CHANGED
|
Binary files a/app/__pycache__/model.cpython-312.pyc and b/app/__pycache__/model.cpython-312.pyc differ
|
|
|
app/__pycache__/voting.cpython-312.pyc
CHANGED
|
Binary files a/app/__pycache__/voting.cpython-312.pyc and b/app/__pycache__/voting.cpython-312.pyc differ
|
|
|
app/model.py
CHANGED
|
@@ -20,7 +20,7 @@ import tensorflow as tf
|
|
| 20 |
from tf_keras_vis.saliency import Saliency
|
| 21 |
from tf_keras_vis.utils import normalize
|
| 22 |
import logging
|
| 23 |
-
|
| 24 |
|
| 25 |
from typing import TypedDict, Callable, Any
|
| 26 |
logging.basicConfig(
|
|
@@ -352,17 +352,31 @@ def predict_with_model(config, image_bytes: bytes,show_heatmap=False):
|
|
| 352 |
"entropy":entropy,
|
| 353 |
"is_uncertain_model":is_uncertain_model
|
| 354 |
}
|
| 355 |
-
|
| 356 |
if show_heatmap and not is_uncertain_model:
|
| 357 |
-
|
| 358 |
-
|
|
|
|
| 359 |
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
| 363 |
-
|
| 364 |
-
|
|
|
|
|
|
|
| 365 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 366 |
|
| 367 |
|
| 368 |
return result
|
|
|
|
| 20 |
from tf_keras_vis.saliency import Saliency
|
| 21 |
from tf_keras_vis.utils import normalize
|
| 22 |
import logging
|
| 23 |
+
import time
|
| 24 |
|
| 25 |
from typing import TypedDict, Callable, Any
|
| 26 |
logging.basicConfig(
|
|
|
|
| 352 |
"entropy":entropy,
|
| 353 |
"is_uncertain_model":is_uncertain_model
|
| 354 |
}
|
|
|
|
| 355 |
if show_heatmap and not is_uncertain_model:
|
| 356 |
+
try:
|
| 357 |
+
logger.info("✅ Début de la génération de la heatmap")
|
| 358 |
+
start_time = time.time()
|
| 359 |
|
| 360 |
+
# Vérification des entrées
|
| 361 |
+
logger.info(f"🖼️ Image d'entrée shape: {raw_input.shape}")
|
| 362 |
+
logger.info(f"🎯 Index de classe prédite: {predicted_class_index}")
|
| 363 |
+
logger.info(f"🛠️ Dernière couche utilisée: {config['last_conv_layer']}")
|
| 364 |
+
|
| 365 |
+
# Calcul de la heatmap
|
| 366 |
+
heatmap = compute_gradcam(config["gradcam_model"], raw_input, class_index=predicted_class_index, layer_name=config["last_conv_layer"])
|
| 367 |
|
| 368 |
+
elapsed_time = time.time() - start_time
|
| 369 |
+
logger.info(f"✅ Heatmap générée en {elapsed_time:.2f} secondes")
|
| 370 |
+
|
| 371 |
+
# Conversion en liste pour le JSON
|
| 372 |
+
result["heatmap"] = heatmap.tolist()
|
| 373 |
+
|
| 374 |
+
except Exception as e:
|
| 375 |
+
logger.error(f"❌ Erreur lors de la génération de la heatmap: {e}")
|
| 376 |
+
result["heatmap"] = []
|
| 377 |
+
else:
|
| 378 |
+
logger.info("ℹ️ Heatmap non générée (option désactivée ou modèle incertain)")
|
| 379 |
+
result["heatmap"] = []
|
| 380 |
|
| 381 |
|
| 382 |
return result
|
app/voting.py
CHANGED
|
@@ -75,11 +75,11 @@ async def soft_voting(model_configs,image_bytes: bytes,mode,show_heatmap):
|
|
| 75 |
|
| 76 |
if show_heatmap:
|
| 77 |
heatmap = prediction.get("heatmap")
|
| 78 |
-
if
|
| 79 |
models_heatmaps.append(heatmap)
|
| 80 |
else:
|
| 81 |
logger.warning(f"⚠️ Heatmap vide ou invalide, non ajoutée pour le modèle {config['model_name']}")
|
| 82 |
-
|
| 83 |
|
| 84 |
models.append(config["model_name"])
|
| 85 |
logger.info(f"📊 Prédictions ajoutées pour {config['model_name']}")
|
|
|
|
| 75 |
|
| 76 |
if show_heatmap:
|
| 77 |
heatmap = prediction.get("heatmap")
|
| 78 |
+
if heatmap and len(heatmap) > 0:
|
| 79 |
models_heatmaps.append(heatmap)
|
| 80 |
else:
|
| 81 |
logger.warning(f"⚠️ Heatmap vide ou invalide, non ajoutée pour le modèle {config['model_name']}")
|
| 82 |
+
logger.info(f"Taille heatmaps :{len(models_heatmaps)}")
|
| 83 |
|
| 84 |
models.append(config["model_name"])
|
| 85 |
logger.info(f"📊 Prédictions ajoutées pour {config['model_name']}")
|