ModelSmith-AI / backend /core /explainability.py
ACA050's picture
Upload 79 files
a309487 verified
raw
history blame
1.08 kB
import shap
import numpy as np
class ExplainabilityEngine:
    """Computes SHAP-based global feature importance for fitted tabular pipelines."""

    def explain_tabular(self, model_pipeline, X_sample):
        """Return global feature importance (mean |SHAP| per feature).

        Parameters
        ----------
        model_pipeline : fitted sklearn-style Pipeline with named steps
            "preprocessor" and "model".
        X_sample : pandas.DataFrame of raw (untransformed) sample rows.
            Assumed to be a DataFrame — the `.empty` check relies on it.

        Returns
        -------
        list[float]
            One mean-absolute-SHAP value per *transformed* feature column.

        Raises
        ------
        ValueError
            If the sample is empty, preprocessing yields no rows, or the
            SHAP computation produces no values.
        """
        if X_sample.empty:
            raise ValueError("Sample data is empty, cannot compute explanations")

        # Extract trained model and preprocessor from the pipeline.
        # NOTE(review): raises KeyError if the pipeline lacks these step names.
        preprocessor = model_pipeline.named_steps["preprocessor"]
        model = model_pipeline.named_steps["model"]

        X_transformed = preprocessor.transform(X_sample)
        if X_transformed.shape[0] == 0:
            raise ValueError("Transformed sample data is empty after preprocessing")

        explainer = shap.Explainer(model, X_transformed)
        # check_additivity=False: the additivity self-check can fail on
        # approximated/background-summarized explanations of encoded inputs.
        shap_values = explainer(X_transformed, check_additivity=False)
        if shap_values is None or shap_values.values is None:
            raise ValueError("SHAP computation failed")

        # Mean |SHAP| over the sample axis (axis 0). Multi-output models
        # (e.g. multiclass classifiers) leave a trailing output axis, which
        # would make .tolist() return nested lists instead of one float per
        # feature — average the remaining axes away so the return contract
        # (flat list, one value per transformed feature) always holds.
        importance = np.abs(np.asarray(shap_values.values)).mean(axis=0)
        while importance.ndim > 1:
            importance = importance.mean(axis=-1)

        global_importance = importance.tolist()
        if len(global_importance) == 0:
            raise ValueError("No feature importance computed")
        return global_importance