File size: 4,117 Bytes
90bacf7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
"""
LIME Explainer Module
Local interpretable model explanations.

Part of the complete blueprint implementation.
"""

import logging
from typing import Any, Callable, Dict, List, Optional

import numpy as np

logger = logging.getLogger(__name__)

try:
    import lime
    from lime.lime_tabular import LimeTabularExplainer
    LIME_AVAILABLE = True
except ImportError:
    LIME_AVAILABLE = False


class LIMEExplainer:
    """
    LIME-based model explanation.

    Features:
    - Local explanations
    - Model-agnostic
    - Feature contributions

    Degrades gracefully: when the optional `lime` package is missing or an
    explanation fails, methods return a fallback dict / placeholder HTML
    instead of raising.
    """

    def __init__(self):
        # Underlying LimeTabularExplainer; stays None until fit() succeeds.
        self.explainer = None
        # Column names used in explanations; populated by fit().
        self.feature_names: List[str] = []
        # Default class labels (match-outcome style: Home/Draw/Away).
        self.class_names: List[str] = ['H', 'D', 'A']
        # Explanation mode. Previously assigned only inside fit(), which
        # made `self.mode` an AttributeError on an unfitted instance.
        self.mode: str = 'classification'

    def fit(
        self,
        X_train: np.ndarray,
        feature_names: Optional[List[str]] = None,
        class_names: Optional[List[str]] = None,
        mode: str = 'classification'
    ) -> 'LIMEExplainer':
        """
        Fit LIME explainer.

        Args:
            X_train: Training data, shape (n_samples, n_features).
            feature_names: Optional per-column names; defaults to
                'feature_0', 'feature_1', ...
            class_names: Optional class labels; defaults to the current
                ``self.class_names``.
            mode: 'classification' or 'regression', forwarded to LIME.

        Returns:
            self, for call chaining.
        """
        self.feature_names = feature_names or [f'feature_{i}' for i in range(X_train.shape[1])]
        self.class_names = class_names or self.class_names
        self.mode = mode

        if not LIME_AVAILABLE:
            logger.warning("LIME not available")
            return self

        try:
            self.explainer = LimeTabularExplainer(
                X_train,
                feature_names=self.feature_names,
                class_names=self.class_names,
                mode=mode
            )
            logger.info("LIME explainer fitted successfully")
        except Exception as e:
            # Best-effort: leave self.explainer as None so callers get the
            # fallback explanation rather than a crash.
            logger.warning(f"Could not create LIME explainer: {e}")

        return self

    def explain_prediction(
        self,
        X: np.ndarray,
        predict_fn: Callable,
        num_features: int = 10
    ) -> Dict:
        """
        Explain a single prediction.

        Args:
            X: A single instance (1-D) or a batch (2-D, first row used).
            predict_fn: Model prediction function passed to LIME
                (for classification, should return class probabilities).
            num_features: Maximum number of features in the explanation.

        Returns:
            Dict with 'top_features', 'positive_contributors',
            'negative_contributors' and 'local_prediction'; a fallback
            dict when LIME is unavailable or explanation fails.
        """
        if self.explainer is None or not LIME_AVAILABLE:
            return self._fallback_explanation()

        try:
            # LIME explains one instance at a time; take the first row of
            # a batch input.
            if X.ndim > 1:
                X = X[0]

            explanation = self.explainer.explain_instance(
                X,
                predict_fn,
                num_features=num_features
            )

            # (feature description, signed weight) pairs, strongest first.
            feature_weights = explanation.as_list()

            # local_pred is a numpy array when present; convert to a plain
            # float so the returned dict is JSON-serializable.
            local_pred = (
                float(explanation.local_pred[0])
                if hasattr(explanation, 'local_pred') else None
            )

            return {
                'top_features': [
                    {'feature': f, 'contribution': round(w, 4)}
                    for f, w in feature_weights
                ],
                'positive_contributors': [
                    {'feature': f, 'contribution': round(w, 4)}
                    for f, w in feature_weights if w > 0
                ],
                'negative_contributors': [
                    {'feature': f, 'contribution': round(w, 4)}
                    for f, w in feature_weights if w < 0
                ],
                'local_prediction': local_pred
            }

        except Exception as e:
            logger.warning(f"LIME explanation failed: {e}")
            return self._fallback_explanation()

    def _fallback_explanation(self) -> Dict:
        """Fallback when LIME unavailable."""
        return {
            'method': 'unavailable',
            'top_features': [],
            'message': 'LIME not available or explanation failed'
        }

    def generate_html_explanation(
        self,
        X: np.ndarray,
        predict_fn: Callable
    ) -> str:
        """Generate HTML explanation for one instance (first row of a batch)."""
        if self.explainer is None or not LIME_AVAILABLE:
            return "<p>LIME not available</p>"

        try:
            explanation = self.explainer.explain_instance(X[0] if X.ndim > 1 else X, predict_fn)
            return explanation.as_html()
        except Exception as e:
            # Log instead of swallowing silently, consistent with the
            # other handlers in this class.
            logger.warning(f"LIME HTML explanation failed: {e}")
            return "<p>Explanation failed</p>"


# Lazily-created module-wide explainer instance.
_explainer: Optional[LIMEExplainer] = None

def get_explainer() -> LIMEExplainer:
    """Return the shared LIMEExplainer, constructing it on first use."""
    global _explainer
    # None is the only falsy value this sentinel can hold, so `or` is a
    # safe shorthand for the is-None check.
    _explainer = _explainer or LIMEExplainer()
    return _explainer