"""
Meta Learner
Learns to combine base model predictions optimally.

Part of the complete blueprint implementation.
"""

import numpy as np
import pandas as pd
from typing import Dict, List, Optional
import logging

logger = logging.getLogger(__name__)

try:
    from sklearn.linear_model import LogisticRegression
    from sklearn.ensemble import GradientBoostingClassifier
    from sklearn.calibration import CalibratedClassifierCV
    SKLEARN_AVAILABLE = True
except ImportError:
    SKLEARN_AVAILABLE = False


class MetaLearner:
    """
    Meta-learner that learns optimal combination of base models.
    
    Features:
    - Learns from base model predictions
    - Calibrated probability outputs
    - Automatic weight learning
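    
    Example (illustrative; ``probs_a`` / ``probs_b`` stand for (n, 3)
    arrays of [home, draw, away] probabilities, see the __main__ demo
    at the bottom of this file):
    
        learner = MetaLearner(meta_model='logistic').fit(
            {'model_a': probs_a, 'model_b': probs_b}, targets
        )
        result = learner.predict(live_predictions)  # {'1x2': {...}, ...}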
    """
    
    def __init__(
        self,
        meta_model: str = 'logistic',
        calibrate: bool = True
    ):
        self.meta_model_type = meta_model
        self.calibrate = calibrate
        self.meta_model = None
        # Uncalibrated copy of the fitted meta-model; get_model_weights()
        # reads coefficients from here when calibration wraps meta_model.
        self._raw_model = None
        self.base_model_names: List[str] = []
        self.is_fitted = False
    
    def fit(
        self,
        base_predictions: Dict[str, np.ndarray],
        targets: np.ndarray
    ) -> 'MetaLearner':
        """
        Fit meta-learner on base model predictions.
        
        Args:
            base_predictions: Dict of model_name -> predictions array
            targets: True labels
        """
        if not SKLEARN_AVAILABLE:
            logger.warning("sklearn not available, using simple averaging")
            return self
        
        self.base_model_names = list(base_predictions.keys())
        
        # Stack predictions as features
        X = np.column_stack([base_predictions[name] for name in self.base_model_names])
        
        # Create meta-model
        if self.meta_model_type == 'logistic':
            self.meta_model = LogisticRegression(max_iter=1000)
        elif self.meta_model_type == 'gbm':
            self.meta_model = GradientBoostingClassifier(
                n_estimators=50, max_depth=3
            )
        else:
            self.meta_model = LogisticRegression(max_iter=1000)
        
        self.meta_model.fit(X, targets)
        self._raw_model = self.meta_model
        
        if self.calibrate:
            # Sigmoid (Platt) calibration via cross-validation, so that
            # predict_proba returns calibrated probabilities; the raw model
            # is kept above for coefficient-based weights.
            self.meta_model = CalibratedClassifierCV(self._raw_model, cv=3)
            self.meta_model.fit(X, targets)
        
        self.is_fitted = True
        
        logger.info(f"Meta-learner fitted with {len(self.base_model_names)} base models")
        
        return self
    
    def predict(
        self,
        base_predictions: Dict[str, Dict]
    ) -> Dict:
        """
        Make prediction using meta-learner.
        
        Args:
            base_predictions: Dict of model_name -> prediction_dict
        """
        if not self.is_fitted or self.meta_model is None:
            # Fall back to averaging
            return self._average_predictions(base_predictions)
        
        # Extract probabilities from each model
        features = []
        for name in self.base_model_names:
            if name in base_predictions and '1x2' in base_predictions[name]:
                probs = base_predictions[name]['1x2']
                features.extend([
                    probs.get('home', 0.33),
                    probs.get('draw', 0.33),
                    probs.get('away', 0.34)
                ])
            else:
                features.extend([0.33, 0.33, 0.34])
        
        X = np.array(features).reshape(1, -1)
        
        # Assumes targets were encoded 0=home, 1=draw, 2=away at fit time,
        # so the predict_proba columns come back in that order.
        probs = self.meta_model.predict_proba(X)[0]
        
        return {
            '1x2': {
                'home': round(float(probs[0]), 4),
                'draw': round(float(probs[1]), 4) if len(probs) > 1 else 0.25,
                'away': round(float(probs[2]), 4) if len(probs) > 2 else 0.35
            },
            'method': 'meta_learner',
            'base_models': self.base_model_names
        }
    
    def _average_predictions(
        self,
        base_predictions: Dict[str, Dict]
    ) -> Dict:
        """Simple average fallback."""
        home = draw = away = 0
        count = 0
        
        for name, pred in base_predictions.items():
            if '1x2' in pred:
                home += pred['1x2'].get('home', 0)
                draw += pred['1x2'].get('draw', 0)
                away += pred['1x2'].get('away', 0)
                count += 1
        
        if count == 0:
            # No usable predictions at all: return a generic 1x2 prior.
            return {
                '1x2': {'home': 0.4, 'draw': 0.25, 'away': 0.35},
                'method': 'default_prior'
            }
        
        return {
            '1x2': {
                'home': round(home / count, 4),
                'draw': round(draw / count, 4),
                'away': round(away / count, 4)
            },
            'method': 'average_fallback'
        }
    
    def get_model_weights(self) -> Dict[str, float]:
        """Get learned weights for base models."""
        if not self.is_fitted or self.meta_model is None:
            return {name: 1.0 for name in self.base_model_names}
        
        # Read coefficients from the uncalibrated model, since the
        # CalibratedClassifierCV wrapper does not expose coef_ itself.
        model = self._raw_model if self._raw_model is not None else self.meta_model
        if hasattr(model, 'coef_'):
            coefs = np.abs(model.coef_).mean(axis=0)
            
            # Group by model (3 features per model)
            weights = {}
            for i, name in enumerate(self.base_model_names):
                start_idx = i * 3
                weights[name] = float(coefs[start_idx:start_idx + 3].mean())
            
            # Normalize
            total = sum(weights.values())
            if total > 0:
                weights = {k: v/total for k, v in weights.items()}
            
            return weights
        
        return {name: 1.0 / len(self.base_model_names) for name in self.base_model_names}


_meta_learner: Optional[MetaLearner] = None

def get_meta_learner() -> MetaLearner:
    """Return the process-wide MetaLearner singleton, creating it on first use."""
    global _meta_learner
    if _meta_learner is None:
        _meta_learner = MetaLearner()
    return _meta_learner
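

if __name__ == "__main__":
    # Minimal usage sketch on synthetic data. Model names and probability
    # values below are illustrative only, not part of the module API.
    rng = np.random.default_rng(0)
    n = 200

    def _fake_probs(rows: int) -> np.ndarray:
        """Random (rows, 3) matrix of [home, draw, away] probabilities."""
        raw = rng.random((rows, 3))
        return raw / raw.sum(axis=1, keepdims=True)

    train_preds = {'poisson': _fake_probs(n), 'elo': _fake_probs(n)}
    targets = rng.integers(0, 3, size=n)  # 0=home, 1=draw, 2=away

    learner = MetaLearner(meta_model='logistic').fit(train_preds, targets)

    # At inference time predict() takes one prediction dict per model.
    live = {
        'poisson': {'1x2': {'home': 0.50, 'draw': 0.27, 'away': 0.23}},
        'elo': {'1x2': {'home': 0.44, 'draw': 0.30, 'away': 0.26}},
    }
    print(learner.predict(live))
    print(learner.get_model_weights())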