File size: 2,766 Bytes
abb9de4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
# learning_hub/model_trainer.py
# (V32.0 - DORMANT: Model Retraining Logic)
# ⚠️ This module is currently DISABLED by default.
# It is kept for future manual triggering of model retraining.

import os
import asyncio
import traceback
import numpy as np
import pandas as pd
import xgboost as xgb
from typing import List, Dict

class ModelTrainer:
    """
    🏋️‍♂️ The Gym: Responsible for heavy lifting (Model Retraining).
    Not active in the daily loop. Only triggered manually for regime updates.
    """
    def __init__(self, r2_service):
        # r2_service: external storage client, kept for the (currently
        # commented-out) upload step; it is stored but never called here.
        self.r2 = r2_service
        self.local_model_path = "ml_models/layer2/Titan_XGB_V1.json"
        print("💤 [ModelTrainer] Module loaded but dormant.")

    async def train_model_manually(self, training_data: List[Dict]):
        """
        Manually (re)train the XGBoost model on demand.

        Expects a list of dicts, each with a 'features' sequence and a
        numeric 'label'. Warm-starts from the saved model when one exists,
        otherwise trains from scratch.

        Returns "Training Success" on success, None when the input is
        empty or missing the required columns, or an "Error: ..." string
        on failure.
        """
        print("🏋️‍♂️ [ModelTrainer] Manual training sequence started...")
        try:
            if not training_data:
                print("❌ [Trainer] No data provided.")
                return

            # 1. Prepare Data
            df = pd.DataFrame(training_data)
            if 'features' not in df.columns or 'label' not in df.columns:
                print("❌ [Trainer] Data missing features/label columns.")
                return

            X = np.array(df['features'].tolist())
            y = np.array(df['label'].tolist())
            dtrain = xgb.DMatrix(X, label=y)

            # 2. Load Existing Model (Warm Start)
            # BUG FIX: the original always constructed an (untrained)
            # xgb.Booster() and passed it as xgb_model even when no saved
            # model existed, which makes xgb.train raise on first-time
            # training. Only warm-start when a model file is present.
            model = None
            if os.path.exists(self.local_model_path):
                model = xgb.Booster()
                model.load_model(self.local_model_path)
                print("   -> Loaded existing model for incremental training.")

            # 3. Train (Incremental)
            params = {
                'eta': 0.01,
                'max_depth': 6,
                'objective': 'binary:logistic',
                'eval_metric': 'logloss'
            }

            # Update the model with only 50 boosting rounds
            # (xgb_model=None means train from scratch).
            new_model = xgb.train(params, dtrain, num_boost_round=50, xgb_model=model)

            # 4. Save — ensure the target directory exists first, otherwise
            # save_model fails on a fresh checkout without ml_models/layer2/.
            os.makedirs(os.path.dirname(self.local_model_path), exist_ok=True)
            new_model.save_model(self.local_model_path)
            print(f"✅ [Trainer] Model updated and saved to {self.local_model_path}")

            # 5. Upload to R2 (Logic commented out until needed)
            # await self.r2.upload_file_async(...)

            return "Training Success"

        except Exception as e:
            # Broad catch is deliberate: this is a manual top-level entry
            # point; log the full traceback and report the error as text.
            print(f"❌ [Trainer] Training failed: {e}")
            traceback.print_exc()
            return f"Error: {e}"