File size: 1,382 Bytes
669d6a1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
import sys
import os
# Make the current working directory importable — presumably so that
# project modules referenced by the pickled model can be resolved when
# this script is run from the repo root. TODO confirm this is needed.
sys.path.append(os.getcwd())
import joblib
from pathlib import Path

import __main__
# NOTE(review): models pickled from a training script appear to reference
# `__main__.calculate_daily_volatility`; installing a no-op stand-in lets
# joblib.load unpickle them here without the original training module.
# Presumed from the shim's shape — confirm against the training script.
if not hasattr(__main__, "calculate_daily_volatility"):
    def calculate_daily_volatility(*args, **kwargs): pass
    __main__.calculate_daily_volatility = calculate_daily_volatility

def check_features():
    """Load the newest .joblib model under Models/Dual_Production and
    print the feature names it was trained with.

    Prints diagnostics to stdout and returns None. Degrades gracefully
    when no model exists, when the payload is not the expected dict, or
    when the estimator lacks `feature_names_in_`.
    """
    path = Path("Models/Dual_Production")
    files = list(path.rglob("*.joblib"))
    if not files:
        print("No models found")
        return

    # rglob order is filesystem-dependent, so files[-1] would be an
    # arbitrary file — pick the most recently modified model instead.
    latest = max(files, key=lambda p: p.stat().st_mtime)
    model_data = joblib.load(latest)

    # joblib.load can return any pickled object; only a dict payload has
    # the {"model": ...} layout this script expects.
    if not isinstance(model_data, dict):
        print(f"Unexpected payload type: {type(model_data).__name__}")
        return
    print("Keys in joblib file:", model_data.keys())

    # Try to find the training features CSV file (the reports directory
    # also has a training_summary.html with feature importance).
    csv_files = list(path.rglob("cv_results.csv"))
    if csv_files:
        print("Found CSV, checking columns if possible")

    # Guard the lookup: we just printed the keys precisely because the
    # layout is uncertain, so don't crash on a missing "model" entry.
    model = model_data.get("model")
    if model is None:
        print("No 'model' key in joblib payload")
        return

    # scikit-learn estimators fitted on a DataFrame expose the training
    # column names via feature_names_in_ — presumed sklearn, confirm.
    if hasattr(model, "feature_names_in_"):
        features = model.feature_names_in_
        print(f"Features ({len(features)}):")
        for i, f in enumerate(features):
            print(f"{i}: {f}")
    else:
        print("Model doesn't have feature_names_in_")

# Run the diagnostic only when executed as a script, not when imported.
if __name__ == "__main__":
    check_features()