akshayboora/afml / check_features.py
akshayboora's picture
download
raw
1.38 kB
import sys
import os
sys.path.append(os.getcwd())
import joblib
from pathlib import Path
import __main__
# The model bundles were (presumably) pickled from a script where
# calculate_daily_volatility lived in __main__; joblib/pickle will look it up
# on the __main__ module when loading. Register a no-op stub so joblib.load
# doesn't fail with AttributeError when the real function isn't defined here.
if not hasattr(__main__, "calculate_daily_volatility"):
    # Stub only needs to exist for unpickling; it is never actually called here.
    def calculate_daily_volatility(*args, **kwargs): pass
    __main__.calculate_daily_volatility = calculate_daily_volatility
def check_features():
    """Load the newest production model bundle and print its feature names.

    Searches Models/Dual_Production recursively for *.joblib bundles, loads
    the most recently modified one, and prints the estimator's
    ``feature_names_in_`` (set by scikit-learn at fit time) if present.
    Prints a diagnostic message and returns None in every case.
    """
    path = Path("Models/Dual_Production")
    files = list(path.rglob("*.joblib"))
    if not files:
        print("No models found")
        return
    # rglob order is filesystem-dependent, so files[-1] would be arbitrary;
    # pick the newest bundle explicitly by modification time.
    latest = max(files, key=lambda f: f.stat().st_mtime)
    model_data = joblib.load(latest)
    # Some metadata might contain feature names, or the model itself does
    print("Keys in joblib file:", model_data.keys())
    # Try to find the training features CSV file
    # (the reports directory also has a training_summary.html with feature importance)
    csv_files = list(path.rglob("cv_results.csv"))
    if csv_files:
        print("Found CSV, checking columns if possible")
    # Guard the lookup: a diagnostic script shouldn't crash with KeyError if
    # the bundle layout differs from the expected {"model": estimator, ...}.
    model = model_data.get("model")
    if model is None:
        print("No 'model' key found in joblib file")
        return
    if hasattr(model, "feature_names_in_"):
        features = model.feature_names_in_
        print(f"Features ({len(features)}):")
        for i, f in enumerate(features):
            print(f"{i}: {f}")
    else:
        print("Model doesn't have feature_names_in_")
# Script entry point: run the feature check only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    check_features()

Xet Storage Details

Size:
1.38 kB
·
Xet hash:
4d1a6b45d8578630ffc0022205e9ce77ed153e0e4a0581471e7827e9a1b21533

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.