# -*- coding: utf-8 -*-
# @Time : 2025/7
# @Author : Lukax
# @Email : Lukarxiang@gmail.com
# @File : optimize_params.py
# @Software : PyCharm
import os
import json
import argparse
import pandas as pd
from Utils import set_seed
from Settings import Config
from inplemental import load_data
from HyperparameterOptimizer import HyperparameterManager, quick_optimize_single_model
def parse_args(argv=None):
    """Parse command-line arguments for the hyperparameter optimization tool.

    Args:
        argv: Optional list of argument strings. Defaults to ``sys.argv[1:]``
            when ``None`` (the argparse default), so existing callers are
            unaffected; passing an explicit list makes the parser testable.

    Returns:
        argparse.Namespace with the parsed options (``model``, ``all``,
        ``trials``, ``cv_folds``, ``sample_ratio``, ``update_config``,
        ``output_dir``).
    """
    parser = argparse.ArgumentParser(description='超参数优化工具')
    parser.add_argument('--model', type=str, choices=['xgb', 'lgb', 'cat', 'rf'], help='选择要优化的模型')
    parser.add_argument('--all', action='store_true', help='优化所有模型')
    parser.add_argument('--trials', type=int, default=200, help='搜参尝试次数')
    parser.add_argument('--cv-folds', type=int, default=5, help='交叉验证折数')
    parser.add_argument('--sample-ratio', type=float, default=None, help='数据采样比例,用于快速测试 (默认全量)')
    parser.add_argument('--update-config', action='store_true', help='是否自动更新Config文件')
    parser.add_argument('--output-dir', type=str, default=os.path.join('results', 'optimization_results'), help='结果输出目录')
    return parser.parse_args(argv)
def prepare_data(sample_ratio=None):
    """Load the training data and optionally down-sample it.

    Args:
        sample_ratio: Fraction in (0, 1) of rows to keep; ``None``, 0, or a
            value >= 1 keeps the full dataset.

    Returns:
        Tuple ``(X, y)`` of the filled feature matrix and target array.
    """
    train, _test, _submission = load_data()
    features = train[Config.FEATURES].fillna(0).values
    target = train[Config.TARGET].values
    # Full dataset unless a genuine sub-unit ratio was requested.
    if not sample_ratio or sample_ratio >= 1:
        return features, target
    n_rows = int(len(features) * sample_ratio)
    print(f"sample ratio {sample_ratio}, num {n_rows}")
    # Reproducible row selection seeded by the project-wide random state.
    chosen = pd.Series(range(len(features))).sample(n_rows, random_state=Config.RANDOM_STATE)
    return features[chosen], target[chosen]
def optimize_single_model(model_name, X, y, trials, cv_folds, output_dir):
    """Run hyperparameter search for one model and persist the result as JSON.

    Args:
        model_name: One of the supported model keys (e.g. 'xgb', 'lgb').
        X, y: Feature matrix and target array.
        trials: Number of search trials to run.
        cv_folds: Accepted for interface symmetry with optimize_all_models;
            NOTE(review): not forwarded to quick_optimize_single_model —
            confirm whether that helper should receive it.
        output_dir: Directory where the result JSON is written.

    Returns:
        The result dict produced by quick_optimize_single_model.
    """
    outcome = quick_optimize_single_model(model_name, X, y, n_trials=trials)
    payload = {
        'model_name': model_name,
        'best_params': outcome['best_params'],
        'best_score': outcome['best_score'],
        'n_trials': outcome['n_trials'],
        'optimization_time': str(pd.Timestamp.now()),
    }
    result_path = os.path.join(output_dir, f'{model_name}_optimization_result.json')
    with open(result_path, 'w', encoding='utf-8') as fh:
        json.dump(payload, fh, indent=2, ensure_ascii=False)
    print(f"{model_name} optimization completed!")
    print(f"Results saved to: {result_path}")
    return outcome
def optimize_all_models(X, y, trials, cv_folds, output_dir):
    """Optimize every supported model, plot the search history, and save a summary.

    Args:
        X, y: Feature matrix and target array.
        trials: Number of search trials per model.
        cv_folds: Number of cross-validation folds.
        output_dir: Directory receiving the history plot and summary JSON.

    Returns:
        Tuple ``(manager, results)``: the HyperparameterManager used and the
        per-model result dicts it produced.
    """
    manager = HyperparameterManager()
    results = manager.optimize_all_models(X, y, n_trials=trials, cv_folds=cv_folds)
    # Plot optimization history across all models.
    manager.plot_optimization_history(os.path.join(output_dir, 'optimization_history.png'))
    # Condense every model's outcome into a JSON-serializable summary.
    summary = {
        name: {
            'best_score': res['best_score'],
            'n_trials': res['n_trials'],
            'best_params': res['best_params'],
        }
        for name, res in results.items()
    }
    summary_path = os.path.join(output_dir, 'optimization_summary.json')
    with open(summary_path, 'w', encoding='utf-8') as fh:
        json.dump(summary, fh, indent=2, ensure_ascii=False)
    print(f"Optimization summary saved to: {summary_path}")
    return manager, results
import io
import contextlib

def print_optimization_summary(results):
    """Pretty-print a score-ranked summary of optimization results to stdout.

    Args:
        results: Mapping of model name -> result dict with 'best_score',
            'n_trials', and 'best_params' keys. A falsy mapping prints nothing.
    """
    if not results:
        return
    banner = "=" * 60
    print("\n" + banner)
    print("Optimization Results Summary")
    print(banner)
    # Best-scoring model first.
    ranked = sorted(results.items(), key=lambda item: item[1]['best_score'], reverse=True)
    interesting = ['learning_rate', 'n_estimators', 'max_depth', 'reg_alpha', 'reg_lambda']
    for name, info in ranked:
        print(f"\n{name.upper()}")
        print(f" Best score: {info['best_score']:.6f}")
        print(f" Trials: {info['n_trials']}")
        print(f" Key parameters:")
        for key in interesting:
            if key not in info['best_params']:
                continue
            val = info['best_params'][key]
            # Floats get fixed 5-decimal formatting; everything else prints as-is.
            formatted = f"{val:.5f}" if isinstance(val, float) else f"{val}"
            print(f" {key}: {formatted}")
def flow():
    """CLI entry point: parse args, prepare data, optimize, and report results.

    Raises:
        ValueError: If neither ``--model`` nor ``--all`` was supplied.
    """
    args = parse_args()
    set_seed(Config.RANDOM_STATE)
    os.makedirs(args.output_dir, exist_ok=True)
    print(f"Output directory: {args.output_dir}")
    X, y = prepare_data(getattr(args, 'sample_ratio', None))
    manager = None
    results = None
    if args.model:
        # Single-model search.
        single = optimize_single_model(args.model, X, y, args.trials, args.cv_folds, args.output_dir)
        if single:
            results = {args.model: single}
    elif args.all:
        # Search across all supported models.
        manager, results = optimize_all_models(X, y, args.trials, args.cv_folds, args.output_dir)
    else:
        raise ValueError("Please specify --model or --all parameter")
    if results:
        print_optimization_summary(results)
    # Optionally write the best parameters back into the Config file.
    if args.update_config and manager:
        try:
            manager.update_config()
            print("Config file automatically updated")
        except Exception as e:
            print(f"Config file update failed: {str(e)}")
            print("Please manually copy best parameters to Settings.py")
    print(f"\nHyperparameter optimization completed! Results saved in: {args.output_dir}")
# Script entry point: run the optimization CLI when executed directly.
if __name__ == "__main__":
    flow()