# Personal_Code/DRW/DRW-Crypto/optimize_params.py
# Uploaded by ChanceuxMJ via huggingface_hub (commit c687548, verified)
# -*- coding: utf-8 -*-
# @Time : 2025/7
# @Author : Lukax
# @Email : Lukarxiang@gmail.com
# @File : optimize_params.py
# @Software: PyCharm
import os
import json
import argparse
import pandas as pd
from Utils import set_seed
from Settings import Config
from inplemental import load_data
from HyperparameterOptimizer import HyperparameterManager, quick_optimize_single_model
def parse_args():
    """Build the CLI argument parser and parse ``sys.argv``.

    Returns:
        argparse.Namespace with the fields: model, all, trials, cv_folds,
        sample_ratio, update_config, output_dir.
    """
    parser = argparse.ArgumentParser(description='超参数优化工具')
    # Which model(s) to tune: a single one, or every supported model.
    parser.add_argument('--model', type=str,
                        choices=['xgb', 'lgb', 'cat', 'rf'],
                        help='选择要优化的模型')
    parser.add_argument('--all', action='store_true', help='优化所有模型')
    # Search budget and cross-validation setup.
    parser.add_argument('--trials', type=int, default=200, help='搜参尝试次数')
    parser.add_argument('--cv-folds', type=int, default=5, help='交叉验证折数')
    parser.add_argument('--sample-ratio', type=float, default=None,
                        help='数据采样比例,用于快速测试 (默认全量)')
    # Output / side-effect options.
    parser.add_argument('--update-config', action='store_true',
                        help='是否自动更新Config文件')
    parser.add_argument('--output-dir', type=str,
                        default=os.path.join('results', 'optimization_results'),
                        help='结果输出目录')
    return parser.parse_args()
def prepare_data(sample_ratio=None):
    """Load the training set and return (X, y), optionally subsampled.

    Args:
        sample_ratio: fraction of rows to keep (0 < ratio < 1) for quick
            experiments; ``None`` (or any value >= 1) keeps the full dataset.

    Returns:
        Tuple of feature matrix and target vector (numpy arrays; NaNs in the
        features are replaced with 0).
    """
    train, _test, _submission = load_data()
    X = train[Config.FEATURES].fillna(0).values
    y = train[Config.TARGET].values
    # Guard clause: no (or full) sampling requested -> use everything.
    if not sample_ratio or sample_ratio >= 1:
        return X, y
    keep = int(len(X) * sample_ratio)
    print(f"sample ratio {sample_ratio}, num {keep}")
    # Sample row positions reproducibly, then index both arrays with them.
    picked = pd.Series(range(len(X))).sample(keep, random_state=Config.RANDOM_STATE)
    return X[picked], y[picked]
def optimize_single_model(model_name, X, y, trials, cv_folds, output_dir):
    """Run hyperparameter search for one model and persist the result as JSON.

    Args:
        model_name: CLI model key ('xgb', 'lgb', 'cat' or 'rf').
        X, y: feature matrix and target vector.
        trials: number of search trials.
        cv_folds: number of cross-validation folds.
        output_dir: directory the ``<model>_optimization_result.json`` file
            is written into.

    Returns:
        The optimizer's result dict (contains 'best_params', 'best_score',
        'n_trials').
    """
    # Bug fix: cv_folds was accepted but never forwarded, so the CLI's
    # --cv-folds flag silently had no effect for single-model runs, while
    # optimize_all_models does forward the same kwarg.
    # NOTE(review): assumes quick_optimize_single_model accepts cv_folds just
    # like HyperparameterManager.optimize_all_models — confirm its signature.
    result = quick_optimize_single_model(model_name, X, y,
                                         n_trials=trials, cv_folds=cv_folds)
    # Robustness: create the output dir so this helper also works when it is
    # called outside flow() (which normally does the makedirs).
    os.makedirs(output_dir, exist_ok=True)
    result_path = os.path.join(output_dir, f'{model_name}_optimization_result.json')
    with open(result_path, 'w', encoding='utf-8') as f:
        json.dump({
            'model_name': model_name,
            'best_params': result['best_params'],
            'best_score': result['best_score'],
            'n_trials': result['n_trials'],
            'optimization_time': str(pd.Timestamp.now())
        }, f, indent=2, ensure_ascii=False)
    print(f"{model_name} optimization completed!")
    print(f"Results saved to: {result_path}")
    return result
def optimize_all_models(X, y, trials, cv_folds, output_dir):
    """Optimize every supported model, plot the history and dump a summary.

    Args:
        X, y: feature matrix and target vector.
        trials: number of search trials per model.
        cv_folds: number of cross-validation folds.
        output_dir: directory receiving the history plot and summary JSON.

    Returns:
        Tuple of (HyperparameterManager instance, per-model results dict).
    """
    manager = HyperparameterManager()
    results = manager.optimize_all_models(X, y, n_trials=trials, cv_folds=cv_folds)
    # Plot the optimization history.
    manager.plot_optimization_history(os.path.join(output_dir, 'optimization_history.png'))
    # Save a compact summary of all per-model results.
    summary = {
        name: {
            'best_score': res['best_score'],
            'n_trials': res['n_trials'],
            'best_params': res['best_params'],
        }
        for name, res in results.items()
    }
    summary_path = os.path.join(output_dir, 'optimization_summary.json')
    with open(summary_path, 'w', encoding='utf-8') as f:
        json.dump(summary, f, indent=2, ensure_ascii=False)
    print(f"Optimization summary saved to: {summary_path}")
    return manager, results
def print_optimization_summary(results):
    """Pretty-print a summary of per-model results, best score first.

    Args:
        results: mapping of model name -> result dict with 'best_score',
            'n_trials' and 'best_params'.  An empty/None mapping prints
            nothing.
    """
    if not results:
        return
    banner = "=" * 60
    print("\n" + banner)
    print("Optimization Results Summary")
    print(banner)
    ranked = sorted(results.items(), key=lambda item: item[1]['best_score'], reverse=True)
    # Only a hand-picked subset of parameters is worth showing inline.
    interesting = ('learning_rate', 'n_estimators', 'max_depth', 'reg_alpha', 'reg_lambda')
    for name, info in ranked:
        print(f"\n{name.upper()}")
        print(f" Best score: {info['best_score']:.6f}")
        print(f" Trials: {info['n_trials']}")
        print(f" Key parameters:")
        params = info['best_params']
        for key in interesting:
            if key not in params:
                continue
            value = params[key]
            rendered = f"{value:.5f}" if isinstance(value, float) else f"{value}"
            print(f" {key}: {rendered}")
def flow():
    """CLI entry point: parse arguments, load data, run the chosen search.

    Raises:
        ValueError: if neither --model nor --all was supplied.
    """
    args = parse_args()
    set_seed(Config.RANDOM_STATE)
    os.makedirs(args.output_dir, exist_ok=True)
    print(f"Output directory: {args.output_dir}")
    X, y = prepare_data(getattr(args, 'sample_ratio', None))
    manager = None
    results = None
    if args.model:
        # Single-model search.
        single = optimize_single_model(args.model, X, y, args.trials,
                                       args.cv_folds, args.output_dir)
        if single:
            results = {args.model: single}
    elif args.all:
        # Search every supported model.
        manager, results = optimize_all_models(X, y, args.trials,
                                               args.cv_folds, args.output_dir)
    else:
        raise ValueError("Please specify --model or --all parameter")
    if results:
        print_optimization_summary(results)
    # Optionally write the best parameters back into the Config module;
    # only the all-models path produces a manager that can do this.
    if args.update_config and manager:
        try:
            manager.update_config()
            print("Config file automatically updated")
        except Exception as e:
            print(f"Config file update failed: {str(e)}")
            print("Please manually copy best parameters to Settings.py")
    print(f"\nHyperparameter optimization completed! Results saved in: {args.output_dir}")
# Run the CLI only when executed as a script, not when imported.
if __name__ == "__main__":
    flow()