import numpy as np
from sklearn.metrics import (
mean_absolute_error,
mean_squared_error,
r2_score,
accuracy_score,
precision_score,
recall_score,
f1_score,
roc_auc_score,
)
from sklearn.utils.multiclass import type_of_target
def regression_metrics(y_true, preds):
    """Return standard regression metrics as a dict.

    Args:
        y_true: Ground-truth target values.
        preds: Model predictions aligned with ``y_true``.

    Returns:
        dict mapping metric name ("MAE", "MSE", "RMSE", "R²") to its float value.
    """
    # Compute MSE once and reuse it for RMSE instead of calling
    # mean_squared_error twice on the same data.
    mse = mean_squared_error(y_true, preds)
    return {
        "MAE": mean_absolute_error(y_true, preds),
        "MSE": mse,
        "RMSE": np.sqrt(mse),
        "R²": r2_score(y_true, preds),
    }
def classification_metrics(pipeline, X_test, y_test, preds):
    """Return standard classification metrics as a dict.

    Computes accuracy, weighted precision/recall/F1 from ``preds``, and —
    when the fitted model exposes ``predict_proba`` — a ROC-AUC score
    appropriate to the target type (binary or multiclass).

    Args:
        pipeline: Fitted sklearn pipeline whose estimator step is named "model".
        X_test: Held-out features, used only for probability estimates.
        y_test: Ground-truth labels.
        preds: Hard-label predictions aligned with ``y_test``.

    Returns:
        dict mapping metric name to its float value.
    """
    results = {
        "Accuracy": accuracy_score(y_test, preds),
        "Precision": precision_score(y_test, preds, average="weighted"),
        "Recall": recall_score(y_test, preds, average="weighted"),
        "F1 Score": f1_score(y_test, preds, average="weighted"),
    }

    estimator = pipeline.named_steps["model"]
    if not hasattr(estimator, "predict_proba"):
        # No probability estimates available — skip ROC-AUC entirely.
        return results

    proba = pipeline.predict_proba(X_test)
    kind = type_of_target(y_test)
    if kind == "binary":
        # Positive-class column only for the binary case.
        results["ROC-AUC"] = roc_auc_score(y_test, proba[:, 1])
    elif kind == "multiclass":
        results["ROC-AUC"] = roc_auc_score(
            y_test, proba, multi_class="ovr", average="weighted"
        )
    return results