booking-cancellation-api / artifacts / cv_metrics.json
j2damax's picture
Deploy app with models and artifacts (force update)
575d22a verified
{
"folds": 5,
"categorical_strategy": "target",
"include_mlp": false,
"results": {
"LogisticRegression": {
"folds": [
{
"accuracy": 0.8070190133176983,
"precision": 0.796804932735426,
"recall": 0.6429217548620534,
"f1_score": 0.711639549436796,
"roc_auc": 0.8867040527525519
},
{
"accuracy": 0.8130496691515202,
"precision": 0.8030994880309948,
"recall": 0.6561899378179763,
"f1_score": 0.7222498755599801,
"roc_auc": 0.890793004589075
},
{
"accuracy": 0.8100762207890108,
"precision": 0.8030089988751407,
"recall": 0.6456755228942905,
"f1_score": 0.7157987090305196,
"roc_auc": 0.889186762553699
},
{
"accuracy": 0.8098249434626016,
"precision": 0.7980609418282548,
"recall": 0.6514414923685699,
"f1_score": 0.7173358232181761,
"roc_auc": 0.8909674615600719
},
{
"accuracy": 0.8133009464779295,
"precision": 0.8094230497954578,
"recall": 0.648728094968909,
"f1_score": 0.7202209112589432,
"roc_auc": 0.8904796972569523
}
],
"aggregate": {
"accuracy_mean": 0.8106541586397521,
"accuracy_std": 0.0025971064646111972,
"precision_mean": 0.8020794822530547,
"precision_std": 0.0049950459694398566,
"recall_mean": 0.6489913605823598,
"recall_std": 0.005141178414132198,
"f1_score_mean": 0.717448973700883,
"f1_score_std": 0.004099325654318532,
"roc_auc_mean": 0.88962619574247,
"roc_auc_std": 0.0017762969131121652
}
},
"RandomForest": {
"folds": [
{
"accuracy": 0.8455063238127146,
"precision": 0.8715583105088655,
"recall": 0.6836273179556762,
"f1_score": 0.7662378809961345,
"roc_auc": 0.926749180073486
},
{
"accuracy": 0.8513275818745288,
"precision": 0.8743107592252227,
"recall": 0.699152063312606,
"f1_score": 0.7769820329187084,
"roc_auc": 0.9279393624961584
},
{
"accuracy": 0.8490242063824441,
"precision": 0.8785033227390927,
"recall": 0.6875070661390617,
"f1_score": 0.7713578994101604,
"roc_auc": 0.9265822313578301
},
{
"accuracy": 0.8504062316776949,
"precision": 0.8647115783649191,
"recall": 0.7067269643866592,
"f1_score": 0.7777777777777778,
"roc_auc": 0.9282198571471385
},
{
"accuracy": 0.8515369796465365,
"precision": 0.8821747908854918,
"recall": 0.6915771622385528,
"f1_score": 0.7753343050890424,
"roc_auc": 0.9290067109566416
}
],
"aggregate": {
"accuracy_mean": 0.8495602646787838,
"accuracy_std": 0.002473270473483476,
"precision_mean": 0.8742517523447184,
"precision_std": 0.006691849909345141,
"recall_mean": 0.6937181148065112,
"recall_std": 0.009270153023255628,
"f1_score_mean": 0.7735379792383648,
"f1_score_std": 0.004772535132748273,
"roc_auc_mean": 0.9276994684062508,
"roc_auc_std": 0.0010232916543342055
}
},
"XGBoost": {
"folds": [
{
"accuracy": 0.859703492754837,
"precision": 0.8373863915499877,
"recall": 0.7709181365897784,
"f1_score": 0.8027787589779819,
"roc_auc": 0.9366735339592387
},
{
"accuracy": 0.8613367953764972,
"precision": 0.8363724775103331,
"recall": 0.7778405879027699,
"f1_score": 0.8060453400503779,
"roc_auc": 0.9374002707516237
},
{
"accuracy": 0.8604992042884664,
"precision": 0.841635687732342,
"recall": 0.7678914641040135,
"f1_score": 0.8030741945019214,
"roc_auc": 0.9368807090577477
},
{
"accuracy": 0.8605829633972695,
"precision": 0.8370813981911513,
"recall": 0.774335782928208,
"f1_score": 0.8044869912491924,
"roc_auc": 0.9380101143228257
},
{
"accuracy": 0.8639333277493928,
"precision": 0.8406379352325298,
"recall": 0.7806670435274167,
"f1_score": 0.8095433495515564,
"roc_auc": 0.9392122068588733
}
],
"aggregate": {
"accuracy_mean": 0.8612111567132926,
"accuracy_std": 0.001627924593816666,
"precision_mean": 0.8386227780432687,
"precision_std": 0.002350895530949529,
"recall_mean": 0.7743306030104373,
"recall_std": 0.005137556231553914,
"f1_score_mean": 0.8051857268662059,
"f1_score_std": 0.0027598960622244877,
"roc_auc_mean": 0.9376353669900619,
"roc_auc_std": 0.0010218562950094605
}
}
},
"timestamp": "2025-10-05T12:26:19.675852+00:00"
}