Spaces:
Sleeping
Sleeping
Duplicate from bharat10/heart_disease_prediction
Browse files — Co-authored-by: Bharat Uttamchandani <bharat10@users.noreply.huggingface.co>
- .gitattributes +34 -0
- README.md +13 -0
- app.py +198 -0
- heart_disease_data.csv +304 -0
- requirements.txt +1 -0
.gitattributes
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Heart Disease Prediction
|
| 3 |
+
emoji: 👁
|
| 4 |
+
colorFrom: gray
|
| 5 |
+
colorTo: purple
|
| 6 |
+
sdk: gradio
|
| 7 |
+
sdk_version: 3.19.1
|
| 8 |
+
app_file: app.py
|
| 9 |
+
pinned: false
|
| 10 |
+
duplicated_from: bharat10/heart_disease_prediction
|
| 11 |
+
---
|
| 12 |
+
|
| 13 |
+
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
app.py
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import numpy as np
|
| 3 |
+
import pandas as pd
|
| 4 |
+
from sklearn.metrics import confusion_matrix
|
| 5 |
+
from sklearn.model_selection import train_test_split
|
| 6 |
+
from sklearn.tree import DecisionTreeClassifier
|
| 7 |
+
from sklearn.linear_model import LogisticRegression
|
| 8 |
+
from sklearn.metrics import accuracy_score
|
| 9 |
+
from sklearn.metrics import classification_report
|
| 10 |
+
from sklearn.ensemble import RandomForestClassifier
|
| 11 |
+
from sklearn.svm import SVC
|
| 12 |
+
from sklearn.model_selection import RandomizedSearchCV
|
| 13 |
+
from sklearn.preprocessing import StandardScaler
|
| 14 |
+
import matplotlib.pyplot as plt
|
| 15 |
+
# %matplotlib inline
|
| 16 |
+
import io
|
| 17 |
+
|
| 18 |
+
def importdata():
    """Load the bundled heart-disease CSV and echo its basic statistics.

    Reads 'heart_disease_data.csv' from the working directory, prints the
    row count, shape, and a head() preview for quick sanity checking, and
    returns the full dataset.

    Returns:
        pandas.DataFrame: the complete heart-disease dataset.
    """
    dataset = pd.read_csv('heart_disease_data.csv')

    # Report dataset size and a preview so problems show up in the logs.
    print("Dataset Length: ", len(dataset))
    print("Dataset Shape: ", dataset.shape)
    print("Dataset: ", dataset.head())

    return dataset
|
| 29 |
+
def splitdatasetL(heart_data, input_data):
    """Fit a logistic-regression model on the heart data and classify one sample.

    Despite the name, this both trains the model (on an 80/20 stratified
    split of heart_data) and predicts the class of a single patient.

    Args:
        heart_data: DataFrame containing a 'target' column plus features.
        input_data: sequence of feature values for one patient.

    Returns:
        The predicted class label (0 or 1) for input_data.
    """
    features = heart_data.drop(columns='target', axis=1)
    labels = heart_data['target']
    X_train, X_test, Y_train, Y_test = train_test_split(
        features, labels, test_size=0.2, stratify=labels, random_state=2)

    classifier = LogisticRegression()
    classifier.fit(X_train, Y_train)

    # predict() expects a 2-D array: reshape the single sample to (1, n_features).
    sample = np.asarray(input_data).reshape(1, -1)
    return classifier.predict(sample)[0]
|
| 44 |
+
def splitdataset(balance_data):
    """Split the dataset into features/target and 70/30 train/test partitions.

    The first 13 columns are treated as features and the 14th as the target,
    matching the heart-disease CSV layout.

    Args:
        balance_data: DataFrame whose .values hold features then target.

    Returns:
        Tuple (X, Y, X_train, X_test, y_train, y_test) of numpy arrays.
    """
    values = balance_data.values
    X = values[:, 0:13]
    Y = values[:, 13]

    # Fixed random_state keeps the split reproducible between runs.
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)

    return X, Y, X_train, X_test, y_train, y_test
|
| 55 |
+
def train_using_gini(X_train, X_test, y_train):
    """Train a depth-limited decision tree using the Gini impurity criterion.

    X_test is accepted only for signature compatibility with the entropy
    variant; it is not used during training.

    Returns:
        The fitted DecisionTreeClassifier.
    """
    tree = DecisionTreeClassifier(
        criterion="gini",
        random_state=100,
        max_depth=3,
        min_samples_leaf=5,
    )
    tree.fit(X_train, y_train)
    return tree
|
| 62 |
+
|
| 63 |
+
def tarin_using_entropy(X_train, X_test, y_train):
    """Train a depth-limited decision tree using the entropy criterion.

    NOTE(review): the function name contains a typo ('tarin') but is kept
    unchanged so existing callers keep working. X_test is unused; it exists
    only to mirror train_using_gini's signature.

    Returns:
        The fitted DecisionTreeClassifier.
    """
    tree = DecisionTreeClassifier(
        criterion="entropy",
        random_state=100,
        max_depth=3,
        min_samples_leaf=5,
    )
    tree.fit(X_train, y_train)
    return tree
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
# Function to make predictions
|
| 74 |
+
def prediction(X_test, clf_object):
    """Run a fitted classifier on X_test, echo the predictions, and return them.

    Args:
        X_test: feature rows to classify.
        clf_object: any object exposing a predict() method.

    Returns:
        Whatever clf_object.predict(X_test) returns.
    """
    predicted = clf_object.predict(X_test)
    print("Predicted values:")
    print(predicted)
    return predicted
|
| 81 |
+
def RandomF(X_train, y_train, X_test):
    """Fit a 1000-tree random forest and return its predictions for X_test.

    random_state is fixed so repeated calls produce identical forests.

    Returns:
        Array of predicted labels for X_test.
    """
    forest = RandomForestClassifier(n_estimators=1000, random_state=42)
    forest.fit(X_train, y_train)
    return forest.predict(X_test)
|
| 86 |
+
def SBM(df, X_test):
    """Train an RBF-kernel SVM on df and classify the rows of X_test.

    Args:
        df: DataFrame with a 'target' column plus the feature columns.
        X_test: feature rows to classify (same columns/order as df's features).

    Returns:
        Array of predicted labels for X_test.
    """
    X = df.drop('target', axis=1)
    y = df['target']
    X_train, X_T, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    scaler = StandardScaler()
    X_train_scaled = scaler.fit_transform(X_train)
    # BUG FIX: the original called scaler.fit_transform(X_test), which refits
    # the scaler on the test data — leaking test statistics and scaling the
    # train and test sets inconsistently. transform() applies the
    # training-set mean/std as intended.
    X_test_scaled = scaler.transform(X_test)

    svm = SVC(kernel='rbf', gamma=0.1)
    svm.fit(X_train_scaled, y_train)
    return svm.predict(X_test_scaled)
|
| 97 |
+
|
| 98 |
+
def SBF(new_data):
    """Train a linear SVM on the bundled heart-disease CSV and classify new_data.

    Reads 'heart_disease_data.csv', fits a linear SVC on a 70/30 split, and
    predicts the class of the (typically single-row) DataFrame new_data.

    Args:
        new_data: DataFrame with the same feature columns as the CSV
            (everything except 'target').

    Returns:
        The predicted class label (0 or 1) for the first row of new_data.
    """
    df = pd.read_csv('heart_disease_data.csv')
    X = df.drop('target', axis=1)
    y = df['target']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=5)

    svm = SVC(kernel='linear')
    svm.fit(X_train, y_train)

    # Leftover debug prints ("MEasdasdaGASDASD" and raw prediction dumps)
    # removed; only the predicted label is returned.
    return svm.predict(new_data)[0]
|
| 110 |
+
|
| 111 |
+
def _encode_chest_pain(label):
    """Map the chest-pain dropdown label to its dataset code (0-3)."""
    codes = {"Typical Angina": 0, "Non Typical Angina": 1, "Non Anginal Pain": 2}
    return codes.get(label, 3)  # "Asymptomatic" (or anything else) -> 3


def _encode_resting_ecg(label):
    """Map the resting-ECG dropdown label to its dataset code (0-2)."""
    codes = {"0 - Nothing to note": 0, "1 - ST-T abnormality": 1}
    return codes.get(label, 2)  # LV-hypertrophy option -> 2


def _verdict(model_name, accuracy, positive):
    """Build the user-facing verdict sentence for one model."""
    tail = ("you have high chances of having heart disease" if positive
            else "you are less likely to have heart disease")
    return (f"Based on our {model_name} Machine Learning model which has an "
            f"accuracy of {accuracy}%, " + tail)


def heart(age, gender, chestpaintype, restingbloodpressure, serumcholestrol, fastingbloodsugar, resting_ecg_result, maximumheartrate, exerciseinduced_angina, oldpeak, slope, ca, thal):
    """Classify one patient with four models and chart their accuracies.

    Trains decision-tree, random-forest, SVM, and logistic-regression models
    on the bundled CSV, encodes the Gradio inputs into the dataset's numeric
    feature codes, and returns one verdict string per model plus a bar chart.

    Returns:
        (logistic_msg, decision_tree_msg, random_forest_msg, svm_msg, fig)
        — ordered to match the Gradio output labels.
    """
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = tarin_using_entropy(X_train, X_test, y_train)

    # Encode the UI selections into the numeric codes the dataset uses.
    fbs = 1 if fastingbloodsugar > 120 else 0
    g = 0 if gender == "Female" else 1
    exang = 0 if exerciseinduced_angina == "No" else 1
    cp = _encode_chest_pain(chestpaintype)
    ecg = _encode_resting_ecg(resting_ecg_result)

    XX = np.array([age, g, cp, restingbloodpressure, serumcholestrol, fbs, ecg, maximumheartrate, exang, oldpeak, slope, ca, thal])

    # Overwrite test row 1 with the patient's features; the tree and forest
    # predictions for this patient are then read back at index 1.
    X_test[1] = XX

    new_data = pd.DataFrame({'age': [age], 'sex': [g], 'cp': [cp], 'trestbps': [restingbloodpressure],
                             'chol': [serumcholestrol], 'fbs': [fbs], 'restecg': [ecg],
                             'thalach': [maximumheartrate], 'exang': [exang], 'oldpeak': [oldpeak],
                             'slope': [slope], 'ca': [ca], 'thal': [thal]})

    y_pred_gini = prediction(X_test, clf_gini)
    k = RandomF(X_train, y_train, X_test)
    m = SBF(new_data)          # debug prints from the original removed
    pred = splitdatasetL(data, XX)

    SD = _verdict("Decision Tree", "82.42", y_pred_gini[1] == 1.0)
    SL = _verdict("Logistic Regression", "81.97", pred == 1)
    SR = _verdict("Random Forest", "82.42", k[1] == 1)
    SS = _verdict("SVM", "89.01", m == 1)

    models = ['Logistic Regression', 'Decision Tree', 'SVM', 'Random Forest']
    accuracies = [81.97, 82.42, 89.01, 82.42]

    # BUG FIX: the original used figsize=(40, 40) — a 40x40-inch figure that
    # renders unusably large in the browser. A conventional size is used.
    fig, ax = plt.subplots(figsize=(10, 6))
    ax.bar(models, accuracies)
    ax.set_xlabel('Models')
    ax.set_ylabel('Accuracy')
    ax.set_title('Machine Learning Models Accuracy')

    # BUG FIX: the original returned (SL, SD, SS, SR), but the Gradio output
    # labels are ordered Logistic Regression, Decision Tree, Random Forest,
    # SVM — so the Random Forest and SVM verdicts appeared under each
    # other's labels. Return order now matches the labels.
    return SL, SD, SR, SS, fig
|
| 190 |
+
|
| 191 |
+
# Build the Gradio UI: thirteen clinical inputs feed heart(); the outputs
# are four per-model verdict labels plus an accuracy bar chart.
_input_widgets = [
    "number",
    gr.Radio(["Male", "Female"]),
    gr.Dropdown(["Typical Angina", "Non Typical Angina", "Non Anginal Pain", "Asymptomatic"]),
    "number",
    "number",
    "number",
    gr.Dropdown(["0 - Nothing to note", "1 - ST-T abnormality", "2 - Possible or definite left ventricular hypertrophy"]),
    "number",
    gr.Radio(["No", "Yes"]),
    "number",
    "number",
    "number",
    "number",
]

_output_widgets = [
    gr.outputs.Label(label="Logistic Regression", type="text"),
    gr.outputs.Label(label="Decision Tree", type="auto"),
    gr.outputs.Label(label="Random Forest", type="text"),
    gr.outputs.Label(label="SVM", type="auto"),
    "plot",
]

interface = gr.Interface(
    fn=heart,
    inputs=_input_widgets,
    outputs=_output_widgets,
)

interface.launch()
|
heart_disease_data.csv
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
age,sex,cp,trestbps,chol,fbs,restecg,thalach,exang,oldpeak,slope,ca,thal,target
|
| 2 |
+
63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
|
| 3 |
+
37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
|
| 4 |
+
41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
|
| 5 |
+
56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
|
| 6 |
+
57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
|
| 7 |
+
57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
|
| 8 |
+
56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
|
| 9 |
+
44,1,1,120,263,0,1,173,0,0,2,0,3,1
|
| 10 |
+
52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
|
| 11 |
+
57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
|
| 12 |
+
54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
|
| 13 |
+
48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
|
| 14 |
+
49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
|
| 15 |
+
64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
|
| 16 |
+
58,0,3,150,283,1,0,162,0,1,2,0,2,1
|
| 17 |
+
50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
|
| 18 |
+
58,0,2,120,340,0,1,172,0,0,2,0,2,1
|
| 19 |
+
66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
|
| 20 |
+
43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
|
| 21 |
+
69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
|
| 22 |
+
59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
|
| 23 |
+
44,1,2,130,233,0,1,179,1,0.4,2,0,2,1
|
| 24 |
+
42,1,0,140,226,0,1,178,0,0,2,0,2,1
|
| 25 |
+
61,1,2,150,243,1,1,137,1,1,1,0,2,1
|
| 26 |
+
40,1,3,140,199,0,1,178,1,1.4,2,0,3,1
|
| 27 |
+
71,0,1,160,302,0,1,162,0,0.4,2,2,2,1
|
| 28 |
+
59,1,2,150,212,1,1,157,0,1.6,2,0,2,1
|
| 29 |
+
51,1,2,110,175,0,1,123,0,0.6,2,0,2,1
|
| 30 |
+
65,0,2,140,417,1,0,157,0,0.8,2,1,2,1
|
| 31 |
+
53,1,2,130,197,1,0,152,0,1.2,0,0,2,1
|
| 32 |
+
41,0,1,105,198,0,1,168,0,0,2,1,2,1
|
| 33 |
+
65,1,0,120,177,0,1,140,0,0.4,2,0,3,1
|
| 34 |
+
44,1,1,130,219,0,0,188,0,0,2,0,2,1
|
| 35 |
+
54,1,2,125,273,0,0,152,0,0.5,0,1,2,1
|
| 36 |
+
51,1,3,125,213,0,0,125,1,1.4,2,1,2,1
|
| 37 |
+
46,0,2,142,177,0,0,160,1,1.4,0,0,2,1
|
| 38 |
+
54,0,2,135,304,1,1,170,0,0,2,0,2,1
|
| 39 |
+
54,1,2,150,232,0,0,165,0,1.6,2,0,3,1
|
| 40 |
+
65,0,2,155,269,0,1,148,0,0.8,2,0,2,1
|
| 41 |
+
65,0,2,160,360,0,0,151,0,0.8,2,0,2,1
|
| 42 |
+
51,0,2,140,308,0,0,142,0,1.5,2,1,2,1
|
| 43 |
+
48,1,1,130,245,0,0,180,0,0.2,1,0,2,1
|
| 44 |
+
45,1,0,104,208,0,0,148,1,3,1,0,2,1
|
| 45 |
+
53,0,0,130,264,0,0,143,0,0.4,1,0,2,1
|
| 46 |
+
39,1,2,140,321,0,0,182,0,0,2,0,2,1
|
| 47 |
+
52,1,1,120,325,0,1,172,0,0.2,2,0,2,1
|
| 48 |
+
44,1,2,140,235,0,0,180,0,0,2,0,2,1
|
| 49 |
+
47,1,2,138,257,0,0,156,0,0,2,0,2,1
|
| 50 |
+
53,0,2,128,216,0,0,115,0,0,2,0,0,1
|
| 51 |
+
53,0,0,138,234,0,0,160,0,0,2,0,2,1
|
| 52 |
+
51,0,2,130,256,0,0,149,0,0.5,2,0,2,1
|
| 53 |
+
66,1,0,120,302,0,0,151,0,0.4,1,0,2,1
|
| 54 |
+
62,1,2,130,231,0,1,146,0,1.8,1,3,3,1
|
| 55 |
+
44,0,2,108,141,0,1,175,0,0.6,1,0,2,1
|
| 56 |
+
63,0,2,135,252,0,0,172,0,0,2,0,2,1
|
| 57 |
+
52,1,1,134,201,0,1,158,0,0.8,2,1,2,1
|
| 58 |
+
48,1,0,122,222,0,0,186,0,0,2,0,2,1
|
| 59 |
+
45,1,0,115,260,0,0,185,0,0,2,0,2,1
|
| 60 |
+
34,1,3,118,182,0,0,174,0,0,2,0,2,1
|
| 61 |
+
57,0,0,128,303,0,0,159,0,0,2,1,2,1
|
| 62 |
+
71,0,2,110,265,1,0,130,0,0,2,1,2,1
|
| 63 |
+
54,1,1,108,309,0,1,156,0,0,2,0,3,1
|
| 64 |
+
52,1,3,118,186,0,0,190,0,0,1,0,1,1
|
| 65 |
+
41,1,1,135,203,0,1,132,0,0,1,0,1,1
|
| 66 |
+
58,1,2,140,211,1,0,165,0,0,2,0,2,1
|
| 67 |
+
35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
|
| 68 |
+
51,1,2,100,222,0,1,143,1,1.2,1,0,2,1
|
| 69 |
+
45,0,1,130,234,0,0,175,0,0.6,1,0,2,1
|
| 70 |
+
44,1,1,120,220,0,1,170,0,0,2,0,2,1
|
| 71 |
+
62,0,0,124,209,0,1,163,0,0,2,0,2,1
|
| 72 |
+
54,1,2,120,258,0,0,147,0,0.4,1,0,3,1
|
| 73 |
+
51,1,2,94,227,0,1,154,1,0,2,1,3,1
|
| 74 |
+
29,1,1,130,204,0,0,202,0,0,2,0,2,1
|
| 75 |
+
51,1,0,140,261,0,0,186,1,0,2,0,2,1
|
| 76 |
+
43,0,2,122,213,0,1,165,0,0.2,1,0,2,1
|
| 77 |
+
55,0,1,135,250,0,0,161,0,1.4,1,0,2,1
|
| 78 |
+
51,1,2,125,245,1,0,166,0,2.4,1,0,2,1
|
| 79 |
+
59,1,1,140,221,0,1,164,1,0,2,0,2,1
|
| 80 |
+
52,1,1,128,205,1,1,184,0,0,2,0,2,1
|
| 81 |
+
58,1,2,105,240,0,0,154,1,0.6,1,0,3,1
|
| 82 |
+
41,1,2,112,250,0,1,179,0,0,2,0,2,1
|
| 83 |
+
45,1,1,128,308,0,0,170,0,0,2,0,2,1
|
| 84 |
+
60,0,2,102,318,0,1,160,0,0,2,1,2,1
|
| 85 |
+
52,1,3,152,298,1,1,178,0,1.2,1,0,3,1
|
| 86 |
+
42,0,0,102,265,0,0,122,0,0.6,1,0,2,1
|
| 87 |
+
67,0,2,115,564,0,0,160,0,1.6,1,0,3,1
|
| 88 |
+
68,1,2,118,277,0,1,151,0,1,2,1,3,1
|
| 89 |
+
46,1,1,101,197,1,1,156,0,0,2,0,3,1
|
| 90 |
+
54,0,2,110,214,0,1,158,0,1.6,1,0,2,1
|
| 91 |
+
58,0,0,100,248,0,0,122,0,1,1,0,2,1
|
| 92 |
+
48,1,2,124,255,1,1,175,0,0,2,2,2,1
|
| 93 |
+
57,1,0,132,207,0,1,168,1,0,2,0,3,1
|
| 94 |
+
52,1,2,138,223,0,1,169,0,0,2,4,2,1
|
| 95 |
+
54,0,1,132,288,1,0,159,1,0,2,1,2,1
|
| 96 |
+
45,0,1,112,160,0,1,138,0,0,1,0,2,1
|
| 97 |
+
53,1,0,142,226,0,0,111,1,0,2,0,3,1
|
| 98 |
+
62,0,0,140,394,0,0,157,0,1.2,1,0,2,1
|
| 99 |
+
52,1,0,108,233,1,1,147,0,0.1,2,3,3,1
|
| 100 |
+
43,1,2,130,315,0,1,162,0,1.9,2,1,2,1
|
| 101 |
+
53,1,2,130,246,1,0,173,0,0,2,3,2,1
|
| 102 |
+
42,1,3,148,244,0,0,178,0,0.8,2,2,2,1
|
| 103 |
+
59,1,3,178,270,0,0,145,0,4.2,0,0,3,1
|
| 104 |
+
63,0,1,140,195,0,1,179,0,0,2,2,2,1
|
| 105 |
+
42,1,2,120,240,1,1,194,0,0.8,0,0,3,1
|
| 106 |
+
50,1,2,129,196,0,1,163,0,0,2,0,2,1
|
| 107 |
+
68,0,2,120,211,0,0,115,0,1.5,1,0,2,1
|
| 108 |
+
69,1,3,160,234,1,0,131,0,0.1,1,1,2,1
|
| 109 |
+
45,0,0,138,236,0,0,152,1,0.2,1,0,2,1
|
| 110 |
+
50,0,1,120,244,0,1,162,0,1.1,2,0,2,1
|
| 111 |
+
50,0,0,110,254,0,0,159,0,0,2,0,2,1
|
| 112 |
+
64,0,0,180,325,0,1,154,1,0,2,0,2,1
|
| 113 |
+
57,1,2,150,126,1,1,173,0,0.2,2,1,3,1
|
| 114 |
+
64,0,2,140,313,0,1,133,0,0.2,2,0,3,1
|
| 115 |
+
43,1,0,110,211,0,1,161,0,0,2,0,3,1
|
| 116 |
+
55,1,1,130,262,0,1,155,0,0,2,0,2,1
|
| 117 |
+
37,0,2,120,215,0,1,170,0,0,2,0,2,1
|
| 118 |
+
41,1,2,130,214,0,0,168,0,2,1,0,2,1
|
| 119 |
+
56,1,3,120,193,0,0,162,0,1.9,1,0,3,1
|
| 120 |
+
46,0,1,105,204,0,1,172,0,0,2,0,2,1
|
| 121 |
+
46,0,0,138,243,0,0,152,1,0,1,0,2,1
|
| 122 |
+
64,0,0,130,303,0,1,122,0,2,1,2,2,1
|
| 123 |
+
59,1,0,138,271,0,0,182,0,0,2,0,2,1
|
| 124 |
+
41,0,2,112,268,0,0,172,1,0,2,0,2,1
|
| 125 |
+
54,0,2,108,267,0,0,167,0,0,2,0,2,1
|
| 126 |
+
39,0,2,94,199,0,1,179,0,0,2,0,2,1
|
| 127 |
+
34,0,1,118,210,0,1,192,0,0.7,2,0,2,1
|
| 128 |
+
47,1,0,112,204,0,1,143,0,0.1,2,0,2,1
|
| 129 |
+
67,0,2,152,277,0,1,172,0,0,2,1,2,1
|
| 130 |
+
52,0,2,136,196,0,0,169,0,0.1,1,0,2,1
|
| 131 |
+
74,0,1,120,269,0,0,121,1,0.2,2,1,2,1
|
| 132 |
+
54,0,2,160,201,0,1,163,0,0,2,1,2,1
|
| 133 |
+
49,0,1,134,271,0,1,162,0,0,1,0,2,1
|
| 134 |
+
42,1,1,120,295,0,1,162,0,0,2,0,2,1
|
| 135 |
+
41,1,1,110,235,0,1,153,0,0,2,0,2,1
|
| 136 |
+
41,0,1,126,306,0,1,163,0,0,2,0,2,1
|
| 137 |
+
49,0,0,130,269,0,1,163,0,0,2,0,2,1
|
| 138 |
+
60,0,2,120,178,1,1,96,0,0,2,0,2,1
|
| 139 |
+
62,1,1,128,208,1,0,140,0,0,2,0,2,1
|
| 140 |
+
57,1,0,110,201,0,1,126,1,1.5,1,0,1,1
|
| 141 |
+
64,1,0,128,263,0,1,105,1,0.2,1,1,3,1
|
| 142 |
+
51,0,2,120,295,0,0,157,0,0.6,2,0,2,1
|
| 143 |
+
43,1,0,115,303,0,1,181,0,1.2,1,0,2,1
|
| 144 |
+
42,0,2,120,209,0,1,173,0,0,1,0,2,1
|
| 145 |
+
67,0,0,106,223,0,1,142,0,0.3,2,2,2,1
|
| 146 |
+
76,0,2,140,197,0,2,116,0,1.1,1,0,2,1
|
| 147 |
+
70,1,1,156,245,0,0,143,0,0,2,0,2,1
|
| 148 |
+
44,0,2,118,242,0,1,149,0,0.3,1,1,2,1
|
| 149 |
+
60,0,3,150,240,0,1,171,0,0.9,2,0,2,1
|
| 150 |
+
44,1,2,120,226,0,1,169,0,0,2,0,2,1
|
| 151 |
+
42,1,2,130,180,0,1,150,0,0,2,0,2,1
|
| 152 |
+
66,1,0,160,228,0,0,138,0,2.3,2,0,1,1
|
| 153 |
+
71,0,0,112,149,0,1,125,0,1.6,1,0,2,1
|
| 154 |
+
64,1,3,170,227,0,0,155,0,0.6,1,0,3,1
|
| 155 |
+
66,0,2,146,278,0,0,152,0,0,1,1,2,1
|
| 156 |
+
39,0,2,138,220,0,1,152,0,0,1,0,2,1
|
| 157 |
+
58,0,0,130,197,0,1,131,0,0.6,1,0,2,1
|
| 158 |
+
47,1,2,130,253,0,1,179,0,0,2,0,2,1
|
| 159 |
+
35,1,1,122,192,0,1,174,0,0,2,0,2,1
|
| 160 |
+
58,1,1,125,220,0,1,144,0,0.4,1,4,3,1
|
| 161 |
+
56,1,1,130,221,0,0,163,0,0,2,0,3,1
|
| 162 |
+
56,1,1,120,240,0,1,169,0,0,0,0,2,1
|
| 163 |
+
55,0,1,132,342,0,1,166,0,1.2,2,0,2,1
|
| 164 |
+
41,1,1,120,157,0,1,182,0,0,2,0,2,1
|
| 165 |
+
38,1,2,138,175,0,1,173,0,0,2,4,2,1
|
| 166 |
+
38,1,2,138,175,0,1,173,0,0,2,4,2,1
|
| 167 |
+
67,1,0,160,286,0,0,108,1,1.5,1,3,2,0
|
| 168 |
+
67,1,0,120,229,0,0,129,1,2.6,1,2,3,0
|
| 169 |
+
62,0,0,140,268,0,0,160,0,3.6,0,2,2,0
|
| 170 |
+
63,1,0,130,254,0,0,147,0,1.4,1,1,3,0
|
| 171 |
+
53,1,0,140,203,1,0,155,1,3.1,0,0,3,0
|
| 172 |
+
56,1,2,130,256,1,0,142,1,0.6,1,1,1,0
|
| 173 |
+
48,1,1,110,229,0,1,168,0,1,0,0,3,0
|
| 174 |
+
58,1,1,120,284,0,0,160,0,1.8,1,0,2,0
|
| 175 |
+
58,1,2,132,224,0,0,173,0,3.2,2,2,3,0
|
| 176 |
+
60,1,0,130,206,0,0,132,1,2.4,1,2,3,0
|
| 177 |
+
40,1,0,110,167,0,0,114,1,2,1,0,3,0
|
| 178 |
+
60,1,0,117,230,1,1,160,1,1.4,2,2,3,0
|
| 179 |
+
64,1,2,140,335,0,1,158,0,0,2,0,2,0
|
| 180 |
+
43,1,0,120,177,0,0,120,1,2.5,1,0,3,0
|
| 181 |
+
57,1,0,150,276,0,0,112,1,0.6,1,1,1,0
|
| 182 |
+
55,1,0,132,353,0,1,132,1,1.2,1,1,3,0
|
| 183 |
+
65,0,0,150,225,0,0,114,0,1,1,3,3,0
|
| 184 |
+
61,0,0,130,330,0,0,169,0,0,2,0,2,0
|
| 185 |
+
58,1,2,112,230,0,0,165,0,2.5,1,1,3,0
|
| 186 |
+
50,1,0,150,243,0,0,128,0,2.6,1,0,3,0
|
| 187 |
+
44,1,0,112,290,0,0,153,0,0,2,1,2,0
|
| 188 |
+
60,1,0,130,253,0,1,144,1,1.4,2,1,3,0
|
| 189 |
+
54,1,0,124,266,0,0,109,1,2.2,1,1,3,0
|
| 190 |
+
50,1,2,140,233,0,1,163,0,0.6,1,1,3,0
|
| 191 |
+
41,1,0,110,172,0,0,158,0,0,2,0,3,0
|
| 192 |
+
51,0,0,130,305,0,1,142,1,1.2,1,0,3,0
|
| 193 |
+
58,1,0,128,216,0,0,131,1,2.2,1,3,3,0
|
| 194 |
+
54,1,0,120,188,0,1,113,0,1.4,1,1,3,0
|
| 195 |
+
60,1,0,145,282,0,0,142,1,2.8,1,2,3,0
|
| 196 |
+
60,1,2,140,185,0,0,155,0,3,1,0,2,0
|
| 197 |
+
59,1,0,170,326,0,0,140,1,3.4,0,0,3,0
|
| 198 |
+
46,1,2,150,231,0,1,147,0,3.6,1,0,2,0
|
| 199 |
+
67,1,0,125,254,1,1,163,0,0.2,1,2,3,0
|
| 200 |
+
62,1,0,120,267,0,1,99,1,1.8,1,2,3,0
|
| 201 |
+
65,1,0,110,248,0,0,158,0,0.6,2,2,1,0
|
| 202 |
+
44,1,0,110,197,0,0,177,0,0,2,1,2,0
|
| 203 |
+
60,1,0,125,258,0,0,141,1,2.8,1,1,3,0
|
| 204 |
+
58,1,0,150,270,0,0,111,1,0.8,2,0,3,0
|
| 205 |
+
68,1,2,180,274,1,0,150,1,1.6,1,0,3,0
|
| 206 |
+
62,0,0,160,164,0,0,145,0,6.2,0,3,3,0
|
| 207 |
+
52,1,0,128,255,0,1,161,1,0,2,1,3,0
|
| 208 |
+
59,1,0,110,239,0,0,142,1,1.2,1,1,3,0
|
| 209 |
+
60,0,0,150,258,0,0,157,0,2.6,1,2,3,0
|
| 210 |
+
49,1,2,120,188,0,1,139,0,2,1,3,3,0
|
| 211 |
+
59,1,0,140,177,0,1,162,1,0,2,1,3,0
|
| 212 |
+
57,1,2,128,229,0,0,150,0,0.4,1,1,3,0
|
| 213 |
+
61,1,0,120,260,0,1,140,1,3.6,1,1,3,0
|
| 214 |
+
39,1,0,118,219,0,1,140,0,1.2,1,0,3,0
|
| 215 |
+
61,0,0,145,307,0,0,146,1,1,1,0,3,0
|
| 216 |
+
56,1,0,125,249,1,0,144,1,1.2,1,1,2,0
|
| 217 |
+
43,0,0,132,341,1,0,136,1,3,1,0,3,0
|
| 218 |
+
62,0,2,130,263,0,1,97,0,1.2,1,1,3,0
|
| 219 |
+
63,1,0,130,330,1,0,132,1,1.8,2,3,3,0
|
| 220 |
+
65,1,0,135,254,0,0,127,0,2.8,1,1,3,0
|
| 221 |
+
48,1,0,130,256,1,0,150,1,0,2,2,3,0
|
| 222 |
+
63,0,0,150,407,0,0,154,0,4,1,3,3,0
|
| 223 |
+
55,1,0,140,217,0,1,111,1,5.6,0,0,3,0
|
| 224 |
+
65,1,3,138,282,1,0,174,0,1.4,1,1,2,0
|
| 225 |
+
56,0,0,200,288,1,0,133,1,4,0,2,3,0
|
| 226 |
+
54,1,0,110,239,0,1,126,1,2.8,1,1,3,0
|
| 227 |
+
70,1,0,145,174,0,1,125,1,2.6,0,0,3,0
|
| 228 |
+
62,1,1,120,281,0,0,103,0,1.4,1,1,3,0
|
| 229 |
+
35,1,0,120,198,0,1,130,1,1.6,1,0,3,0
|
| 230 |
+
59,1,3,170,288,0,0,159,0,0.2,1,0,3,0
|
| 231 |
+
64,1,2,125,309,0,1,131,1,1.8,1,0,3,0
|
| 232 |
+
47,1,2,108,243,0,1,152,0,0,2,0,2,0
|
| 233 |
+
57,1,0,165,289,1,0,124,0,1,1,3,3,0
|
| 234 |
+
55,1,0,160,289,0,0,145,1,0.8,1,1,3,0
|
| 235 |
+
64,1,0,120,246,0,0,96,1,2.2,0,1,2,0
|
| 236 |
+
70,1,0,130,322,0,0,109,0,2.4,1,3,2,0
|
| 237 |
+
51,1,0,140,299,0,1,173,1,1.6,2,0,3,0
|
| 238 |
+
58,1,0,125,300,0,0,171,0,0,2,2,3,0
|
| 239 |
+
60,1,0,140,293,0,0,170,0,1.2,1,2,3,0
|
| 240 |
+
77,1,0,125,304,0,0,162,1,0,2,3,2,0
|
| 241 |
+
35,1,0,126,282,0,0,156,1,0,2,0,3,0
|
| 242 |
+
70,1,2,160,269,0,1,112,1,2.9,1,1,3,0
|
| 243 |
+
59,0,0,174,249,0,1,143,1,0,1,0,2,0
|
| 244 |
+
64,1,0,145,212,0,0,132,0,2,1,2,1,0
|
| 245 |
+
57,1,0,152,274,0,1,88,1,1.2,1,1,3,0
|
| 246 |
+
56,1,0,132,184,0,0,105,1,2.1,1,1,1,0
|
| 247 |
+
48,1,0,124,274,0,0,166,0,0.5,1,0,3,0
|
| 248 |
+
56,0,0,134,409,0,0,150,1,1.9,1,2,3,0
|
| 249 |
+
66,1,1,160,246,0,1,120,1,0,1,3,1,0
|
| 250 |
+
54,1,1,192,283,0,0,195,0,0,2,1,3,0
|
| 251 |
+
69,1,2,140,254,0,0,146,0,2,1,3,3,0
|
| 252 |
+
51,1,0,140,298,0,1,122,1,4.2,1,3,3,0
|
| 253 |
+
43,1,0,132,247,1,0,143,1,0.1,1,4,3,0
|
| 254 |
+
62,0,0,138,294,1,1,106,0,1.9,1,3,2,0
|
| 255 |
+
67,1,0,100,299,0,0,125,1,0.9,1,2,2,0
|
| 256 |
+
59,1,3,160,273,0,0,125,0,0,2,0,2,0
|
| 257 |
+
45,1,0,142,309,0,0,147,1,0,1,3,3,0
|
| 258 |
+
58,1,0,128,259,0,0,130,1,3,1,2,3,0
|
| 259 |
+
50,1,0,144,200,0,0,126,1,0.9,1,0,3,0
|
| 260 |
+
62,0,0,150,244,0,1,154,1,1.4,1,0,2,0
|
| 261 |
+
38,1,3,120,231,0,1,182,1,3.8,1,0,3,0
|
| 262 |
+
66,0,0,178,228,1,1,165,1,1,1,2,3,0
|
| 263 |
+
52,1,0,112,230,0,1,160,0,0,2,1,2,0
|
| 264 |
+
53,1,0,123,282,0,1,95,1,2,1,2,3,0
|
| 265 |
+
63,0,0,108,269,0,1,169,1,1.8,1,2,2,0
|
| 266 |
+
54,1,0,110,206,0,0,108,1,0,1,1,2,0
|
| 267 |
+
66,1,0,112,212,0,0,132,1,0.1,2,1,2,0
|
| 268 |
+
55,0,0,180,327,0,2,117,1,3.4,1,0,2,0
|
| 269 |
+
49,1,2,118,149,0,0,126,0,0.8,2,3,2,0
|
| 270 |
+
54,1,0,122,286,0,0,116,1,3.2,1,2,2,0
|
| 271 |
+
56,1,0,130,283,1,0,103,1,1.6,0,0,3,0
|
| 272 |
+
46,1,0,120,249,0,0,144,0,0.8,2,0,3,0
|
| 273 |
+
61,1,3,134,234,0,1,145,0,2.6,1,2,2,0
|
| 274 |
+
67,1,0,120,237,0,1,71,0,1,1,0,2,0
|
| 275 |
+
58,1,0,100,234,0,1,156,0,0.1,2,1,3,0
|
| 276 |
+
47,1,0,110,275,0,0,118,1,1,1,1,2,0
|
| 277 |
+
52,1,0,125,212,0,1,168,0,1,2,2,3,0
|
| 278 |
+
58,1,0,146,218,0,1,105,0,2,1,1,3,0
|
| 279 |
+
57,1,1,124,261,0,1,141,0,0.3,2,0,3,0
|
| 280 |
+
58,0,1,136,319,1,0,152,0,0,2,2,2,0
|
| 281 |
+
61,1,0,138,166,0,0,125,1,3.6,1,1,2,0
|
| 282 |
+
42,1,0,136,315,0,1,125,1,1.8,1,0,1,0
|
| 283 |
+
52,1,0,128,204,1,1,156,1,1,1,0,0,0
|
| 284 |
+
59,1,2,126,218,1,1,134,0,2.2,1,1,1,0
|
| 285 |
+
40,1,0,152,223,0,1,181,0,0,2,0,3,0
|
| 286 |
+
61,1,0,140,207,0,0,138,1,1.9,2,1,3,0
|
| 287 |
+
46,1,0,140,311,0,1,120,1,1.8,1,2,3,0
|
| 288 |
+
59,1,3,134,204,0,1,162,0,0.8,2,2,2,0
|
| 289 |
+
57,1,1,154,232,0,0,164,0,0,2,1,2,0
|
| 290 |
+
57,1,0,110,335,0,1,143,1,3,1,1,3,0
|
| 291 |
+
55,0,0,128,205,0,2,130,1,2,1,1,3,0
|
| 292 |
+
61,1,0,148,203,0,1,161,0,0,2,1,3,0
|
| 293 |
+
58,1,0,114,318,0,2,140,0,4.4,0,3,1,0
|
| 294 |
+
58,0,0,170,225,1,0,146,1,2.8,1,2,1,0
|
| 295 |
+
67,1,2,152,212,0,0,150,0,0.8,1,0,3,0
|
| 296 |
+
44,1,0,120,169,0,1,144,1,2.8,0,0,1,0
|
| 297 |
+
63,1,0,140,187,0,0,144,1,4,2,2,3,0
|
| 298 |
+
63,0,0,124,197,0,1,136,1,0,1,0,2,0
|
| 299 |
+
59,1,0,164,176,1,0,90,0,1,1,2,1,0
|
| 300 |
+
57,0,0,140,241,0,1,123,1,0.2,1,0,3,0
|
| 301 |
+
45,1,3,110,264,0,1,132,0,1.2,1,0,3,0
|
| 302 |
+
68,1,0,144,193,1,1,141,0,3.4,1,2,3,0
|
| 303 |
+
57,1,0,130,131,0,1,115,1,1.2,1,1,3,0
|
| 304 |
+
57,0,1,130,236,0,0,174,0,0,1,1,2,0
|
requirements.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
scikit-learn
|