varshitha22 committed
Commit 0a51192 (verified) · 1 Parent(s): d7895c3

Create DecisionBoundaries_LearningCurves_Algorithms.py

DecisionBoundaries_LearningCurves_Algorithms.py ADDED
@@ -0,0 +1,125 @@
+ import streamlit as st
+ import numpy as np
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+ from sklearn.datasets import make_classification, make_moons, make_circles, make_blobs
+ from sklearn.model_selection import train_test_split, learning_curve
+ from sklearn.neighbors import KNeighborsClassifier
+ from sklearn.tree import DecisionTreeClassifier
+ from sklearn.naive_bayes import GaussianNB
+ from sklearn.linear_model import LogisticRegression
+ from sklearn.svm import SVC
+ from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, roc_auc_score
+ from mlxtend.plotting import plot_decision_regions
+
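+ # Third-party packages required by the imports above: streamlit, numpy,
+ # matplotlib, seaborn, scikit-learn, and mlxtend.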
+ # App logo, followed by a line break for spacing
+ st.image("https://huggingface.co/spaces/varshitha22/KNN/resolve/main/logo.png")
+ st.markdown("<br>", unsafe_allow_html=True)
+
+ # Sidebar: dataset selection
+ st.sidebar.header("Dataset Options")
+ data_type = st.sidebar.selectbox("Select Data Type:", ["Blobs", "Circles", "Moons", "Classification"])
+ noise = st.sidebar.slider("Add Noise:", 0.0, 1.0, 0.2, step=0.05)
+
+ # Sidebar: model selection
+ st.sidebar.header("Model Selection")
+ model_name = st.sidebar.radio("Choose a Model:", ["KNN", "Decision Tree", "Naive Bayes", "Logistic Regression", "SVC"])
+
+ # Show the neighbors/weights controls only if KNN is selected
+ neighbors = None
+ if model_name == "KNN":
+     neighbors = st.sidebar.number_input("Neighbors", min_value=1, max_value=25, value=5, step=1)
+     knn_weights = st.sidebar.radio("KNN Weights:", ["uniform", "distance"])
+
+ # KNN algorithm (only the first checked option is passed to the model)
+ st.sidebar.subheader("KNN Algorithm")
+ algorithms_selected = []
+ if st.sidebar.checkbox("auto", value=True):
+     algorithms_selected.append("auto")
+ if st.sidebar.checkbox("ball_tree"):
+     algorithms_selected.append("ball_tree")
+ if st.sidebar.checkbox("kd_tree"):
+     algorithms_selected.append("kd_tree")
+ if st.sidebar.checkbox("brute"):
+     algorithms_selected.append("brute")
+
+ # KNN distance metric (only the first checked option is passed to the model)
+ st.sidebar.subheader("KNN Metric")
+ metrics_selected = []
+ if st.sidebar.checkbox("euclidean", value=True):
+     metrics_selected.append("euclidean")
+ if st.sidebar.checkbox("manhattan"):
+     metrics_selected.append("manhattan")
+ if st.sidebar.checkbox("minkowski"):
+     metrics_selected.append("minkowski")
+
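+ # Note: "auto" lets scikit-learn choose among ball_tree, kd_tree, and brute
+ # force based on the training data, so it is a sensible default.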
+ # Generate a 2D synthetic dataset; the noise slider maps to cluster_std for
+ # blobs and to Gaussian noise on the points for circles/moons
+ if data_type == "Blobs":
+     X, y = make_blobs(n_samples=5000, centers=2, cluster_std=noise, random_state=42)
+ elif data_type == "Circles":
+     X, y = make_circles(n_samples=5000, noise=noise, factor=0.5, random_state=42)
+ elif data_type == "Moons":
+     X, y = make_moons(n_samples=5000, noise=noise, random_state=42)
+ else:
+     X, y = make_classification(n_samples=5000, n_features=2, n_classes=2, n_informative=2, n_redundant=0, random_state=42)
+
+ # Split dataset
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+
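+ # All four generators above yield balanced classes, so stratify=y is not
+ # strictly needed here; for imbalanced data you would typically pass it.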
+ # Build the selected model; for KNN, fall back to the sklearn defaults when
+ # the user unchecks every algorithm/metric option
+ if model_name == "KNN":
+     model = KNeighborsClassifier(
+         n_neighbors=neighbors,
+         weights=knn_weights,
+         algorithm=algorithms_selected[0] if algorithms_selected else 'auto',
+         metric=metrics_selected[0] if metrics_selected else 'minkowski',
+     )
+ elif model_name == "Decision Tree":
+     model = DecisionTreeClassifier(random_state=42)
+ elif model_name == "Naive Bayes":
+     model = GaussianNB()
+ elif model_name == "Logistic Regression":
+     model = LogisticRegression(max_iter=200, random_state=42)
+ else:
+     model = SVC(probability=True, kernel='linear', random_state=42)
+
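+ # Note: KNN, SVC, and logistic regression are sensitive to feature scale. The
+ # synthetic generators above produce features on comparable scales, which is
+ # presumably why no StandardScaler step is included; with real data you would
+ # usually standardize first.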
+ # Fit the model
+ model.fit(X_train, y_train)
+
+ # Predict and calculate metrics on the held-out test set
+ y_pred = model.predict(X_test)
+ accuracy = accuracy_score(y_test, y_pred)
+ precision = precision_score(y_test, y_pred)
+ recall = recall_score(y_test, y_pred)
+ f1 = f1_score(y_test, y_pred)
+ # All five models expose predict_proba here (SVC because probability=True),
+ # but guard anyway; None avoids the format error a string "N/A" would cause below
+ auc = roc_auc_score(y_test, model.predict_proba(X_test)[:, 1]) if hasattr(model, "predict_proba") else None
+
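+ # precision_score, recall_score, and f1_score default to binary averaging,
+ # which matches the two-class datasets generated above.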
+ # --- Display model performance in the sidebar ---
+ with st.sidebar:
+     st.subheader(f"{model_name} Model Evaluation Metrics")
+     st.write(f"Accuracy: {accuracy:.2f}")
+     st.write(f"Precision: {precision:.2f}")
+     st.write(f"Recall: {recall:.2f}")
+     st.write(f"F1 Score: {f1:.2f}")
+     st.write(f"AUC Score: {auc:.2f}" if auc is not None else "AUC Score: N/A")
+
+ # Scatter plot of the raw dataset
+ st.subheader("Dataset Visualization")
+ fig, ax = plt.subplots()
+ sns.scatterplot(x=X[:, 0], y=X[:, 1], hue=y, palette="coolwarm", s=50, edgecolor="k", ax=ax)
+ st.pyplot(fig)
+
+ # Decision boundary of the fitted model over the training data
+ st.subheader("Decision Boundary")
+ fig, ax = plt.subplots()
+ plot_decision_regions(X_train, y_train, clf=model, legend=2, ax=ax)
+ st.pyplot(fig)
+
+ # Learning curve: training vs. cross-validation accuracy as the training set grows
+ st.subheader("Learning Curve")
+ train_sizes, train_scores, val_scores = learning_curve(model, X_train, y_train, cv=5, train_sizes=np.linspace(0.1, 1.0, 10))
+ train_mean = np.mean(train_scores, axis=1)
+ val_mean = np.mean(val_scores, axis=1)
+
+ fig, ax = plt.subplots()
+ ax.plot(train_sizes, train_mean, label='Train Accuracy', marker='o')
+ ax.plot(train_sizes, val_mean, label='Validation Accuracy', marker='s')
+ ax.set_xlabel("Training Set Size")
+ ax.set_ylabel("Accuracy")
+ ax.legend()
+ st.pyplot(fig)
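+ # A persistent gap between the two curves suggests overfitting; curves that
+ # converge at a low accuracy suggest underfitting.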