# KNN Classifier Interactive App — Streamlit demo (deployed as a Hugging Face Space).
# --- Imports ---------------------------------------------------------------
import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score, f1_score
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_classification, make_circles, make_blobs, make_moons
from mlxtend.plotting import plot_decision_regions, plot_learning_curves

# --- Streamlit page setup --------------------------------------------------
st.set_page_config(page_title="KNN Classifier App", layout="wide")
st.title("🎯 KNN Classifier Interactive App")
st.write("Select a dataset, modify parameters, and tune KNN hyperparameters.")

# Sidebar: choose which synthetic dataset generator to use below.
dataset_choice = st.sidebar.selectbox("📊 Select a Dataset", ["Classification", "Circles", "Blobs", "Moons"])
# --- Dataset parameters ----------------------------------------------------
# Each generator exposes its own tunable knobs in the sidebar. random_state
# is pinned so the data (and therefore the displayed scores) are reproducible
# across Streamlit reruns.
if dataset_choice == "Classification":
    n_samples = st.sidebar.slider("Samples", 1000, 5000, 2000)
    class_sep = st.sidebar.slider("Class Separation", 0.5, 5.0, 1.0)
    X, y = make_classification(n_samples=n_samples, n_features=2, n_redundant=0,
                               n_clusters_per_class=1, class_sep=class_sep, random_state=23)
elif dataset_choice == "Circles":
    n_samples = st.sidebar.slider("Samples", 500, 5000, 2000)
    noise = st.sidebar.slider("Noise", 0.0, 0.5, 0.2)
    factor = st.sidebar.slider("Factor", 0.1, 0.9, 0.2)
    X, y = make_circles(n_samples=n_samples, factor=factor, noise=noise, random_state=23)
elif dataset_choice == "Blobs":
    n_samples = st.sidebar.slider("Samples", 500, 5000, 2000)
    clusters = st.sidebar.slider("Clusters", 2, 5, 3)
    X, y = make_blobs(n_samples=n_samples, centers=clusters, n_features=2, random_state=23)
else:  # Moons
    n_samples = st.sidebar.slider("Samples", 500, 5000, 2000)
    noise = st.sidebar.slider("Noise", 0.0, 0.5, 0.2)
    X, y = make_moons(n_samples=n_samples, noise=noise, random_state=23)
# --- Train / evaluate ------------------------------------------------------
# Hold out 20% of the data for scoring; random_state fixed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=23)

# KNN hyperparameters, driven from the sidebar.
st.sidebar.subheader("⚙️ KNN Parameters")
n_neighbors = st.sidebar.slider("Neighbors (k)", 1, 15, 3)
weights = st.sidebar.selectbox("Weights", ["uniform", "distance"])
p = st.sidebar.selectbox("p (Minkowski)", [1, 2])  # p=1 Manhattan, p=2 Euclidean

knn = KNeighborsClassifier(n_neighbors=n_neighbors, weights=weights, p=p, metric='minkowski')
knn.fit(X_train, y_train)
y_pred = knn.predict(X_test)
accuracy = accuracy_score(y_test, y_pred)
# Weighted F1 so multi-class datasets (e.g. Blobs with >2 clusters) are handled.
f1 = f1_score(y_test, y_pred, average="weighted")
# --- Display results -------------------------------------------------------
st.subheader("📈 Model Accuracy")
st.write(f"**Accuracy Score: {accuracy:.4f}**")
st.subheader("📈 Model F1-Score")
st.write(f"**F1 Score: {f1:.4f}**")

# Learning curve: mlxtend re-fits the estimator on growing fractions of the
# training set and plots train/test accuracy onto the current figure.
fig, ax = plt.subplots(figsize=(6, 4))
plot_learning_curves(X_train, y_train, X_test, y_test, knn, scoring='accuracy')
st.subheader("📈 Learning Curve")
st.pyplot(fig)

# Decision boundary over the FULL dataset.
# NOTE(review): refitting on (X, y) means the plotted boundary is not the
# same model that produced the scores above (test data is included here).
fig, ax = plt.subplots(figsize=(6, 4))
knn.fit(X, y)
plot_decision_regions(X, y, knn)
st.subheader("🔷 Decision Boundary")
st.pyplot(fig)