Create app.py
app.py ADDED
@@ -0,0 +1,159 @@
import time

import gradio as gr
import matplotlib.pyplot as plt
import numpy as np
from sklearn.base import BaseEstimator, clone
from sklearn.cluster import AgglomerativeClustering
from sklearn.datasets import make_blobs
from sklearn.ensemble import RandomForestClassifier
from sklearn.inspection import DecisionBoundaryDisplay
from sklearn.utils.metaestimators import available_if
from sklearn.utils.validation import check_is_fitted

theme = gr.themes.Monochrome(
    primary_hue="indigo",
    secondary_hue="blue",
    neutral_hue="slate",
)
model_card = """
## Description

**Clustering** can be costly, especially when we have a lot of data.
Some clustering algorithms cannot assign new data to existing clusters without redoing the clustering, which can be expensive.
Instead, we can use the clustering to train a classifier; this is called **Inductive Clustering**.
This demo illustrates a generic implementation of a meta-estimator which extends clustering by inducing a classifier from the cluster labels, and compares the running time against recomputing the clusters from scratch.
You can play around with different ``number of samples`` and ``number of new data`` values to see the effect.

## Dataset

Simulated dataset
"""

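# Helper for ``available_if`` below: the meta-estimator should expose
# ``predict``/``decision_function`` only when the wrapped classifier does.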
def _classifier_has(attr):
    """Check if we can delegate a method to the underlying classifier.

    First, we check the fitted classifier if available, otherwise we
    check the unfitted classifier.
    """
    return lambda estimator: (
        hasattr(estimator.classifier_, attr)
        if hasattr(estimator, "classifier_")
        else hasattr(estimator.classifier, attr)
    )

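# Meta-estimator: run the clusterer once, then fit a classifier on the
# resulting cluster labels so that new samples can be assigned to clusters
# without re-running the (expensive) clustering step.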
class InductiveClusterer(BaseEstimator):
    def __init__(self, clusterer, classifier):
        self.clusterer = clusterer
        self.classifier = classifier

    def fit(self, X, y=None):
        self.clusterer_ = clone(self.clusterer)
        self.classifier_ = clone(self.classifier)
        y = self.clusterer_.fit_predict(X)
        self.classifier_.fit(X, y)
        return self

    @available_if(_classifier_has("predict"))
    def predict(self, X):
        check_is_fitted(self)
        return self.classifier_.predict(X)

    @available_if(_classifier_has("decision_function"))
    def decision_function(self, X):
        check_is_fitted(self)
        return self.classifier_.decision_function(X)

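# A minimal usage sketch of the meta-estimator above (``X_train`` and
# ``X_new`` are hypothetical 2-D arrays, not defined in this app):
#
#     model = InductiveClusterer(
#         AgglomerativeClustering(n_clusters=3),
#         RandomForestClassifier(),
#     ).fit(X_train)
#     new_labels = model.predict(X_new)  # no re-clustering needed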
def do_train(n_samples, n_new_data):
    N_SAMPLES = n_samples
    N_NEW_DATA = n_new_data
    RANDOM_STATE = 42

    # Generate some training data from clustering
    X, y = make_blobs(
        n_samples=N_SAMPLES,
        cluster_std=[1.0, 1.0, 0.5],
        centers=[(-5, -5), (0, 0), (5, 5)],
        random_state=RANDOM_STATE,
    )

    # Train a clustering algorithm on the training data and get the cluster labels
    clusterer = AgglomerativeClustering(n_clusters=3)
    cluster_labels = clusterer.fit_predict(X)

    fig1, axes1 = plt.subplots()
    axes1.scatter(X[:, 0], X[:, 1], c=cluster_labels, alpha=0.5, edgecolor="k")
    axes1.set_title("Ward Linkage")

    # Generate new samples and plot them along with the original dataset
    X_new, y_new = make_blobs(
        n_samples=N_NEW_DATA,
        centers=[(-7, -1), (-2, 4), (3, 6)],
        random_state=RANDOM_STATE,
    )

    fig2, axes2 = plt.subplots()
    axes2.scatter(X[:, 0], X[:, 1], c=cluster_labels, alpha=0.5, edgecolor="k")
    axes2.scatter(X_new[:, 0], X_new[:, 1], c="black", alpha=1, edgecolor="k")
    axes2.set_title("Unknown instances")

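    # Approach 1: reuse the clustering already computed on X; only the
    # classifier has to be fitted, so this path stays fast as data grows.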
    # Declare the inductive learning model that will be used to
    # predict cluster membership for unknown instances
    t1 = time.time()
    classifier = RandomForestClassifier(random_state=RANDOM_STATE)
    inductive_learner = InductiveClusterer(clusterer, classifier).fit(X)

    probable_clusters = inductive_learner.predict(X_new)
    fig3, axes3 = plt.subplots()
    disp = DecisionBoundaryDisplay.from_estimator(
        inductive_learner, X, response_method="predict", alpha=0.4, ax=axes3
    )
    disp.ax_.set_title("Classify unknown instances with known clusters")
    disp.ax_.scatter(X[:, 0], X[:, 1], c=cluster_labels, alpha=0.5, edgecolor="k")
    disp.ax_.scatter(X_new[:, 0], X_new[:, 1], c=probable_clusters, alpha=0.5, edgecolor="k")
    t1_running = time.time() - t1

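    # Approach 2 (baseline): hierarchical clustering is re-run on the
    # combined data, so its cost grows with every batch of new samples.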
    # Recompute the clustering and the classification boundary
    t2 = time.time()
    X_all = np.concatenate((X, X_new), axis=0)
    clusterer = AgglomerativeClustering(n_clusters=3)
    y = clusterer.fit_predict(X_all)
    classifier = RandomForestClassifier(random_state=RANDOM_STATE).fit(X_all, y)
    fig4, axes4 = plt.subplots()
    disp = DecisionBoundaryDisplay.from_estimator(
        classifier, X_all, response_method="predict", alpha=0.4, ax=axes4
    )
    disp.ax_.set_title("Classify unknown instances with recomputed clusters")
    disp.ax_.scatter(X_all[:, 0], X_all[:, 1], c=y, alpha=0.5, edgecolor="k")
    t2_running = time.time() - t2
    text = f"Inductive Clustering running time: {t1_running:.4f}s. Recomputing clusters running time: {t2_running:.4f}s"
    return fig1, fig2, fig3, fig4, text

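# Gradio UI: two sliders drive do_train, which returns the four plots and
# the timing comparison shown below.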
with gr.Blocks(theme=theme) as demo:
    gr.Markdown('''
        <div>
            <h1 style='text-align: center'>Inductive Clustering</h1>
        </div>
    ''')
    gr.Markdown(model_card)
    gr.Markdown("Author: <a href=\"https://huggingface.co/vumichien\">Vu Minh Chien</a>. Based on the example from <a href=\"https://scikit-learn.org/stable/auto_examples/cluster/plot_inductive_clustering.html#sphx-glr-auto-examples-cluster-plot-inductive-clustering-py\">scikit-learn</a>")
    n_samples = gr.Slider(minimum=5000, maximum=10000, step=1000, value=5000, label="Number of samples")
    n_new_data = gr.Slider(minimum=100, maximum=1000, step=100, value=100, label="Number of new data")
    with gr.Row():
        with gr.Column():
            plot1 = gr.Plot(label="Clustering")
        with gr.Column():
            plot2 = gr.Plot(label="Clustering with new instances")
    with gr.Row():
        with gr.Column():
            plot3 = gr.Plot(label="Inductive clustering")
        with gr.Column():
            plot4 = gr.Plot(label="Recomputing clustering")
    with gr.Row():
        results = gr.Textbox(label="Results")

    n_samples.change(fn=do_train, inputs=[n_samples, n_new_data], outputs=[plot1, plot2, plot3, plot4, results])
    n_new_data.change(fn=do_train, inputs=[n_samples, n_new_data], outputs=[plot1, plot2, plot3, plot4, results])

demo.launch()