Spaces:
Sleeping
Sleeping
| # import streamlit as st | |
| # import numpy as np | |
| # from sklearn.datasets import make_circles,make_moons,make_blobs | |
| # from sklearn.preprocessing import StandardScaler | |
| # from sklearn.model_selection import train_test_split | |
| # from tensorflow import keras | |
| # import matplotlib.pyplot as plt | |
| # import seaborn as sns | |
| # from keras.models import Sequential | |
| # from keras.layers import InputLayer,Dense,Dropout | |
| # from keras.losses import MeanAbsoluteError,MeanSquaredError | |
| # from keras.optimizers import SGD | |
| # from keras.regularizers import l1,l1,l1_l2 | |
| # from mlxtend.plotting import plot_decision_regions | |
| # import graphviz | |
| # st.title("TensorFlow Playground") | |
| # with st.sidebar: | |
| # st.header("Choose Dataset") | |
| # dataset = st.selectbox("Select Dataset",["Blobs","Circles","Moons"]) | |
| # # on = st.toggle("Upload Dataset(.csv file)") | |
| # # if on: | |
| # # st.write("**Note:** Only 2 features are allowed.") | |
| # # up_file = st.file_uploader("Upload Dataset (.csv or .xlsx)", type=["csv"]) | |
| # noise = st.slider("Noise",0.0,1.0,0.1) | |
| # test_size = st.slider("Test Size",0.1,0.5,0.05) | |
| # st.header("Model Hyperparameters") | |
| # hl = st.number_input("Hidden Layers",1,10,step=1) | |
| # numbers = st.text_input("Neurons for each hidden layer" ,placeholder="e.g. 8,16,32") | |
| # input_func = lambda x: [int(i.strip()) for i in x.split(",") if i.strip() != ""] | |
| # nn = input_func(numbers) | |
| # epochs=st.number_input("Epochs",1,10000,step=1,value=10) | |
| # col1, col2 = st.columns(2) | |
| # with col1: | |
| # af = st.selectbox("Activation Function",["sigmoid","tanh","relu"],index=2) | |
| # with col2: | |
| # lr = st.selectbox("Learning Rate",[0.1,0.01,0.02,0.2]) | |
| # # col3,col4 = st.column(2) | |
| # # with col3: | |
| # reg = st.selectbox("Regularizer", ["None", "L1", "L2","ElasticNet"]) | |
| # if reg != "None": | |
| # reg_rate = st.slider("Regularization rate", 0.0, 0.1, 0.01) | |
| # # with col4: | |
| # es = st.selectbox("Early Stopping",["No","Yes"],index=0) | |
| # if es == "Yes": | |
| # col3, col4 = st.columns(2) | |
| # with col3: | |
| # min_delta = st.number_input("Minimum Delta",0.001,0.9,step=0.1) | |
| # with col4: | |
| # patience = st.number_input("Patience",3,20,step=1) | |
| # btn=st.sidebar.button("Train") | |
| # if btn: | |
| # if dataset: | |
| # if dataset=="Circles": | |
| # x,y=make_circles(n_samples=1000,noise=noise,random_state=42,factor=0.1) | |
| # elif dataset=="moons": | |
| # x,y=make_moons(n_samples=1000,noise=noise,random_state=42) | |
| # elif dataset == "Blobs": | |
| # x,y=make_blobs(n_samples=1000, centers=2, cluster_std=noise, random_state=42) | |
| # x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=test_size,random_state=42,stratify=y) | |
| # std = StandardScaler() | |
| # x_train = std.fit_transform(x_train) | |
| # x_test = std.transform(x_test) | |
| # if reg == "L1": | |
| # reg = l1(reg_rate) | |
| # elif reg == "L2": | |
| # reg = l2(reg_rate) | |
| # elif reg == "ElasticNet": | |
| # reg = l1_l2(l1=reg_rate, l2=reg_rate) | |
| # else: | |
| # reg=None | |
| # model = Sequential() | |
| # model.add(InputLayer(shape=(2,))) | |
| # if hl == len(nn): | |
| # for i in range(0,len(nn)): | |
| # model.add(Dense(units=nn[i],activation=af,kernel_regularizer=reg)) | |
| # model.add(Dense(units=1,activation="sigmoid",kernel_regularizer=reg)) | |
| # sgd=SGD(learning_rate=lr) | |
| # model.compile(loss="binary_crossentropy",optimizer=sgd,metrics=["accuracy"]) | |
| # train_size=round(x_train.shape[0]-x_train.shape[0]*0.2) | |
| # callbacks = [] | |
| # if es == "Yes": | |
| # es = EarlyStopping( | |
| # monitor="val_loss", | |
| # #min_delta= min_delta if min_delta else 0.001, | |
| # min_delta = min_delta, | |
| # patience=patience, | |
| # verbose=1, | |
| # restore_best_weights=True, | |
| # start_from_epoch=50 | |
| # ) | |
| # callbacks.append(es) | |
| # hist=model.fit(x_train,y_train,epochs=epochs,batch_size=train_size,validation_data=(x_test, y_test),verbose=False) | |
| # # # --- Neural Network Diagram --- | |
| # # st.subheader("Neural Network Architecture") | |
| # # stringlist = [] | |
| # # model.summary(print_fn=lambda x: stringlist.append(x)) | |
| # # summary_str = "\n".join(stringlist) | |
| # # st.text(summary_str) | |
| # # ---------------- Graphical NN Architecture ---------------- | |
| # st.subheader("Neural Network Architecture") | |
| # def visualize_nn(layers): | |
| # dot = graphviz.Digraph() | |
| # dot.attr(rankdir="LR") | |
| # dot.node("Input", "Input Layer\nfeatures=2", shape="box", style="filled", color="lightblue") | |
| # for i, units in enumerate(layers): | |
| # dot.node(f"H{i}", f"Hidden {i+1}\nunits={units}", shape="box", style="filled", color="lightgreen") | |
| # if i == 0: | |
| # dot.edge("Input", f"H{i}") | |
| # else: | |
| # dot.edge(f"H{i-1}", f"H{i}") | |
| # dot.node("Output", "Output Layer\nunits=1", shape="box", style="filled", color="lightcoral") | |
| # dot.edge(f"H{len(layers)-1}" if layers else "Input", "Output") | |
| # return dot | |
| # dot = visualize_nn(nn) | |
| # st.graphviz_chart(dot) | |
| # # --- Plotting Decision region --- | |
| # st.subheader("Decision Region") | |
| # fig1, ax1 = plt.subplots(figsize=(6, 5)) | |
| # plot_decision_regions(X=x_train, y=y_train.astype(np.int_), clf=model, ax=ax1) | |
| # #plot_decision_regions(X=x_test, y=y_test.astype(np.int_), clf=model, ax=ax1) | |
| # ax1.set_title("Decision Regions", fontsize=12, weight="bold") | |
| # st.pyplot(fig1) | |
| # # --- Plot 2: Training vs Validation Loss --- | |
| # st.subheader("Training vs Validation Loss") | |
| # fig2, ax2 = plt.subplots(figsize=(6, 5)) | |
| # ax2.plot(hist.history["loss"], label="Training Loss", linewidth=2) | |
| # ax2.plot(hist.history["val_loss"], label="Validation Loss", linewidth=2, linestyle="--") | |
| # ax2.set_xlabel("Epochs") | |
| # ax2.set_ylabel("Loss") | |
| # ax2.legend() | |
| # ax2.grid(alpha=0.3) | |
| # st.pyplot(fig2) | |
| # import streamlit as st | |
| # import numpy as np | |
| # import pandas as pd | |
| # from sklearn.datasets import make_circles, make_moons, make_blobs | |
| # from sklearn.preprocessing import StandardScaler | |
| # from sklearn.model_selection import train_test_split | |
| # from tensorflow.keras.models import Sequential | |
| # from tensorflow.keras.layers import InputLayer, Dense | |
| # from tensorflow.keras.optimizers import SGD | |
| # from tensorflow.keras.regularizers import l1, l2, l1_l2 | |
| # from tensorflow.keras.callbacks import EarlyStopping | |
| # import matplotlib.pyplot as plt | |
| # from mlxtend.plotting import plot_decision_regions | |
| # import graphviz | |
| # # ----------------------------- | |
| # # Streamlit UI | |
| # # ----------------------------- | |
| # st.title("TensorFlow Playground") | |
| # with st.sidebar: | |
| # st.header("Dataset Options") | |
| # dataset_mode = st.radio("Choose Dataset Mode", ["Synthetic", "Upload CSV"]) | |
| # if dataset_mode == "Synthetic": | |
| # dataset = st.selectbox("Select Dataset", ["Blobs", "Circles", "Moons"]) | |
| # noise = st.slider("Noise", 0.0, 1.0, 0.1) | |
| # test_size = st.slider("Test Size", 0.1, 0.5, 0.2) | |
| # else: | |
| # uploaded_file = st.file_uploader("Upload your CSV", type=["csv"]) | |
| # if uploaded_file is not None: | |
| # df = pd.read_csv(uploaded_file) | |
| # if df.shape[1] < 3: | |
| # st.error("Your dataset must have at least 3 columns (2 features + 1 target).") | |
| # st.stop() | |
| # feature_cols = st.multiselect("Select exactly 2 features", df.columns[:-1]) | |
| # target_col = st.selectbox("Select target column", df.columns) | |
| # if len(feature_cols) != 2: | |
| # st.error("Please select exactly 2 features.") | |
| # st.stop() | |
| # X = df[feature_cols].values | |
| # y = df[target_col].values | |
| # test_size = st.slider("Test Size", 0.1, 0.5, 0.2) | |
| # else: | |
| # st.warning("Upload a CSV file to continue.") | |
| # st.stop() | |
| # st.header("Model Hyperparameters") | |
| # hl = st.number_input("Hidden Layers", 1, 10, step=1) | |
| # numbers = st.text_input("Neurons for each hidden layer", placeholder="e.g. 8,16,32") | |
| # input_func = lambda x: [int(i.strip()) for i in x.split(",") if i.strip() != ""] | |
| # nn = input_func(numbers) | |
| # n_epochs = st.number_input("Epochs", 1, 10000, step=1, value=50) | |
| # col1, col2 = st.columns(2) | |
| # with col1: | |
| # af = st.selectbox("Activation Function", ["sigmoid", "tanh", "relu"], index=2) | |
| # with col2: | |
| # lr = st.selectbox("Learning Rate", [0.1, 0.01, 0.02, 0.2], index=1) | |
| # reg = st.selectbox("Regularizer", ["None", "L1", "L2", "ElasticNet"]) | |
| # if reg != "None": | |
| # reg_rate = st.slider("Regularization rate", 0.0, 0.1, 0.01) | |
| # early_stop_option = st.selectbox("Early Stopping", ["No", "Yes"], index=0) | |
| # if early_stop_option == "Yes": | |
| # col3, col4 = st.columns(2) | |
| # with col3: | |
| # min_delta = st.number_input("Minimum Delta", 0.001, 0.9, step=0.01) | |
| # with col4: | |
| # patience = st.number_input("Patience", 3, 20, step=1) | |
| # # ----------------------------- | |
| # # Train Button | |
| # # ----------------------------- | |
| # if st.sidebar.button("Train"): | |
| # # ---------------- Dataset ---------------- | |
| # if dataset_mode == "Synthetic": | |
| # if dataset == "Circles": | |
| # X, y = make_circles(n_samples=1000, noise=noise, random_state=42, factor=0.5) | |
| # elif dataset == "Moons": | |
| # X, y = make_moons(n_samples=1000, noise=noise, random_state=42) | |
| # elif dataset == "Blobs": | |
| # X, y = make_blobs(n_samples=1000, centers=2, cluster_std=noise+0.5, random_state=42) | |
| # # Split dataset | |
| # X_train, X_test, y_train, y_test = train_test_split( | |
| # X, y, test_size=test_size, random_state=42, stratify=y | |
| # ) | |
| # # Standardize | |
| # std = StandardScaler() | |
| # X_train = std.fit_transform(X_train) | |
| # X_test = std.transform(X_test) | |
| # # ---------------- Regularizer ---------------- | |
| # if reg == "L1": | |
| # reg = l1(reg_rate) | |
| # elif reg == "L2": | |
| # reg = l2(reg_rate) | |
| # elif reg == "ElasticNet": | |
| # reg = l1_l2(l1=reg_rate, l2=reg_rate) | |
| # else: | |
| # reg = None | |
| # # ---------------- Model ---------------- | |
| # model = Sequential() | |
| # model.add(InputLayer(shape=(2,))) | |
| # if hl == len(nn): | |
| # for units in nn: | |
| # model.add(Dense(units=units, activation=af, kernel_regularizer=reg)) | |
| # model.add(Dense(units=1, activation="sigmoid", kernel_regularizer=reg)) | |
| # sgd = SGD(learning_rate=lr) | |
| # model.compile(loss="binary_crossentropy", optimizer=sgd, metrics=["accuracy"]) | |
| # # ---------------- Callbacks ---------------- | |
| # callbacks = [] | |
| # if early_stop_option == "Yes": | |
| # es = EarlyStopping( | |
| # monitor="val_loss", | |
| # min_delta=min_delta, | |
| # patience=patience, | |
| # verbose=1, | |
| # restore_best_weights=True, | |
| # start_from_epoch=50, | |
| # ) | |
| # callbacks.append(es) | |
| # # ---------------- Training ---------------- | |
| # hist = model.fit( | |
| # X_train, | |
| # y_train, | |
| # epochs=n_epochs, | |
| # batch_size=len(X_train), | |
| # validation_data=(X_test, y_test), | |
| # verbose=0, | |
| # callbacks=callbacks, | |
| # ) | |
| # # ---------------- Decision Boundary ---------------- | |
| # st.subheader("Decision Boundary") | |
| # fig, ax = plt.subplots() | |
| # plot_decision_regions(X_test, y_test, clf=model, legend=2) | |
| # plt.xlabel("Feature 1") | |
| # plt.ylabel("Feature 2") | |
| # st.pyplot(fig) | |
| # # ---------------- Loss Curves ---------------- | |
| # st.subheader("Training vs Validation Loss") | |
| # fig2, ax2 = plt.subplots() | |
| # ax2.plot(hist.history["loss"], label="Train Loss") | |
| # ax2.plot(hist.history["val_loss"], label="Validation Loss") | |
| # ax2.set_xlabel("Epochs") | |
| # ax2.set_ylabel("Loss") | |
| # ax2.legend() | |
| # st.pyplot(fig2) | |
| # # ---------------- Accuracy Curves ---------------- | |
| # st.subheader("Training vs Validation Accuracy") | |
| # fig3, ax3 = plt.subplots() | |
| # ax3.plot(hist.history["accuracy"], label="Train Accuracy") | |
| # ax3.plot(hist.history["val_accuracy"], label="Validation Accuracy") | |
| # ax3.set_xlabel("Epochs") | |
| # ax3.set_ylabel("Accuracy") | |
| # ax3.legend() | |
| # st.pyplot(fig3) | |
| # # ---------------- Graphical NN Architecture ---------------- | |
| # st.subheader("Neural Network Architecture") | |
| # def visualize_nn(layers): | |
| # dot = graphviz.Digraph() | |
| # dot.attr(rankdir="LR") | |
| # dot.node("Input", "Input Layer\nfeatures=2", shape="box", style="filled", color="lightblue") | |
| # for i, units in enumerate(layers): | |
| # dot.node(f"H{i}", f"Hidden {i+1}\nunits={units}", shape="box", style="filled", color="lightgreen") | |
| # if i == 0: | |
| # dot.edge("Input", f"H{i}") | |
| # else: | |
| # dot.edge(f"H{i-1}", f"H{i}") | |
| # dot.node("Output", "Output Layer\nunits=1", shape="box", style="filled", color="lightcoral") | |
| # dot.edge(f"H{len(layers)-1}" if layers else "Input", "Output") | |
| # return dot | |
| # dot = visualize_nn(nn) | |
| # st.graphviz_chart(dot) | |
import graphviz
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import streamlit as st
from keras.callbacks import EarlyStopping
from keras.layers import InputLayer, Dense, Dropout
from keras.models import Sequential
from keras.optimizers import SGD
from keras.regularizers import l1, l2, l1_l2
from mlxtend.plotting import plot_decision_regions
from sklearn.datasets import make_circles, make_moons, make_blobs
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tensorflow import keras
# ========== Custom CSS ==========
# Page-wide styling injected once at startup: gradient background, dark
# sidebar, rounded buttons/inputs, and card-style chart containers.
_CUSTOM_CSS = """
<style>
/* Background */
.stApp {
    background: linear-gradient(to right, #f8f9fa, #eef2f3);
    font-family: 'Segoe UI', sans-serif;
}
/* Sidebar */
section[data-testid="stSidebar"] {
    background-color: #2C3E50 !important;
}
section[data-testid="stSidebar"] h1,
section[data-testid="stSidebar"] h2,
section[data-testid="stSidebar"] h3,
section[data-testid="stSidebar"] label {
    color: white !important;
}
/* Title */
h1 {
    text-align: center;
    color: #2C3E50;
    font-size: 40px;
    font-weight: bold;
    margin-bottom: 20px;
}
/* Buttons */
div.stButton > button {
    background: #3498DB;
    color: white;
    border-radius: 8px;
    padding: 10px 24px;
    border: none;
    font-size: 16px;
    transition: 0.3s;
}
div.stButton > button:hover {
    background: #2980B9;
}
/* Select & Input */
.stSelectbox, .stNumberInput, .stSlider {
    border-radius: 10px !important;
}
/* Charts */
.css-1v0mbdj, .css-1y0tads {
    background-color: white;
    padding: 20px;
    border-radius: 12px;
    box-shadow: 0px 4px 15px rgba(0,0,0,0.1);
}
</style>
"""
# unsafe_allow_html is required for raw <style> injection in Streamlit.
st.markdown(_CUSTOM_CSS, unsafe_allow_html=True)

# Title
st.title("✨ TensorFlow Playground")
| # Sidebar | |
| with st.sidebar: | |
| st.header("Choose Dataset") | |
| dataset = st.selectbox("Select Dataset", ["Blobs", "Circles", "Moons", "Upload CSV"]) | |
| noise = st.slider("Noise", 0.0, 1.0, 0.1) | |
| test_size = st.slider("Test Size", 0.1, 0.5, 0.2) | |
| st.header("Model Hyperparameters") | |
| hl = st.number_input("Hidden Layers", 1, 10, step=1) | |
| numbers = st.text_input("Neurons for each hidden layer", placeholder="e.g. 8,16,32") | |
| input_func = lambda x: [int(i.strip()) for i in x.split(",") if i.strip() != ""] | |
| nn = input_func(numbers) | |
| epochs = st.number_input("Epochs", 1, 10000, step=1, value=10) | |
| col1, col2 = st.columns(2) | |
| with col1: | |
| af = st.selectbox("Activation Function", ["Sigmoid", "Tanh", "Relu"], index=2) | |
| with col2: | |
| lr = st.selectbox("Learning Rate", [0.1, 0.01, 0.02, 0.2]) | |
| reg_choice = st.selectbox("Regularizer", ["None", "L1", "L2", "ElasticNet"]) | |
| if reg_choice != "None": | |
| reg_rate = st.slider("Regularization rate", 0.0, 0.1, 0.01) | |
| es = st.selectbox("Early Stopping", ["No", "Yes"], index=0) | |
| if es == "Yes": | |
| col3, col4 = st.columns(2) | |
| with col3: | |
| min_delta = st.number_input("Minimum Delta", 0.001, 0.9, step=0.1) | |
| with col4: | |
| patience = st.number_input("Patience", 3, 20, step=1) | |
# ---- Dataset: either a user-supplied CSV or a synthetic 2-feature set ----
if dataset == "Upload CSV":
    uploaded_file = st.sidebar.file_uploader("Upload your CSV", type=["csv"])
    if uploaded_file is not None:
        df = pd.read_csv(uploaded_file)
        if df.shape[1] < 3:
            st.error("Your dataset must have at least 3 columns (2 features + 1 target).")
            st.stop()
        feature_cols = st.sidebar.multiselect("Select exactly 2 features", df.columns[:-1])
        target_col = st.sidebar.selectbox("Select target column", df.columns)
        if len(feature_cols) != 2:
            st.error("Please select exactly 2 features.")
            st.stop()
        # BUGFIX: nothing previously stopped the target from also being a
        # feature, which trivially leaks the label into the inputs.
        if target_col in feature_cols:
            st.error("The target column must be different from the feature columns.")
            st.stop()
        X = df[feature_cols].values
        y = df[target_col].values
        # Binary cross-entropy training and stratified splitting need
        # integer class labels; encode string/object targets to 0..k-1.
        if not np.issubdtype(np.asarray(y).dtype, np.number):
            y = pd.factorize(y)[0]
    else:
        st.warning("Upload a CSV file below in the side bar to continue.")
        st.stop()
else:
    if dataset == "Circles":
        X, y = make_circles(n_samples=1000, noise=noise, random_state=42, factor=0.5)
    elif dataset == "Moons":
        X, y = make_moons(n_samples=1000, noise=noise, random_state=42)
    elif dataset == "Blobs":
        # The noise slider starts at 0.0; cluster_std=0 would collapse each
        # blob to a single point, so fall back to a small positive spread.
        X, y = make_blobs(n_samples=1000, centers=2,
                          cluster_std=noise or 0.1, random_state=42)

# Train-test split, stratified so both classes appear in each partition.
x_train, x_test, y_train, y_test = train_test_split(
    X, y, test_size=test_size, random_state=42, stratify=y
)

# Standardize with training statistics only — no test-set leakage.
std = StandardScaler()
x_train = std.fit_transform(x_train)
x_test = std.transform(x_test)
# ---- Kernel regularizer from the sidebar choice ----
# Built lazily because reg_rate only exists when a regularizer was chosen.
if reg_choice == "None":
    reg = None
else:
    _reg_factories = {
        "L1": lambda: l1(reg_rate),
        "L2": lambda: l2(reg_rate),
        "ElasticNet": lambda: l1_l2(l1=reg_rate, l2=reg_rate),
    }
    reg = _reg_factories[reg_choice]()
# ---- Model: dense binary classifier over 2 standardized features ----
model = Sequential()
model.add(InputLayer(shape=(2,)))
if hl != len(nn):
    # BUGFIX: a mismatch previously *silently* skipped every hidden layer,
    # training a plain logistic regression the user never asked for.
    st.error(
        f"You requested {hl} hidden layer(s) but listed {len(nn)} neuron "
        "count(s). Enter one comma-separated value per hidden layer."
    )
    st.stop()
for units in nn:
    # Sidebar labels are capitalized ("Relu"); Keras expects lowercase names.
    model.add(Dense(units=units, activation=af.lower(), kernel_regularizer=reg))
model.add(Dense(units=1, activation="sigmoid", kernel_regularizer=reg))

# Plain SGD so the learning-rate selectbox has a direct, visible effect.
sgd = SGD(learning_rate=lr)
model.compile(loss="binary_crossentropy", optimizer=sgd, metrics=["accuracy"])

# ---- Optional early stopping on validation loss ----
callbacks = []
if es == "Yes":
    early_stopping = EarlyStopping(
        monitor="val_loss",
        min_delta=min_delta,
        patience=patience,
        verbose=1,
        restore_best_weights=True,
    )
    callbacks.append(early_stopping)
# ---- Train on demand, then render architecture, boundary and loss ----
if st.sidebar.button("🚀 Train Model"):
    hist = model.fit(
        x_train,
        y_train,
        epochs=epochs,
        validation_data=(x_test, y_test),
        batch_size=32,
        verbose=0,
        callbacks=callbacks,
    )

    # Neural Network Diagram
    st.subheader("Neural Network Architecture")

    def visualize_nn(layers):
        """Return a left-to-right graphviz diagram of the dense network.

        layers -- list of hidden-layer unit counts; may be empty, in which
        case the input node connects straight to the output node.
        """
        dot = graphviz.Digraph()
        dot.attr(rankdir="LR")
        dot.node("Input", "Input Layer\nfeatures=2", shape="box", style="filled", color="lightblue")
        for i, units in enumerate(layers):
            dot.node(f"H{i}", f"Hidden {i+1}\nunits={units}", shape="box", style="filled", color="lightgreen")
            dot.edge("Input" if i == 0 else f"H{i-1}", f"H{i}")
        dot.node("Output", "Output Layer\nunits=1", shape="box", style="filled", color="lightcoral")
        dot.edge(f"H{len(layers)-1}" if layers else "Input", "Output")
        return dot

    st.graphviz_chart(visualize_nn(nn))

    # Decision boundary
    st.subheader("Decision Boundary")

    class _KerasBinaryAdapter:
        """Give the Keras model the interface mlxtend expects.

        plot_decision_regions calls clf.predict() and expects 1-D integer
        class labels, while Keras returns sigmoid probabilities of shape
        (n, 1); threshold at 0.5 and flatten.
        """

        def __init__(self, keras_model):
            self._model = keras_model

        def predict(self, data):
            probs = self._model.predict(data, verbose=0)
            return (probs.ravel() > 0.5).astype(np.int_)

    fig, ax = plt.subplots()
    # BUGFIX: plot in the model's input space. The network was trained on
    # *standardized* x_train, so plotting raw X produced regions that did
    # not correspond to the learned boundary. Also draw on the created axes
    # instead of relying on matplotlib's implicit current figure.
    plot_decision_regions(
        x_train, y_train.astype(np.int_),
        clf=_KerasBinaryAdapter(model), legend=2, ax=ax,
    )
    st.pyplot(fig)

    # Loss plot
    st.subheader("Training vs Validation Loss")
    fig2, ax2 = plt.subplots()
    ax2.plot(hist.history["loss"], label="Train Loss")
    ax2.plot(hist.history["val_loss"], label="Validation Loss")
    ax2.set_xlabel("Epochs")
    ax2.set_ylabel("Loss")
    ax2.legend()
    st.pyplot(fig2)