|
|
import gradio as gr |
|
|
import numpy as np |
|
|
import matplotlib.pyplot as plt |
|
|
from sklearn.linear_model import LinearRegression |
|
|
|
|
|
|
|
|
# Reproducible synthetic dataset: 100 points on the line y = 2x plus noise.
np.random.seed(42)  # fix the RNG so every run produces the same data


# Feature column: 100 evenly spaced values in [0, 5], shaped (100, 1).
x = np.linspace(0, 5, 100)[:, np.newaxis]


# Targets: slope-2 line with Gaussian noise (mean 0, std 0.5), same shape as x.
y = 2 * x + np.random.normal(0, 0.5, x.shape)
|
|
|
|
|
|
|
|
# Closed-form least-squares fit; serves as the reference ("explicit") line
# that gradient descent is compared against in the plot.
# LinearRegression.fit returns the fitted estimator itself, so this one-liner
# is equivalent to constructing and then fitting in two statements.
lin_reg = LinearRegression().fit(x, y)
|
|
|
|
|
def visualize_model(lr=0.01, epochs=50):
    """Fit y = w*x + b by batch gradient descent and plot the result.

    Parameters
    ----------
    lr : float, default 0.01
        Learning rate for the gradient-descent updates.
    epochs : int, default 50
        Number of full-batch update steps. Coerced to ``int`` because
        Gradio sliders may deliver the value as a float, which would
        make ``range(epochs)`` raise ``TypeError``.

    Returns
    -------
    matplotlib.figure.Figure
        Two panels: the data with both the closed-form and the
        gradient-descent fit lines, and the MSE loss curve per epoch.
    """
    # Bug fix: Gradio sliders can pass floats; range() needs an int.
    epochs = int(epochs)

    w, b = 0.0, 0.0  # start from a flat line through the origin
    n = len(x)
    losses = []

    for _ in range(epochs):
        y_pred = w * x + b
        # Record the mean-squared-error loss before this epoch's update.
        losses.append(np.mean((y - y_pred) ** 2))

        # Analytic MSE gradients w.r.t. w and b (full batch).
        dw = (-2 / n) * np.sum((y - y_pred) * x)
        db = (-2 / n) * np.sum(y - y_pred)

        w -= lr * dw
        b -= lr * db

    fig, axs = plt.subplots(1, 2, figsize=(10, 4))

    # Left panel: data plus both fitted lines.
    axs[0].scatter(x, y, label="Data", alpha=0.6)
    axs[0].plot(x, lin_reg.predict(x), label="Explicit Solution", color="green")
    axs[0].plot(x, w * x + b, label="Gradient Descent", color="red")
    axs[0].legend()
    axs[0].set_title("Model Predictions")

    # Right panel: training loss per epoch.
    axs[1].plot(range(epochs), losses, color="blue")
    axs[1].set_title("Loss vs Epoch")
    axs[1].set_xlabel("Epoch")
    axs[1].set_ylabel("Loss")

    # Bug fix: unregister the figure from pyplot so repeated Gradio calls
    # don't leak open figures; the returned Figure object still renders.
    plt.close(fig)
    return fig
|
|
|
|
|
# --- Gradio UI ------------------------------------------------------------
# Two sliders feed visualize_model(); the returned figure is rendered
# in a Plot component.
_lr_slider = gr.Slider(0.001, 0.1, value=0.01, label="Learning Rate")
_epochs_slider = gr.Slider(10, 200, value=50, step=10, label="Epochs")

demo = gr.Interface(
    fn=visualize_model,
    inputs=[_lr_slider, _epochs_slider],
    outputs=gr.Plot(),
    title="Linear Regression Visualization",
    description="Adjust learning rate and epochs to see Gradient Descent vs Explicit Solution.",
)
|
|
|
|
|
# Launch the Gradio web server only when executed as a script (not on import).
if __name__ == "__main__":


    demo.launch()