# Gradio demo: gradient-descent linear regression vs. the closed-form
# (ordinary least squares) solution, on a synthetic noisy-line dataset.
import gradio as gr
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
# Reproducible synthetic dataset: y = 2x plus Gaussian noise (sigma = 0.5).
np.random.seed(42)
x = np.linspace(0, 5, 100).reshape(-1, 1)
y = 2 * x + np.random.normal(0, 0.5, x.shape)

# Closed-form OLS baseline the gradient-descent fit is compared against.
# LinearRegression.fit returns self, so construction and fitting chain.
lin_reg = LinearRegression().fit(x, y)
def visualize_model(lr=0.01, epochs=50):
    """Train a linear model by gradient descent and plot it against the
    closed-form solution.

    Parameters
    ----------
    lr : float
        Gradient-descent learning rate.
    epochs : int or float
        Number of training iterations. Gradio sliders deliver floats,
        so the value is coerced to ``int`` before use.

    Returns
    -------
    matplotlib.figure.Figure
        Two-panel figure: predictions (left) and loss curve (right).
    """
    # Gradio Slider values arrive as floats; range() requires an int.
    epochs = int(epochs)

    # Gradient-descent training of y_hat = w*x + b from a zero start.
    w, b = 0.0, 0.0
    n = len(x)
    losses = []
    for _ in range(epochs):
        y_pred = w * x + b
        losses.append(np.mean((y - y_pred) ** 2))
        # Analytic gradients of the MSE loss w.r.t. w and b.
        dw = (-2 / n) * np.sum((y - y_pred) * x)
        db = (-2 / n) * np.sum(y - y_pred)
        w -= lr * dw
        b -= lr * db

    fig, axs = plt.subplots(1, 2, figsize=(10, 4))

    # Left panel: raw data, closed-form fit, and gradient-descent fit.
    axs[0].scatter(x, y, label="Data", alpha=0.6)
    axs[0].plot(x, lin_reg.predict(x), label="Explicit Solution", color="green")
    axs[0].plot(x, w * x + b, label="Gradient Descent", color="red")
    axs[0].legend()
    axs[0].set_title("Model Predictions")

    # Right panel: training-loss trajectory.
    axs[1].plot(range(epochs), losses, color="blue")
    axs[1].set_title("Loss vs Epoch")
    axs[1].set_xlabel("Epoch")
    axs[1].set_ylabel("Loss")

    # Detach from pyplot's figure registry so repeated UI calls don't
    # accumulate open figures; the Figure object stays renderable.
    plt.close(fig)
    return fig
# Gradio UI: the two sliders feed (lr, epochs) into visualize_model,
# whose returned matplotlib Figure is rendered by the gr.Plot output.
demo = gr.Interface(
    fn=visualize_model,
    inputs=[
        gr.Slider(0.001, 0.1, value=0.01, label="Learning Rate"),
        gr.Slider(10, 200, value=50, step=10, label="Epochs")
    ],
    outputs=gr.Plot(),
    title="Linear Regression Visualization",
    description="Adjust learning rate and epochs to see Gradient Descent vs Explicit Solution."
)
# Start the Gradio dev server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()