# Streamlit demo: simple linear regression trained with PyTorch.
import streamlit as st
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
# Define the dataset
def generate_data(n_samples, slope=2.0, intercept=3.0, noise_std=3.0, seed=42):
    """Generate a noisy linear dataset y = slope * X + intercept + noise.

    Args:
        n_samples: number of (X, y) pairs to create.
        slope: true slope of the underlying line (default keeps original behavior).
        intercept: true intercept of the underlying line.
        noise_std: standard deviation of the Gaussian noise added to y.
        seed: RNG seed; fixed by default so Streamlit reruns see the same data.

    Returns:
        Tuple of tensors (X, y), each of shape (n_samples, 1).
    """
    torch.manual_seed(seed)  # deterministic data across reruns
    X = torch.randn(n_samples, 1) * 10
    y = slope * X + intercept + torch.randn(n_samples, 1) * noise_std
    return X, y
# Define the linear regression model
class LinearRegressionModel(nn.Module):
    """Single-feature affine model: y_hat = w * x + b."""

    def __init__(self):
        super().__init__()
        # One input feature mapped to one output value.
        self.linear = nn.Linear(1, 1)

    def forward(self, x):
        """Apply the affine map to a batch of shape (N, 1)."""
        return self.linear(x)
# Train the model
def train_model(X, y, lr, epochs):
    """Fit a LinearRegressionModel to (X, y) with full-batch SGD.

    Args:
        X: input tensor of shape (N, 1).
        y: target tensor of shape (N, 1).
        lr: SGD learning rate.
        epochs: number of full-batch gradient steps.

    Returns:
        The trained model.
    """
    regressor = LinearRegressionModel()
    mse = nn.MSELoss()
    sgd = optim.SGD(regressor.parameters(), lr=lr)

    for _ in range(epochs):
        regressor.train()
        sgd.zero_grad()
        step_loss = mse(regressor(X), y)
        step_loss.backward()
        sgd.step()

    return regressor
# Plot the results
def plot_results(X, y, model):
    """Render a scatter of the data plus the model's fitted line in Streamlit.

    Args:
        X: input tensor of shape (N, 1).
        y: target tensor of shape (N, 1).
        model: trained regression model mapping X -> predictions.
    """
    # Draw on a dedicated figure instead of pyplot's implicit global one so
    # repeated calls / Streamlit reruns never bleed artists into each other.
    fig, ax = plt.subplots()
    ax.scatter(X.numpy(), y.numpy(), label='Original data')
    # detach(): predictions carry autograd history; .numpy() requires no grad.
    ax.plot(X.numpy(), model(X).detach().numpy(), label='Fitted line', color='r')
    ax.legend()
    ax.set_xlabel('X')
    ax.set_ylabel('y')
    st.pyplot(fig)
    plt.close(fig)  # release the figure; Matplotlib keeps open figures alive
# Streamlit interface
st.title('Simple Linear Regression with PyTorch')

# Hyperparameters chosen interactively by the user.
n_samples = st.slider('Number of samples', 20, 100, 50)
learning_rate = st.slider('Learning rate', 0.001, 0.1, 0.01)
epochs = st.slider('Number of epochs', 100, 1000, 500)

X, y = generate_data(n_samples)

# Train ONE model while recording the loss at every epoch, so the fitted
# line, the reported weight/bias, and the loss curve all describe the same
# model. (Previously a second, independently-initialized model was trained
# just to produce the loss curve, so the displayed parameters did not match
# the curve and the work was done twice.)
model = LinearRegressionModel()
criterion = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=learning_rate)
losses = []
for epoch in range(epochs):
    model.train()
    optimizer.zero_grad()
    loss = criterion(model(X), y)
    loss.backward()
    optimizer.step()
    losses.append(loss.item())

st.subheader('Training Data')
plot_results(X, y, model)

st.subheader('Model Parameters')
st.write(f'Weight: {model.linear.weight.item()}')
st.write(f'Bias: {model.linear.bias.item()}')

st.subheader('Loss Curve')
# Dedicated figure so this plot never collides with plot_results' output.
fig = plt.figure()
plt.plot(range(epochs), losses)
plt.xlabel('Epoch')
plt.ylabel('Loss')
st.pyplot(fig)
plt.close(fig)