Gowthamvemula committed on
Commit
742dc62
·
verified ·
1 Parent(s): 5812604

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -0
app.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import matplotlib.pyplot as plt
3
+ import streamlit as st
4
+
5
# Loss function: f(x) = x**2, a simple convex function with its minimum at x = 0.
def loss_function(x):
    """Return the loss value x**2.

    Works on scalars and numpy arrays alike (elementwise square).
    """
    return x**2


# Backward-compatible alias: the original definition used this name,
# while every call site below uses `loss_function` (fixing the NameError).
lossfunction = loss_function
8
+
9
# Analytic derivative of the loss f(x) = x**2.
def gradient(x):
    """Return df/dx = 2*x evaluated at x (scalar or numpy array)."""
    return x * 2
12
+
13
# Plain gradient descent: repeatedly step opposite to the gradient.
def gradient_descent(starting_point, learning_rate, iterations):
    """Run gradient descent and return the full visited trajectory.

    Returns a list of length iterations + 1 whose first entry is
    starting_point and whose last entry is the final iterate.
    """
    current = starting_point
    trajectory = [current]
    for _ in range(iterations):
        # Update rule: x <- x - lr * f'(x)
        current = current - learning_rate * gradient(current)
        trajectory.append(current)
    return trajectory
21
+
22
# ---- Streamlit page layout ----
st.title("Gradient Descent Visualization")

# Sidebar controls for the optimizer's hyper-parameters.
st.sidebar.header("Parameters")
starting_point = st.sidebar.slider("Starting Point", -10.0, 10.0, 5.0, 0.1)
learning_rate = st.sidebar.slider("Learning Rate", 0.01, 1.0, 0.1, 0.01)
iterations = st.sidebar.slider("Number of Iterations", 1, 50, 10, 1)

# Run the optimizer, then evaluate the loss along the visited trajectory.
x_values = gradient_descent(starting_point, learning_rate, iterations)
y_values = list(map(loss_function, x_values))

# Build the figure: the loss curve with the descent path drawn on top.
fig, ax = plt.subplots(figsize=(8, 5))
x_range = np.linspace(-10, 10, 500)
y_range = loss_function(x_range)

ax.plot(x_range, y_range, label="Loss Function", color="blue")
ax.scatter(x_values, y_values, color="red", label="Steps", zorder=5)
ax.plot(x_values, y_values, color="orange", linestyle="--", label="Gradient Descent Path")

# Tag every visited point with its iteration index.
for step, (px, py) in enumerate(zip(x_values, y_values)):
    ax.text(px, py, f"{step}", fontsize=8, ha="right")

ax.set_title("Gradient Descent")
ax.set_xlabel("x")
ax.set_ylabel("Loss")
ax.legend()

# Render the figure in the main panel.
st.pyplot(fig)

# Echo the chosen parameters and the final state below the chart.
st.write("### Gradient Descent Results")
st.write(f"Starting Point: {starting_point}")
st.write(f"Learning Rate: {learning_rate}")
st.write(f"Number of Iterations: {iterations}")
st.write(f"Final x Value: {x_values[-1]:.4f}")
st.write(f"Final Loss Value: {loss_function(x_values[-1]):.4f}")