import numpy as np
import matplotlib.pyplot as plt

# render figures as SVG in the notebook
import matplotlib_inline
matplotlib_inline.backend_inline.set_matplotlib_formats('svg')
# the function to minimize
def fx(x):
    return np.cos(2*np.pi*x) + x**2

# its derivative:  d/dx [ cos(2*pi*x) + x^2 ] = -2*pi*sin(2*pi*x) + 2*x
def deriv(x):
    return -2*np.pi*np.sin(2*np.pi*x) + 2*x
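# Quick sanity check (my addition, not part of the original notebook): compare the
# analytic derivative against a finite-difference estimate. np.gradient uses central
# differences, so the two should agree closely on a fine grid.
xx = np.linspace(-1, 1, 2001)
print(np.max(np.abs(np.gradient(fx(xx), xx) - deriv(xx))))  # expect a small number (roughly 1e-4 or less)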
# define a grid of x values over which to evaluate the function
x = np.linspace(-1, 1, 2001)

# plot the function and its derivative
plt.plot(x, fx(x), x, deriv(x))
plt.xlim(x[[0, -1]])
plt.grid()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.legend(['y', 'dy'])
plt.show()
# random starting point drawn from the grid
localmin = np.random.choice(x, 1)
print(localmin)
# learning parameters
learning_rate = 0.01
training_epochs = 100
# run gradient descent
for i in range(training_epochs):
    grad = deriv(localmin)
    localmin = localmin - learning_rate * grad

print(localmin)
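# Optional brute-force check (my addition, not in the original notebook): the minimum
# of the sampled function values on the grid gives a reference. The function is even,
# so it has two equally deep minima (roughly at x = +/- 0.47); gradient descent ends up
# in whichever basin the random start lies in, so only the magnitudes need agree.
print('gradient descent estimate:', localmin)
print('grid-search minimum:      ', x[np.argmin(fx(x))])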
# plot the function and derivative, with the final estimate marked
plt.plot(x, fx(x), x, deriv(x))
plt.plot(localmin, deriv(localmin), 'ro')
plt.plot(localmin, fx(localmin), 'ro')

plt.xlim(x[[0, -1]])
plt.grid()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.legend(['f(x)', 'df', 'f(x) min'])
plt.title('Empirical local minimum: %s' % localmin[0])
plt.show()
# re-run gradient descent, this time storing the estimate and the gradient at each iteration

# random starting point drawn from the grid
localmin = np.random.choice(x, 1)

# learning parameters
learning_rate = 0.01
training_epochs = 100

modelparams = np.zeros((training_epochs, 2))
for i in range(training_epochs):
    grad = deriv(localmin)
    localmin = localmin - learning_rate * grad
    modelparams[i, 0] = localmin[0]
    modelparams[i, 1] = grad[0]
# plot the trajectories of the estimate and of the gradient
fig, ax = plt.subplots(1, 2, figsize=(12, 4))

for i in range(2):
    ax[i].plot(modelparams[:, i], 'o-')
    ax[i].set_xlabel('Iterations')
    ax[i].set_title(f'Final estimated minimum: {localmin[0]:.5f}')

ax[0].set_ylabel('Local minimum')
ax[1].set_ylabel('Derivative')

plt.show()
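# Exploratory sketch (my addition, not part of the original notebook): repeat gradient
# descent from many random starting points to see how the result depends on the
# initialization. Starts left of 0 should end near x = -0.47 and starts right of 0
# near x = +0.47; a start exactly at x = 0 would not move at all.
final_estimates = []
for start in np.random.choice(x, 50):
    lm = start
    for _ in range(training_epochs):
        lm = lm - learning_rate * deriv(lm)
    final_estimates.append(lm)

plt.hist(final_estimates, bins=40)
plt.xlabel('Final estimate')
plt.ylabel('Count')
plt.show()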
# repeat, but start exactly at x = 0, where the derivative is zero
# (x = 0 is a local maximum, so gradient descent cannot move away from it)
localmin = np.array([0.])

# learning parameters
learning_rate = 0.01
training_epochs = 100

modelparams = np.zeros((training_epochs, 2))
for i in range(training_epochs):
    grad = deriv(localmin)
    localmin = localmin - learning_rate * grad
    modelparams[i, 0] = localmin[0]
    modelparams[i, 1] = grad[0]
# plot the trajectories again: both panels stay flat because the gradient is zero at the starting point
fig, ax = plt.subplots(1, 2, figsize=(12, 4))

for i in range(2):
    ax[i].plot(modelparams[:, i], 'o-')
    ax[i].set_xlabel('Iterations')
    ax[i].set_title(f'Final estimated minimum: {localmin[0]:.5f}')

ax[0].set_ylabel('Local minimum')
ax[1].set_ylabel('Derivative')

plt.show()
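# Side note (my addition): the run above is stuck because the derivative is exactly
# zero at x = 0. A tiny perturbation of the starting value is enough for gradient
# descent to slide into one of the two minima.
localmin = np.array([0.]) + 0.001
for i in range(training_epochs):
    localmin = localmin - learning_rate * deriv(localmin)
print(localmin)  # should now be close to +0.47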