"""
This script demonstrates the optimization for a oblong quadratic function
using stochastic gradient descent (SGD), SGD with classical momentum (CM),
and SGD with Nestrov's accelerated gradient (NAG).
You will require matplotlib and numpy to run this example.
Danushka Bollegala.
20th June 2015.
"""
from matplotlib import pyplot as plt
import numpy
import sys
# function is f(x,y) = alpha * x ** 2 + beta * y ** 2
alpha = 100
beta = 1
eta = 0.01 # learning rate.
mu = 0.9 # momentum coefficient
N = 100 # no. of iterations
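# The ratio alpha / beta = 100 makes the level sets of f highly elongated,
# which is the oblong setting this script is designed to illustrate.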
def f(x):
    return (alpha * x[0] ** 2) + (beta * x[1] ** 2)

def g(x):
    return numpy.array([2 * alpha * x[0], 2 * beta * x[1]])

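# Note: with eta = 0.01 and alpha = 100, each SGD step multiplies x[0] by
# (1 - 2 * eta * alpha) = -1, so plain SGD oscillates between +1 and -1
# along the steep axis and never converges in that direction.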
def sgd(x):
    L = [x]
    for i in range(N):
        x = x - eta * g(x)
        print(i, x)
        L.append(x)
    return L

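# Classical momentum accumulates a velocity v. Along the steep axis the
# alternating gradients partially cancel inside v, damping the oscillation,
# while v builds up steadily along the shallow axis.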
def cm(x):
    L = [x]
    v = numpy.zeros(2)
    for i in range(N):
        v = mu * v - eta * g(x)
        x = x + v
        print(i, x)
        L.append(x)
    return L

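# NAG evaluates the gradient at the look-ahead point x + mu * v rather than
# at x itself, which lets it correct the velocity before it overshoots.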
def nag(x):
    L = [x]
    v = numpy.zeros(2)
    for i in range(N):
        v = mu * v - eta * g(x + mu * v)
        x = x + v
        print(i, x)
        L.append(x)
    return L

def plot(datapoints, s, l):
    xpoints = []
    ypoints = []
    for x in datapoints:
        xpoints.append(x[0])
        ypoints.append(x[1])
    plt.plot(xpoints, ypoints, s, label=l)

def process(n):
    x = numpy.array([1.0, 100.0])
    if n == 0:
        sgd_points = sgd(x)
        plot(sgd_points, 'go-', "SGD")
    if n == 1:
        cm_points = cm(x)
        plot(cm_points, 'ro-', "CM")
    if n == 2:
        nag_points = nag(x)
        plot(nag_points, 'bo-', "NAG")
    plt.legend()
    plt.show()

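# Usage: python <this script> n, where n selects the method to plot:
#   0 = SGD, 1 = CM, 2 = NAG.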
if __name__ == '__main__':
    process(int(sys.argv[1]))