Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # coding:utf-8
- import numpy as np
- import matplotlib.pyplot as plt
def plot(x, d, x1, x2, e, w, b):
    """Report the final mean absolute error, plot the error history, then
    draw the learned decision boundary over the two scattered classes.

    x  : (n, 2) input matrix;  d : (n,) labels in {0, 1}
    x1 : class-0 samples;  x2 : class-1 samples (scatter plot only)
    e  : per-update error history collected by the optimizer
    w, b : learned weight vector and bias
    """
    # Fix: the original used the Python 2 `print` statement, which is a
    # syntax error on Python 3; print() works on both.
    print(np.mean(np.abs(d - p_y_given_x(x, w, b))))
    plt.plot(e)
    plt.show()
    # Decision boundary: w[0]*x + w[1]*y + b = 0  =>  y = -b/w[1] - (w[0]/w[1])*x
    bx = np.arange(-6, 10, 0.1)
    by = -b / w[1] - w[0] / w[1] * bx
    plt.xlim([-5, 10])
    plt.ylim([-5, 9])
    plt.plot(bx, by)
    plt.scatter(x1[:, 0], x1[:, 1], c='g')
    plt.scatter(x2[:, 0], x2[:, 1], c='r')
    plt.show()
def p_y_given_x(x, w, b):
    """Logistic-regression posterior P(y=1 | x): sigmoid of the affine score."""
    activation = np.dot(x, w) + b
    return 1.0 / (1.0 + np.exp(-activation))
def grad(x, d, w, b):
    """Gradient of the mean error w.r.t. (w, b).

    Returns (w_grad, b_grad): the negative mean of the residual
    d - p(y|x), input-weighted for the weight gradient.
    """
    residual = d - p_y_given_x(x, w, b)
    dw = -np.mean(x.T * residual, axis=1)
    db = -np.mean(residual)
    return dw, db
def GD(x, d, w, b, e, eta=0.10, iteration=700):
    """Full-batch gradient descent.

    Performs `iteration` updates over the whole dataset; after each update
    the mean absolute prediction error is appended to `e`.
    Returns (e, w, b).
    """
    step = 0
    while step < iteration:
        dw, db = grad(x, d, w, b)
        w -= eta * dw
        b -= eta * db
        err = np.mean(np.abs(d - p_y_given_x(x, w, b)))
        e.append(err)
        step += 1
    return e, w, b
def SGD(x, d, w, b, e, eta=0.10, iteration=5, minibatch_size=10):
    """Plain minibatch stochastic gradient descent.

    Sweeps the dataset `iteration` times in contiguous minibatches;
    after every minibatch update the whole-dataset mean absolute error
    is appended to `e`.  Returns (e, w, b).
    """
    n_samples = x.shape[0]
    for _ in range(iteration):
        for start in range(0, n_samples, minibatch_size):
            stop = start + minibatch_size
            batch_x, batch_d = x[start:stop], d[start:stop]
            dw, db = grad(batch_x, batch_d, w, b)
            w -= eta * dw
            b -= eta * db
            e.append(np.mean(np.abs(d - p_y_given_x(x, w, b))))
    return e, w, b
def SGD_momentum(x, d, w, b, e, eta=0.10, mu=0.65, iteration=50, minibatch_size=10):
    """Minibatch SGD with classical momentum.

    Update rule: theta -= eta * grad - mu * (previous update step), i.e.
    the momentum term pushes the parameters further along the direction
    they moved last step.

    Fixes over the original:
    * The history lists stored references to `w`, which is mutated in
      place by `w -= ...`; both entries therefore aliased the same array
      and the weight-momentum term was always zero.  Copies are stored now.
    * The momentum term was *added* to the subtracted update
      (anti-momentum); classical momentum subtracts it, reinforcing the
      previous step direction.
    * The helper parameter formerly named `list` shadowed the builtin.
    """
    w_hist, b_hist = [w.copy()], [b]

    def momentum(history):
        # mu * (latest - previous) == mu * the previous update step
        return mu * (history[1] - history[0])

    for _ in range(iteration):
        for index in range(0, x.shape[0], minibatch_size):
            _x = x[index:index + minibatch_size]
            _d = d[index:index + minibatch_size]
            w_grad, b_grad = grad(_x, _d, w, b)
            if len(w_hist) > 1:
                w -= eta * w_grad - momentum(w_hist)
                b -= eta * b_grad - momentum(b_hist)
                w_hist.pop(0)
                b_hist.pop(0)
            else:
                # First update: no previous step to build momentum from.
                w -= eta * w_grad
                b -= eta * b_grad
            w_hist.append(w.copy())  # copy: w is mutated in place above
            b_hist.append(b)
            e.append(np.mean(np.abs(d - p_y_given_x(x, w, b))))
    return e, w, b
def SGD_adagrad(x, d, w, b, e, eta=0.10, iteration=50, minibatch_size=10, eps=1e-8):
    """Minibatch SGD with AdaGrad per-parameter learning rates.

    Accumulates squared gradients and scales each parameter's step by
    eta / (sqrt(accumulated) + eps).

    `eps` (new, default 1e-8, backward-compatible keyword) guards against
    division by zero: the original divided by sqrt(0) whenever the first
    accumulated squared gradient for a parameter was exactly zero.
    """
    wgrad2sum = np.zeros(x.shape[1])
    bgrad2sum = 0.0
    for _ in range(iteration):
        for index in range(0, x.shape[0], minibatch_size):
            _x = x[index:index + minibatch_size]
            _d = d[index:index + minibatch_size]
            w_grad, b_grad = grad(_x, _d, w, b)
            wgrad2sum += np.power(w_grad, 2)
            bgrad2sum += np.power(b_grad, 2)
            w -= (eta / (np.sqrt(wgrad2sum) + eps)) * w_grad
            b -= (eta / (np.sqrt(bgrad2sum) + eps)) * b_grad
            e.append(np.mean(np.abs(d - p_y_given_x(x, w, b))))
    return e, w, b
def main():
    """Build a two-class 2-D Gaussian dataset, fit logistic regression
    with minibatch SGD, and visualize the error curve and boundary."""
    n_features, n_per_class = 2, 10000
    # Class 0 centered at the origin, class 1 shifted to (5, 5).
    x1 = np.random.randn(n_per_class, n_features)
    x2 = np.random.randn(n_per_class, n_features) + np.array([5, 5])
    x = np.vstack((x1, x2))
    d = np.hstack((np.zeros(n_per_class), np.ones(n_per_class)))
    # Shuffle inputs and labels together so minibatches mix both classes.
    dataset = np.column_stack((x, d))
    np.random.shuffle(dataset)
    x, d = dataset[:, :2], dataset[:, 2]
    w, b = np.random.rand(n_features), np.random.random()
    # e, w, b = GD(x, d, w, b, list())
    e, w, b = SGD(x, d, w, b, list())
    # e, w, b = SGD_momentum(x, d, w, b, list())
    # e, w, b = SGD_adagrad(x, d, w, b, list())
    plot(x, d, x1, x2, e, w, b)


if __name__ == "__main__":
    main()
Add Comment
Please, Sign In to add comment