import numpy as np
import matplotlib.pyplot as plt

# reshape a flat list/array into an (m, 1) column vector
list_to_vec = lambda x: np.reshape(x, (len(x), 1))

# hypothesis: predictions are linear in the parameters, h(x) = x @ theta
def h(x, theta):
    return x @ theta

# residuals: predictions minus targets, as a column vector
def error(x, y, theta):
    return h(x, theta) - list_to_vec(y)

# least-squares cost: J(theta) = (1/2) * sum of squared residuals
def cost(x, y, theta):
    diff = error(x, y, theta)
    return np.sum(diff.T @ diff) / 2

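# Quick sanity check (a sketch with made-up numbers, not in the original
# paste): with a perfect fit the residuals vanish, so the cost must be 0.
_x_chk = np.array([[1.0, 1.0], [1.0, 2.0]])  # bias column plus one feature
_theta_chk = np.array([[1.0], [2.0]])        # intercept 1, slope 2
assert cost(_x_chk, [3.0, 5.0], _theta_chk) == 0.0
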
class LearningAlgorithms:
    alpha = 0.01       # learning rate
    threshold = 1e-4   # convergence tolerance on the parameter updates

    @staticmethod
    def analytic_solution(x, y):
        # normal equations: theta = (X^T X)^(-1) X^T y
        return list_to_vec(np.linalg.inv(x.T @ x) @ x.T @ y)

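    # Aside (not in the original paste): forming an explicit inverse is
    # numerically fragile; np.linalg.solve(x.T @ x, x.T @ y) or
    # np.linalg.lstsq(x, y, rcond=None) are the usual safer alternatives.
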
    @staticmethod
    def batch_gradient_descent(x, y, theta):
        # batch rule: theta_j += alpha * sum_i (y_i - h(x_i)) * x_ij,
        # i.e. one full pass over the data per parameter update
        while True:
            max_step = 0.0
            for j in range(theta.shape[0]):
                summand = -error(x, y, theta).T @ list_to_vec(x[:, j])
                diff = LearningAlgorithms.alpha * np.sum(summand)
                theta[j] += diff
                max_step = max(max_step, abs(diff))
            # converged once every coordinate's update is below the tolerance
            if max_step < LearningAlgorithms.threshold:
                return theta

    @staticmethod
    def stochastic_gradient_descent(x, y, theta):
        # stochastic rule: update theta after every single example,
        # theta_j += alpha * (y_i - h(x_i)) * x_ij
        while True:
            for i in range(x.shape[0]):
                for j in range(theta.shape[0]):
                    diff = LearningAlgorithms.alpha * (y[i] - h(x[i], theta).item()) * x[i][j]
                    theta[j] += diff
            # converged once the most recent update is below the tolerance
            if abs(diff) < LearningAlgorithms.threshold:
                return theta


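# The paste expects a local 'data.txt'. As a convenience sketch (an
# assumption, not part of the original), synthesize a one-feature dataset
# when the file is missing; it is noise-free so the convergence test above
# can actually drive the updates below the threshold.
import os
if not os.path.exists('data.txt'):
    rng = np.random.default_rng(0)
    xs = rng.uniform(0.0, 5.0, size=100)
    np.savetxt('data.txt', np.column_stack((xs, 2.0 * xs + 1.0)))
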
# load the data from a text file;
# think of data as an m x n matrix (m examples, n columns)
data = np.loadtxt('data.txt')

# in every row, the input variables are the first n - 1 columns
x = data[:, :-1]

# a complete linear parameterization needs an intercept/bias term that is
# independent of x; we add a column x0 that is identically equal to one
x0 = np.ones((x.shape[0], 1))
x = np.hstack((x0, x))

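# Sanity print (a sketch; with the synthetic one-feature data above, the
# design matrix should be m x 2 after prepending the bias column).
print("Design matrix shape: {}".format(x.shape))
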
# in every row, the output variable is the last column
y = data[:, -1]

# theta holds the weights/parameters, initialized to all ones
theta = np.ones((x.shape[1], 1))
print("Initial Cost: {}".format(cost(x, y, theta)))
theta = LearningAlgorithms.stochastic_gradient_descent(x, y, theta)
print("Cost after learning: {}".format(cost(x, y, theta)))

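# For reference (a sketch, not in the original paste): the closed-form
# solution gives the cost floor that gradient descent should approach.
theta_exact = LearningAlgorithms.analytic_solution(x, y)
print("Cost at the analytic solution: {}".format(cost(x, y, theta_exact)))
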
# plotting assumes a single input feature; sort by x so the fitted line
# draws cleanly instead of zigzagging between unsorted points
order = np.argsort(x[:, 1])
plt.scatter(x[order, 1], y[order])
plt.plot(x[order, 1], h(x, theta)[order])
plt.show()