Untitled (a guest, Sep 25th, 2017)
import numpy as np


def compute_error_for_points(c, m, points):
    """Mean squared error of the line y = m*x + c over all points."""
    total_error = 0
    # Sum the squared vertical distance from each point to the line
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        total_error += (y - (m * x + c)) ** 2
    return total_error / float(len(points))


def step_gradient(c_current, m_current, points, learning_rate):
    """One step of batch gradient descent on the MSE.

    Gradients of E = (1/N) * sum((y - (m*x + c))**2):
        dE/dc = -(2/N) * sum(y - (m*x + c))
        dE/dm = -(2/N) * sum(x * (y - (m*x + c)))
    """
    c_gradient = 0
    m_gradient = 0
    N = float(len(points))
    # Accumulate the partial derivatives over every point
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        c_gradient += -(2 / N) * (y - ((m_current * x) + c_current))
        m_gradient += -(2 / N) * x * (y - ((m_current * x) + c_current))
    # Step both parameters against their gradients
    new_c = c_current - (learning_rate * c_gradient)
    new_m = m_current - (learning_rate * m_gradient)
    return [new_c, new_m]


def gradient_descent_runner(points, starting_c, starting_m, learning_rate, num_iterations):
    """Run num_iterations steps of gradient descent from the given start."""
    c = starting_c
    m = starting_m
    for i in range(num_iterations):
        c, m = step_gradient(c, m, np.array(points), learning_rate)
    return [c, m]


def run():
    points = np.genfromtxt('data.csv', delimiter=',')
    # Hyperparameter: step size for gradient descent
    learning_rate = 0.0001
    # Initial guess for the line y = mx + c
    initial_c = 0
    initial_m = 0
    num_iterations = 1000
    # Fit m and c by gradient descent
    [c, m] = gradient_descent_runner(points, initial_c, initial_m, learning_rate, num_iterations)
    error = compute_error_for_points(c, m, points)
    print("Optimized after {0} iterations: m = {1}, c = {2} and error = {3}".format(num_iterations, m, c, error))


if __name__ == '__main__':
    run()
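The paste never shows data.csv, so its layout is an assumption: two comma-separated numeric columns, x then y, with no header row (the shape that points[i, 0] and points[i, 1] index into). A minimal sketch that writes matching synthetic data and sanity-checks the fit against NumPy's closed-form least squares:

import numpy as np

# Hypothetical data file for the script above: 100 points scattered
# around y = 1.5x + 4, written as headerless "x,y" rows.
rng = np.random.default_rng(0)
x = rng.uniform(0, 100, size=100)
y = 1.5 * x + 4 + rng.normal(0, 8, size=100)
np.savetxt('data.csv', np.column_stack([x, y]), delimiter=',')

# Closed-form check: polyfit of degree 1 returns [slope, intercept],
# which the gradient-descent result should approach.
print(np.polyfit(x, y, 1))

With learning_rate = 0.0001 and 1000 iterations, expect m to land near the true slope while c changes only slowly: the intercept gradient is not scaled by x, so for x values in the tens it updates far more gradually than m.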