import numpy as np

m = 100
X = np.random.rand(m, 2)
# targets: roughly y = 10*x1 + 10*x2 plus uniform noise; sum over axis 1
# (summing over all elements would collapse y to a scalar) and keep y as
# an (m, 1) column so it broadcasts correctly against h(X, theta)
y = (X * 10).sum(axis=1, keepdims=True) + np.random.rand(m, 1)
n_iters = 100  # renamed from `iter`, which shadows the Python builtin

# hypothesis: linear model h(X) = X @ theta
theta = np.array([[0.0], [0.0]])
def h(X, theta):
    return np.matmul(X, theta)

# squared-error cost: J(theta) = 1/(2m) * sum((h(X, theta) - y)**2)
def cost(X, y, theta, m):
    return 1 / (2 * m) * np.sum((h(X, theta) - y) ** 2)

print(cost(X, y, theta, m))
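# Sanity check (an addition, not in the original paste): the gradient of J is
# (1/m) * X.T @ (h(X, theta) - y), so the whole gradient vector can be computed
# in one vectorized step; it should match the per-parameter sums used below.
grad = 1 / m * np.matmul(X.T, h(X, theta) - y)  # shape (2, 1)
print(grad)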
# gradient descent, updating each of the two parameters separately
def grad_descent(X, y, theta, m, alpha):
    theta1 = theta[0, 0]
    theta2 = theta[1, 0]
    cost_vals = []
    for i in range(n_iters):
        theta_cur = np.array([[theta1], [theta2]])
        cost_vals.append(cost(X, y, theta_cur, m))
        # use the *current* parameters for the error term (reusing the initial
        # `theta` here would mean the updates never actually move)
        error = (h(X, theta_cur) - y).ravel()
        theta1 = theta1 - alpha / m * np.sum(error * X[:, 0])
        theta2 = theta2 - alpha / m * np.sum(error * X[:, 1])
        # print(f"Iteration {i}: ", theta1, ", ", theta2)

    return np.array([[theta1], [theta2]]), cost_vals

theta_opt, cost_vals = grad_descent(X, y, theta, m, 0.0001)
print(theta_opt)
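# Cross-check (an addition, not in the original paste; assumes the no-intercept
# model above): compare against the closed-form least-squares fit. With
# alpha = 0.0001 and only 100 iterations, gradient descent should still be far
# from this optimum, so expect a large gap unless alpha or n_iters is raised.
theta_lstsq, *_ = np.linalg.lstsq(X, y, rcond=None)
print("gradient descent:", theta_opt.ravel())
print("least squares:   ", theta_lstsq.ravel())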