Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
import numpy as np

# Number of training examples.
m = 100
# Design matrix: m samples, 2 features, each uniform in [0, 1).
X = np.random.rand(100, 2)
# Targets: linear function of the features plus uniform noise in [0, 1).
# BUG FIX: the original used `(X * 10).sum()`, which collapses the whole
# matrix to a single scalar — y then carried no per-sample signal at all.
# Summing along axis=1 gives the intended per-row value 10*x1 + 10*x2,
# keeping y with shape (m,).
y = (X * 10).sum(axis=1) + np.random.rand(m)
# Number of gradient-descent iterations.
# NOTE(review): `iter` shadows the Python builtin; kept as-is because other
# code in this file reads this global name.
iter = 100
# calculating the cost function
# Initial parameter column vector, starting at the origin; shape (2, 1).
theta = np.array([[0], [0]])
def h(X, theta):
    """Linear hypothesis: return the matrix product of X and theta."""
    return X @ theta
def cost(X, y, theta, m):
    """Mean-squared-error cost J(theta) = 1/(2m) * sum((X.theta - y)^2).

    X has shape (m, n_features), theta shape (n_features, 1), y is the
    target vector with m entries; returns a scalar float.
    """
    # BUG FIX: h(X, theta) is (m, 1) while y is (m,), so the original
    # `h(X, theta) - y` broadcast to an (m, m) matrix and the sum ran over
    # m*m spurious terms. Flatten both sides so the residual is length m.
    residual = h(X, theta).ravel() - np.asarray(y).ravel()
    return np.sum(residual ** 2) / (2 * m)
# Report the cost at the starting parameters, before any training step.
initial_cost = cost(X, y, theta, m)
print(initial_cost)
# One-variable gradient descent.
# Cost value recorded at the start of every iteration.
cost_vals = []
def grad_decent(X, y, theta, m, alpha, n_iters=100):
    """Run batch gradient descent on a 2-parameter linear model.

    Parameters
    ----------
    X : (m, 2) design matrix.
    y : target vector with m entries.
    theta : (2, 1) initial parameter column vector.
    m : number of training examples.
    alpha : learning rate.
    n_iters : number of iterations (default 100, matching the module-level
        ``iter`` the original read as a global — backward compatible).

    Returns the learned (2, 1) theta and the list of recorded cost values.
    Side effect: appends one cost per iteration to the module-level
    ``cost_vals`` list, as the original did.
    """
    theta1 = theta[0, 0]
    theta2 = theta[1, 0]
    for _ in range(n_iters):
        current = np.array([[theta1], [theta2]])
        cost_vals.append(cost(X, y, current, m))
        # BUG FIX: the original evaluated h(X, theta) with the STALE initial
        # theta on every iteration, so the gradient never tracked the updated
        # parameters and no real descent happened. It also subtracted the
        # (m,) vector y from the (m, 1) predictions, broadcasting to (m, m).
        # Use the current parameters and a flattened length-m residual, and
        # update both parameters simultaneously from the same gradient.
        residual = h(X, current).ravel() - np.asarray(y).ravel()
        step1 = alpha / m * np.sum(residual * X[:, 0])
        step2 = alpha / m * np.sum(residual * X[:, 1])
        theta1, theta2 = theta1 - step1, theta2 - step2
    return np.array([[theta1], [theta2]]), cost_vals
- grad_decent(X, y, theta, m, 0.0001)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement