Advertisement
Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
def logistic_regression(X, y, eta=0.001, iter=100, alpha=0.5):
    """Fit a logistic regression model by L2-regularized gradient descent.

    Args:
        X: (m, n) feature matrix; column 0 is treated as the bias/intercept
            column and is excluded from regularization (see theta[1:] below).
        y: (m,) binary target vector.
        eta: learning rate.
        iter: number of gradient steps.
        alpha: L2 regularization strength (applied to theta[1:] only).

    Returns:
        theta: (n,) final weight estimates.
        losses: (iter,) loss recorded at every iteration.
        thetas: (iter, n) weight vector recorded at every iteration.
    """
    _, n = X.shape
    theta = np.random.uniform(size=n)
    losses = np.zeros(iter)
    # BUG FIX: the history buffer must have n columns, not a hard-coded 3 —
    # the original np.zeros(iter*3).reshape(iter, 3) broke for any n != 3.
    thetas = np.zeros((iter, n))
    for i in range(iter):
        output = sigmoid(X @ theta)
        # Penalty excludes the intercept weight theta[0].
        loss = binary_cross_entropy(output, y) + alpha / 2 * np.sum(theta[1:] ** 2)
        if i % 10 == 0:
            print(f'\r{loss}', end='')
        # (y - output) @ X is the descent direction for the unregularized BCE.
        # BUG FIX: the penalty's contribution to that direction is
        # -alpha * theta[1:]; the original used `+=`, which pushed the weights
        # AWAY from zero instead of shrinking them.
        grad = (y - output) @ X
        grad[1:] -= alpha * theta[1:]
        theta += eta * grad
        losses[i] = loss
        thetas[i] = theta.copy()
    return theta, losses, thetas
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement