Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
def linearRegression_Loss(X, t):
    """Initialize a zero weight vector for linear regression on design matrix X.

    Parameters
    ----------
    X : np.ndarray
        Design matrix of shape (n_samples, n_features).
    t : array-like
        Target values (currently unused — the gradient-descent loop
        below is not yet implemented).

    Returns
    -------
    np.ndarray
        Weight vector `beta` of shape (n_features,), all zeros.
    """
    # Max iterations, or step count, for gradient descent.
    # NOTE(review): the descent loop itself is not implemented yet, so
    # maxIters and epsilon are currently unused placeholders.
    maxIters = 100
    # Step size, or learning rate
    epsilon = .01

    # Bug fixes vs. original:
    #  - `X[i]` used an undefined index `i`; the feature count is X.shape[1].
    #  - `.shape()` is an attribute, not a method.
    #  - `np.array(shape)` would build an array *containing* the dims;
    #    np.zeros builds a zero weight vector of that length, which also
    #    replaces the undefined `initializeWeights` helper.
    n_features = X.shape[1]
    beta = np.zeros(n_features)

    # str() added: the original concatenated str + int, a TypeError.
    print("Features length: " + str(n_features))
    print("Weights length: " + str(len(beta)))
    return beta
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement