import numpy as np

# Convergence tolerances for gradient descent. The original paste used eps and
# eps0 as globals without defining them; these values are assumed.
eps = 1e-6
eps0 = 1e-8

# return_phi(X, n) is assumed to be defined elsewhere (a polynomial feature
# map / design matrix builder); a sketch of one possible version is given below.


def loss(X, t, w, lamb, n):
    # Regularized sum-of-squares error: data term plus an L2 penalty on the weights.
    e_d = np.power(t - np.dot(w, return_phi(X, n).T), 2).sum() / 2
    e_r = lamb * np.power(w, 2).sum() / 2
    return e_d + e_r


def gradient(X, t, w, lamb, n):
    # Gradient of the regularized loss with respect to the weight vector w.
    return -(t - np.dot(w, return_phi(X, n).T)).dot(return_phi(X, n)) + lamb * w


def gradient_descent(X, t, n, step, lamb):
    # Minimize the regularized loss by gradient descent, starting from small
    # random weights and stopping when successive iterates are close.
    loss_vals = []
    w_next = np.random.rand(n + 1).reshape((1, n + 1)) / 100
    cant_stop = True
    count = 0
    print(X.shape, t.shape, n, step, lamb, w_next.shape)
    while cant_stop:
        w_old = w_next
        w_next = w_old - step * gradient(X, t, w_old, lamb, n)
        loss_vals.append(loss(X, t, w_next, lamb, n))
        # Stop when the update is small relative to the current weight norm.
        if np.linalg.norm(w_old - w_next) < eps * np.linalg.norm(w_next) + eps0:
            cant_stop = False
        count += 1
        print(loss_vals[-1], count)

    return loss_vals, w_next
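
# The paste never defines return_phi. Below is a minimal sketch of what it is
# assumed to do (a polynomial design matrix with columns 1, x, ..., x^n for a
# 1-D input), followed by an illustrative call; the helper body, the sample_X /
# sample_t data, and the degree/step/lamb values are assumptions, not part of
# the original code.

def return_phi(X, n):
    # Hypothetical polynomial feature map: rows are [1, x, x^2, ..., x^n].
    return np.vander(np.asarray(X).ravel(), n + 1, increasing=True)


# Illustrative usage on synthetic 1-D regression data (assumed values).
sample_X = np.linspace(0, 1, 50)
sample_t = np.sin(2 * np.pi * sample_X) + 0.1 * np.random.randn(50)
loss_vals, w_fit = gradient_descent(sample_X, sample_t, n=3, step=0.01, lamb=0.01)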