import numpy as np

# Toy dataset: two (x, y) training points
X = [0.5, 2.5]
Y = [0.2, 0.9]

def f(w, b, x):
    # Sigmoid neuron: logistic function of the affine input w*x + b
    return 1.0 / (1.0 + np.exp(-(w*x + b)))

def error(w, b):
    # Total squared error over the whole dataset
    err = 0.0
    for x, y in zip(X, Y):
        fx = f(w, b, x)
        err += 0.5 * (fx - y) ** 2
    return err

def grad_b(w, b, x, y):
    # Partial derivative of the squared error w.r.t. b for one point
    fx = f(w, b, x)
    return (fx - y) * fx * (1 - fx)

def grad_w(w, b, x, y):
    # Partial derivative of the squared error w.r.t. w for one point
    fx = f(w, b, x)
    return (fx - y) * fx * (1 - fx) * x

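# For reference, the chain rule behind grad_w and grad_b, with
# z = w*x + b and fx = 1 / (1 + exp(-z)):
#   dE/dfx = (fx - y)          since E = 0.5 * (fx - y)**2
#   dfx/dz = fx * (1 - fx)     the sigmoid's derivative
#   dz/dw  = x,  dz/db = 1
# so dE/dw = (fx - y)*fx*(1 - fx)*x and dE/db = (fx - y)*fx*(1 - fx).
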
def Normal():
    # Vanilla (batch) gradient descent: sum the gradient over the whole
    # dataset, then take one step per epoch.
    w, b, eta, max_epochs = -2, -2, 1.0, 1000
    for i in range(max_epochs):
        dw, db = 0, 0
        for x, y in zip(X, Y):
            dw += grad_w(w, b, x, y)
            db += grad_b(w, b, x, y)
        w = w - eta * dw
        b = b - eta * db
    er = error(w, b)
    print("Error for normal gradient descent:\t", er)

def Momentum():
    # Momentum gradient descent: each step adds a gamma-weighted fraction
    # of the previous update to the current gradient step.
    w, b, eta, max_epochs, gamma = -2, -2, 1.0, 1000, 0.1
    update_w, update_b = 0, 0
    for i in range(max_epochs):
        dw, db = 0, 0
        for x, y in zip(X, Y):
            dw += grad_w(w, b, x, y)
            db += grad_b(w, b, x, y)
        update_w = gamma * update_w + eta * dw
        update_b = gamma * update_b + eta * db
        w = w - update_w
        b = b - update_b
    er = error(w, b)
    print("Error for momentum gradient descent:\t", er)

def Nesterov():
    # Nesterov accelerated gradient: evaluate the gradient at the
    # look-ahead point (parameters minus the momentum term), then update.
    w, b, eta, max_epochs, gamma = -2, -2, 1.0, 1000, 0.1
    update_w, update_b = 0, 0
    for i in range(max_epochs):
        dw, db = 0, 0
        w_lookahead = w - gamma * update_w
        b_lookahead = b - gamma * update_b
        for x, y in zip(X, Y):
            dw += grad_w(w_lookahead, b_lookahead, x, y)
            db += grad_b(w_lookahead, b_lookahead, x, y)
        update_w = gamma * update_w + eta * dw
        update_b = gamma * update_b + eta * db
        w = w - update_w
        b = b - update_b
    er = error(w, b)
    print("Error for Nesterov gradient descent:\t", er)

def Stochastic():
    # Stochastic gradient descent: update the parameters after every
    # single data point rather than once per epoch.
    w, b, eta, max_epochs = -2, -2, 1.0, 1000
    for i in range(max_epochs):
        for x, y in zip(X, Y):
            dw = grad_w(w, b, x, y)
            db = grad_b(w, b, x, y)
            w = w - eta * dw
            b = b - eta * db
    er = error(w, b)
    print("Error for Stochastic gradient descent:\t", er)

def Minibatch():
    # Mini-batch gradient descent: accumulate gradients over a batch of
    # two points, update once per batch, then reset the accumulators.
    w, b, eta, max_epochs = -2, -2, 1.0, 1000
    count = 0
    for i in range(max_epochs):
        dw, db = 0, 0
        for x, y in zip(X, Y):
            dw += grad_w(w, b, x, y)
            db += grad_b(w, b, x, y)
            count += 1
            if count % 2 == 0:  # batch size of 2
                w = w - eta * dw
                b = b - eta * db
                dw, db = 0, 0
    er = error(w, b)
    print("Error for Minibatch gradient descent:\t", er)

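# Summary of the update rules above (g = gradient of the total error E):
#   Normal:     theta <- theta - eta * g(theta)
#   Momentum:   u <- gamma*u + eta*g(theta);            theta <- theta - u
#   Nesterov:   u <- gamma*u + eta*g(theta - gamma*u);  theta <- theta - u
#   Stochastic: the Normal rule applied per data point
#   Minibatch:  the Normal rule applied per batch of two points
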
Normal()
Momentum()
Nesterov()
Stochastic()
Minibatch()