import numpy as np

# Toy data: two (x, y) pairs for a single sigmoid neuron
X = [0.5, 2.5]
Y = [0.2, 0.9]

# Sigmoid neuron: f(x) = 1 / (1 + exp(-(w*x + b)))
def f(w, b, x):
    return 1.0 / (1.0 + np.exp(-(w * x + b)))
# Squared-error loss summed over the dataset (the unused x parameter in
# the original signature was shadowed by the loop variable, so it is dropped)
def error(w, b):
    err = 0.0
    for x, y in zip(X, Y):
        fx = f(w, b, x)
        err += 0.5 * (fx - y) ** 2
    return err
# Chain rule for the squared loss through the sigmoid:
# d/db 0.5*(f(x)-y)^2 = (f(x)-y) * f(x) * (1-f(x))
def grad_b(w, b, x, y):
    fx = f(w, b, x)
    return (fx - y) * fx * (1 - fx)

# d/dw picks up the extra factor x from w*x + b
def grad_w(w, b, x, y):
    fx = f(w, b, x)
    return (fx - y) * fx * (1 - fx) * x
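
# A quick way to sanity-check the two analytic gradients above is a
# central-difference comparison against error(). The helper below is a
# minimal sketch (check_grads is a hypothetical name, not part of the
# original paste).
def check_grads(w, b, h=1e-6):
    num_dw = (error(w + h, b) - error(w - h, b)) / (2 * h)
    num_db = (error(w, b + h) - error(w, b - h)) / (2 * h)
    ana_dw = sum(grad_w(w, b, x, y) for x, y in zip(X, Y))
    ana_db = sum(grad_b(w, b, x, y) for x, y in zip(X, Y))
    print("dw analytic:", ana_dw, "numeric:", num_dw)
    print("db analytic:", ana_db, "numeric:", num_db)

# e.g. check_grads(-2, -2) should print matching pairs at the starting point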
def Normal():
    # Vanilla (batch) gradient descent: one update per epoch using the
    # gradient accumulated over the full dataset
    w, b, eta, max_epochs = -2, -2, 1.0, 1000
    for _ in range(max_epochs):
        dw, db = 0, 0
        for x, y in zip(X, Y):
            dw += grad_w(w, b, x, y)
            db += grad_b(w, b, x, y)
        w = w - eta * dw
        b = b - eta * db
    er = error(w, b)
    print("Error for normal gradient descent:\t", er)
def Momentum():
    # Momentum gradient descent: u_t = gamma*u_{t-1} + eta*grad, then step
    # by u_t, so past gradients keep contributing to each update
    w, b, eta, max_epochs, gamma = -2, -2, 1.0, 1000, 0.1
    update_w, update_b = 0, 0
    for _ in range(max_epochs):
        dw, db = 0, 0
        for x, y in zip(X, Y):
            dw += grad_w(w, b, x, y)
            db += grad_b(w, b, x, y)
        update_w = gamma * update_w + eta * dw
        update_b = gamma * update_b + eta * db
        w = w - update_w
        b = b - update_b
    er = error(w, b)
    print("Error for momentum gradient descent:\t", er)
def Nesterov():
    # Nesterov accelerated gradient: evaluate the gradient at the
    # look-ahead point (w - gamma*u_w, b - gamma*u_b), then update
    w, b, eta, max_epochs, gamma = -2, -2, 1.0, 1000, 0.1
    update_w, update_b = 0, 0
    for _ in range(max_epochs):
        dw, db = 0, 0
        w_lookahead = w - gamma * update_w
        b_lookahead = b - gamma * update_b
        for x, y in zip(X, Y):
            dw += grad_w(w_lookahead, b_lookahead, x, y)
            db += grad_b(w_lookahead, b_lookahead, x, y)
        update_w = gamma * update_w + eta * dw
        update_b = gamma * update_b + eta * db
        w = w - update_w
        b = b - update_b
    er = error(w, b)
    print("Error for Nesterov gradient descent:\t", er)
def Stochastic():
    # Stochastic gradient descent: update w and b after every single data
    # point, rather than once per epoch on the accumulated gradient
    w, b, eta, max_epochs = -2, -2, 1.0, 1000
    for _ in range(max_epochs):
        for x, y in zip(X, Y):
            dw = grad_w(w, b, x, y)
            db = grad_b(w, b, x, y)
            w = w - eta * dw
            b = b - eta * db
    er = error(w, b)
    print("Error for Stochastic gradient descent:\t", er)
def Minibatch():
    # Mini-batch gradient descent: accumulate gradients over batches of
    # size 2 and update once per batch, resetting the accumulators
    w, b, eta, max_epochs = -2, -2, 1.0, 1000
    batch_size, count = 2, 0
    for _ in range(max_epochs):
        dw, db = 0, 0
        for x, y in zip(X, Y):
            dw += grad_w(w, b, x, y)
            db += grad_b(w, b, x, y)
            count += 1
            if count % batch_size == 0:
                w = w - eta * dw
                b = b - eta * db
                dw, db = 0, 0
    er = error(w, b)
    print("Error for Minibatch gradient descent:\t", er)
Normal()
Momentum()
Nesterov()
Stochastic()
Minibatch()
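
# To compare how fast the variants converge (rather than only the final
# error each one prints), one minimal sketch is to re-run vanilla descent
# while recording the loss each epoch; run_gd_with_history is a
# hypothetical helper, not part of the original paste.
def run_gd_with_history(eta=1.0, max_epochs=1000):
    w, b, history = -2, -2, []
    for _ in range(max_epochs):
        dw = sum(grad_w(w, b, x, y) for x, y in zip(X, Y))
        db = sum(grad_b(w, b, x, y) for x, y in zip(X, Y))
        w, b = w - eta * dw, b - eta * db
        history.append(error(w, b))
    return history

# e.g. print(run_gd_with_history()[::100]) samples the loss every 100 epochs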