Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
class NeuralNetwork:
    """A minimal feed-forward neural network with one hidden layer.

    Trained by plain gradient descent on the mean-squared error with
    logistic (sigmoid) activations on both the hidden and output layer.
    The output layer has a single node, so the network learns binary
    targets (e.g. XOR).
    """

    def __init__(self, eta, numrounds, hidden_nodes):
        self.eta = eta                    # learning rate
        self.numrounds = numrounds        # fit runs exactly this many iterations
        self.hidden_nodes = hidden_nodes  # number of nodes in the hidden layer
        # Weight matrices, populated by fit(); None until trained.
        self.syn0 = None
        self.syn1 = None

    def fit(self, X, y):
        """Train the network on inputs X (n_samples x n_features) and
        targets y (n_samples x 1 of 0/1 values).

        Discovers the input dimension from X, stores the learned weights
        on the instance for later use by predict(), and returns None.
        """
        X = np.asarray(X, dtype=float)
        y = np.asarray(y, dtype=float)
        cols = len(X[0])
        # Weights initialized uniformly in [-1, 1).
        # syn0: (n_features x hidden_nodes); syn1: (hidden_nodes x 1).
        self.syn0 = 2 * np.random.random((cols, self.hidden_nodes)) - 1
        self.syn1 = 2 * np.random.random((self.hidden_nodes, 1)) - 1
        for _ in range(self.numrounds):  # BUG FIX: xrange is Python 2 only
            # Forward pass.
            l0 = X
            l1 = self.logistic(np.dot(l0, self.syn0))
            l2 = self.logistic(np.dot(l1, self.syn1))
            # BUG FIX: the original formed the delta from (y - l2)**2,
            # which squares away the sign of the error and drives the
            # update in the wrong direction for half the samples.
            # d(MSE)/d(z2) = -(y - l2) * sigmoid'(z2) (up to a factor 2).
            l2_delta = -(y - l2) * self.logistic(np.dot(l1, self.syn1), True)
            # Backpropagate through syn1 into the hidden layer.
            l1_error = l2_delta.dot(self.syn1.T)
            l1_delta = l1_error * self.logistic(np.dot(l0, self.syn0), True)
            # Gradient-descent step.
            self.syn1 -= self.eta * l1.T.dot(l2_delta)
            self.syn0 -= self.eta * l0.T.dot(l1_delta)
        print("Output after training")
        print(l2)

    def predict(self, X):
        """Return predicted class labels (0 or 1) for each row of X.

        Runs a forward pass with the weights learned by fit() and
        thresholds the output-layer activation at 0.5. Must be called
        after fit().
        """
        X = np.asarray(X, dtype=float)
        l1 = self.logistic(np.dot(X, self.syn0))
        l2 = self.logistic(np.dot(l1, self.syn1))
        return (l2 >= 0.5).astype(int)

    def logistic(self, x, deriv=False):
        """Logistic sigmoid of pre-activation x; its derivative if deriv.

        Note x is the *pre-activation* value in both cases: the
        derivative branch recomputes sigma(x) and returns
        sigma(x) * (1 - sigma(x)).
        """
        if deriv:
            s = self.logistic(x)
            return s * (1 - s)
        return 1 / (1 + np.exp(-x))
# Training inputs for XOR: two binary inputs plus a constant bias column of 1s.
X = np.array([[0, 0, 1],
              [0, 1, 1],
              [1, 0, 1],
              [1, 1, 1]])

# Expected XOR output for each row of X.
y = np.array([[0],
              [1],
              [1],
              [0]])

# Guard the training run so importing this module does not trigger
# 120000 training iterations as a side effect.
if __name__ == "__main__":
    # learning rate 0.01, 120000 rounds, 4 hidden nodes
    nn = NeuralNetwork(0.01, 120000, 4)
    nn.fit(X, y)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement