import numpy as np


def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))


def sigmoid_prime(x):
    return sigmoid(x) * (1.0 - sigmoid(x))


def tanh(x):
    return np.tanh(x)


def tanh_prime(x):
    return 1.0 - np.tanh(x) ** 2


def relu(x):
    return np.maximum(x, 0)


def relu_prime(x):
    # Derivative of ReLU: 1 where x > 0, 0 elsewhere.
    # Work on a copy so the caller's array is not modified in place.
    d = np.zeros_like(x, dtype=float)
    d[x > 0] = 1.0
    return d

class Layer:

    def __init__(self, dim, id, act, act_prime, isoutputLayer=False):
        # Weights initialized uniformly in [-1, 1)
        self.weight = 2 * np.random.random(dim) - 1
        self.delta = None
        self.A = None
        self.activation = act
        self.activation_prime = act_prime
        self.isoutputLayer = isoutputLayer
        self.id = id

    def forward(self, x):
        z = np.dot(x, self.weight)
        self.A = self.activation(z)
        self.dZ = np.atleast_2d(self.activation_prime(z))
        return self.A

    def backward(self, y, rightLayer):
        if self.isoutputLayer:
            # Output layer: error is the difference between prediction and target.
            error = self.A - y
            self.delta = np.atleast_2d(error * self.dZ)
        else:
            # Hidden layer: propagate the delta of the layer to the right
            # back through its weights.
            self.delta = np.atleast_2d(
                np.dot(rightLayer.delta, rightLayer.weight.T) * self.dZ)
        return self.delta

    def update(self, learning_rate, left_a):
        # Gradient-descent step: weight gradient is (input activations)^T . delta
        a = np.atleast_2d(left_a)
        d = np.atleast_2d(self.delta)
        ad = a.T.dot(d)
        self.weight -= learning_rate * ad

class NeuralNetwork:

    def __init__(self, layersDim, activation='tanh'):
        if activation == 'sigmoid':
            self.activation = sigmoid
            self.activation_prime = sigmoid_prime
        elif activation == 'tanh':
            self.activation = tanh
            self.activation_prime = tanh_prime
        elif activation == 'relu':
            self.activation = relu
            self.activation_prime = relu_prime

        # Hidden layers: one extra unit per layer serves as the bias.
        self.layers = []
        for i in range(1, len(layersDim) - 1):
            dim = (layersDim[i - 1] + 1, layersDim[i] + 1)
            self.layers.append(Layer(dim, i, self.activation, self.activation_prime))

        # Output layer
        dim = (layersDim[i] + 1, layersDim[i + 1])
        self.layers.append(Layer(dim, len(layersDim) - 1, self.activation, self.activation_prime, True))

    def fit(self, X, y, learning_rate=0.1, epochs=10000):
        # Add a column of ones to X: this is the bias unit of the input layer.
        ones = np.atleast_2d(np.ones(X.shape[0]))
        X = np.concatenate((ones.T, X), axis=1)

        for k in range(epochs):
            # Forward pass through every layer
            a = X
            for l in range(len(self.layers)):
                a = self.layers[l].forward(a)

            # Backward pass: output layer first, then hidden layers right to left
            delta = self.layers[-1].backward(y, None)
            for l in range(len(self.layers) - 2, -1, -1):
                delta = self.layers[l].backward(delta, self.layers[l + 1])

            # Update each layer with the activations of the layer to its left
            a = X
            for layer in self.layers:
                layer.update(learning_rate, a)
                a = layer.A

    def predict(self, x):
        # Prepend the bias unit, then run a forward pass.
        a = np.concatenate((np.ones(1).T, np.array(x)), axis=0)
        for l in range(0, len(self.layers)):
            a = self.layers[l].forward(a)
        return a

if __name__ == '__main__':

    # XOR training data (the four input patterns, repeated three times)
    X = np.array([[0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1],
                  [0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1],
                  [0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1]])

    y = np.array([[0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0]]).T

    # tanh
    nn = NeuralNetwork([2, 3, 5, 8, 1], activation='tanh')
    nn.fit(X, y, learning_rate=0.1, epochs=10000)
    print("\n\nResult with tanh")
    for e in X:
        print(e, nn.predict(e))

    # sigmoid
    nn = NeuralNetwork([2, 3, 4, 1], activation='sigmoid')
    nn.fit(X, y, learning_rate=0.3, epochs=20000)
    print("\n\nResult with sigmoid")
    for e in X:
        print(e, nn.predict(e))

    # relu
    nn = NeuralNetwork([2, 3, 4, 1], activation='relu')
    nn.fit(X, y, learning_rate=0.1, epochs=50000)
    print("\n\nResult with relu")
    for e in X:
        print(e, nn.predict(e))
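
# Note: predict() returns the raw output activation, not a class label.
# A minimal sketch (not part of the original paste) of turning those raw
# outputs into 0/1 XOR labels by thresholding at 0.5; the helper name
# predict_labels is an assumption for illustration only.
def predict_labels(nn, X):
    # nn is a trained NeuralNetwork as defined above; X holds raw 2-feature rows.
    return np.array([1 if nn.predict(e)[0] > 0.5 else 0 for e in X])

# Example usage: after nn.fit(X, y, ...), predict_labels(nn, X) should
# approximate y.ravel() if training converged.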