import numpy as np

# Methods of a neural-network class; relu and sigmoid are assumed to be
# defined elsewhere in the module.

def forward_prop_this_layer(self, A_prev, W_curr, b_curr, activation_function):
    # Linear step: Z = W . A_prev + b
    z_curr = np.dot(W_curr, A_prev) + b_curr

    # Pick the activation function for this layer
    if activation_function == 'relu':
        activation = relu
    elif activation_function == 'sigmoid':
        activation = sigmoid
    else:
        raise Exception(f"{activation_function} is currently not supported. "
                        "Only sigmoid and relu are supported.")

    # Return both the activated output and the pre-activation value
    return activation(z_curr), z_curr

def forward(self, X):
    cache = {}
    A_current = X
    for layer_id_prev, layer in enumerate(self.architecture):
        current_layer_id = layer_id_prev + 1

        A_previous = A_current
        activation = layer['activation']

        # Weights and bias for the current layer
        W_curr = self.params['W' + str(current_layer_id)]
        b_curr = self.params['b' + str(current_layer_id)]

        A_current, Z_curr = self.forward_prop_this_layer(A_previous, W_curr,
                                                         b_curr, activation)

        # Cache activations and pre-activations for backpropagation
        cache['A' + str(layer_id_prev)] = A_previous
        cache['Z' + str(current_layer_id)] = Z_curr

    return A_current, cache
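
# The snippet references relu and sigmoid but never defines them, and it
# expects the enclosing class to carry self.architecture (a list of layer
# dicts with an 'activation' key) and self.params (weights 'W1', 'b1', ...).
# Below is a minimal sketch of those assumed activation helpers; the exact
# names and shapes are assumptions, not part of the original paste.

import numpy as np

def sigmoid(z):
    # Element-wise logistic function
    return 1 / (1 + np.exp(-z))

def relu(z):
    # Element-wise rectified linear unit
    return np.maximum(0, z)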