def forward_prop_this_layer(self, A_prev, W_curr, b_curr, activation_function):
    # Linear step: Z = W . A_prev + b
    z_curr = np.dot(W_curr, A_prev) + b_curr
    # Select the activation function for this layer
    if activation_function == 'relu':
        activation = relu
    elif activation_function == 'sigmoid':
        activation = sigmoid
    else:
        raise ValueError(f"{activation_function} is not supported; only 'sigmoid' and 'relu' are supported")
    # Return both the activated output and the pre-activation (kept for backprop)
    return activation(z_curr), z_curr

def forward(self, X):
    cache = {}
    A_current = X
    for layer_id_prev, layer in enumerate(self.architecture):
        current_layer_id = layer_id_prev + 1
        A_previous = A_current
        activation = layer['activation']
        W_curr = self.params['W' + str(current_layer_id)]
        b_curr = self.params['b' + str(current_layer_id)]
        # Propagate the previous layer's output through this layer
        A_current, Z_curr = self.forward_prop_this_layer(A_previous, W_curr,
                                                         b_curr, activation)
        # Cache layer inputs and pre-activations for the backward pass
        cache['A' + str(layer_id_prev)] = A_previous
        cache['Z' + str(current_layer_id)] = Z_curr
    return A_current, cache
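
# Note: the code above assumes numpy is imported as np and that relu and sigmoid
# are defined elsewhere in the paste. A minimal sketch of those assumed
# element-wise activation helpers (standard definitions, not part of the
# original snippet):

import numpy as np

def relu(z):
    # Element-wise max(0, z)
    return np.maximum(0, z)

def sigmoid(z):
    # Element-wise logistic function 1 / (1 + exp(-z))
    return 1.0 / (1.0 + np.exp(-z))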