Not a member of Pastebin yet? Sign up — it unlocks many cool features!
def model(X, Y, layers_dims, learning_rate=0.01, num_iterations=1000,
          print_cost=True, hidden_layers_activation_fn="relu",
          initialization_method="he"):
    """Train an L-layer neural network by batch gradient descent.

    Runs forward propagation, cost computation, backward propagation and a
    parameter update for `num_iterations` iterations, then plots the cost
    curve sampled every 100th iteration.

    Parameters
    ----------
    X : input data (features).
    Y : target labels.
    layers_dims : sequence of layer sizes, input layer first.
    learning_rate : gradient-descent step size.
    num_iterations : number of training iterations.
    print_cost : if True, print the cost every 100 iterations.
    hidden_layers_activation_fn : activation name passed to the forward and
        backward helpers (e.g. "relu").
    initialization_method : "zeros", "random", or anything else (e.g. "he",
        "xavier"), which is forwarded to `initialize_parameters_he_xavier`.

    Returns
    -------
    parameters : dict of trained weights and biases.
    """
    # Fixed seed so repeated runs are comparable across initialization methods.
    np.random.seed(1)

    # Costs sampled every 100th iteration, for the plot below.
    cost_list = []

    # Choose the weight-initialization scheme. Any method other than
    # "zeros"/"random" is handled by the he/xavier initializer, which
    # receives the method name itself.
    if initialization_method == "zeros":
        parameters = initialize_parameters_zeros(layers_dims)
    elif initialization_method == "random":
        parameters = initialize_parameters_random(layers_dims)
    else:
        parameters = initialize_parameters_he_xavier(
            layers_dims, initialization_method)

    for i in range(num_iterations):
        # Forward pass over all L layers; caches are kept for backprop.
        AL, caches = L_model_forward(
            X, parameters, hidden_layers_activation_fn)
        cost = compute_cost(AL, Y)
        # Backward pass to get gradients, then one gradient-descent step.
        grads = L_model_backward(AL, Y, caches, hidden_layers_activation_fn)
        parameters = update_parameters(parameters, grads, learning_rate)

        # Print and record the cost on the SAME schedule (every 100th
        # completed iteration), so the printed values match the plotted
        # curve. (The original printed at (i+1) % 100 == 0 but recorded at
        # i % 100 == 0, sampling different iterations.)
        if (i + 1) % 100 == 0:
            if print_cost:
                print("The cost after {} iterations is: {}".format(
                    i + 1, cost))
            cost_list.append(cost)

    # Plot the training-cost curve; one point per 100 iterations.
    plt.figure(figsize=(12, 8))
    plt.plot(cost_list)
    plt.xlabel("Iterations (per hundreds)", fontsize=14)
    plt.ylabel("Cost", fontsize=14)
    plt.title(
        "Cost curve: learning rate = {} and {} initialization method".format(
            learning_rate, initialization_method), y=1.05, fontsize=16)
    # NOTE(review): the figure is built but never shown or saved here —
    # presumably the caller invokes plt.show(); confirm.

    return parameters
Add Comment
Please sign in to add a comment.