In [15]:
def L_layer_model(X, Y, layers_dims, activation, learning_rate=0.0075, num_iterations=3000, print_cost=False): #lr was 0.009
    """Train an L-layer neural network with batch gradient descent.

    Parameters
    ----------
    X : input data — presumably shape (n_x, number of examples); confirm against caller.
    Y : true labels for X.
    layers_dims : list of layer sizes (input layer included).
    activation : hidden-layer activation selector, passed through to the
        forward and backward passes (semantics defined by L_model_forward /
        L_model_backward elsewhere in this file).
    learning_rate : gradient-descent step size.
    num_iterations : number of optimization iterations.
    print_cost : if True, print the cost every 100 iterations.

    Returns
    -------
    parameters : learned parameters after training.
    costs : list with the cost of every iteration.
    """
    np.random.seed(1)                  # reproducible initialization
    costs = []                         # keep track of cost

    # Parameters initialization.
    parameters = initialize_parameters_deep(layers_dims)

    # Loop (gradient descent)
    for i in range(num_iterations):

        # Forward propagation: [LINEAR -> ACTIVATION]*(L-1) -> LINEAR -> SIGMOID.
        AL, caches = L_model_forward(X, parameters, activation)

        # Compute cost.
        cost = compute_cost(AL, Y)

        # Backward propagation.
        grads = L_model_backward(AL, Y, caches, activation)

        # Update parameters.
        parameters = update_parameters(parameters, grads, learning_rate)

        costs.append(cost)

        # Fix: print_cost was accepted but never used — report training
        # progress every 100 iterations when requested.
        if print_cost and i % 100 == 0:
            print("Cost after iteration %i: %f" % (i, cost))

    return parameters, costs