5. Training Time !!!

After creating the graph, we run a session to train our model.

In [19]:
with tf.Session() as session:

    # Track the loss at each epoch for plotting later.
    # (The original also pre-initialized `weight_values`/`bias_value` lists
    # here, but both were unconditionally overwritten below — removed.)
    loss_values = []

    # Run the variable initializer op built earlier in the graph.
    session.run(init)

    # Visualize on TensorBoard:
    #   tensorboard --logdir='shallow_neural_net'
    summary_writer = tf.summary.FileWriter('shallow_neural_net', session.graph)

    # The feed dicts are loop-invariant, so build them once up front.
    feed_train = {data: X_train, target: y_train}
    feed_test = {data: X_test, target: y_test}

    # Training loop.
    for epoch in range(training_epochs):

        # One optimization step on the training data; also fetch the
        # current loss and the network's output values.
        _, value, prediction_values = session.run(
            [optimizer, loss, values], feed_dict=feed_train)

        # Store the loss for plotting.
        loss_values.append(value)

        # Write TensorBoard logs for each epoch.
        # NOTE(fix): this was previously outside the loop, so only a single
        # summary (for the final epoch) was ever recorded.
        summary_str = session.run(merged_summary_op, feed_train)
        summary_writer.add_summary(summary_str, epoch)

        # Display logs every 50 epochs (the old comment said 1000,
        # contradicting the `% 50` check).
        if (epoch % 50) == 0:
            print("Epoch:", '%04d' % (epoch+1), "loss=", "{:.9f}".format(value))

    # Final epoch's loss.
    print("Epoch:", '%04d' % (epoch+1), "loss=", "{:.9f}".format(value))

    # Evaluate on the test set WITHOUT running the optimizer.
    # BUG FIX: the original ran `optimizer` in this call, which performed a
    # gradient step on the test data (test-set leakage) before measuring
    # test accuracy.
    loss_test, prediction_values_test = session.run(
        [loss, values], feed_dict=feed_test)

    # Pull the learned parameters out of the graph.
    weight_values = session.run(weights)
    bias_values = session.run(biases)

    print("\nOptimization Finished\n")
    print("Train Accuracy: {:.2f}%".format(accuracy.eval(feed_train)*100))
    print("Test Accuracy: {:.2f}%".format(accuracy.eval(feed_test)*100))

    # Flush pending summaries and release the event-file handle.
    summary_writer.close()
Epoch: 0001 loss= 0.693126500
Epoch: 0051 loss= 0.288471431
Epoch: 0101 loss= 0.061979480
Epoch: 0151 loss= 0.029581819
Epoch: 0201 loss= 0.018323844
Epoch: 0251 loss= 0.011925369
Epoch: 0301 loss= 0.007823243
Epoch: 0351 loss= 0.005666740
Epoch: 0400 loss= 0.004416701

Optimization Finished

Train Accuracy: 100.00%
Test Accuracy: 100.00%