After creating the graph, we run a session to train our model.
with tf.Session() as session:
    # Keep track of the loss, weights and biases for plotting later.
    loss_values = []
    weight_values = []
    # BUGFIX: was `bias_value = []` — never used; results are stored in
    # `bias_values` below, so name the list consistently.
    bias_values = []

    # Run the variable initializer built with the graph.
    session.run(init)

    # Visualize on TensorBoard:
    #   tensorboard --logdir='shallow_neural_net'
    summary_writer = tf.summary.FileWriter('shallow_neural_net', session.graph)

    # Feed dicts are loop-invariant (the placeholders always receive the same
    # arrays), so build them once outside the training loop.
    feed_train = {data: X_train, target: y_train}
    feed_test = {data: X_test, target: y_test}

    # Training loop.
    for epoch in range(training_epochs):
        # One optimization step on the training data; also fetch the current
        # loss and the network's output values.
        _, value, prediction_values = session.run(
            [optimizer, loss, values], feed_dict=feed_train)
        # Store the training loss so it can be plotted afterwards.
        loss_values.append(value)

        # Display logs every 50 epochs.
        # BUGFIX: the comment said "per 1000 epochs" while the code checks
        # `% 50`; also an unconditional duplicate print made the gate useless.
        if epoch % 50 == 0:
            print("Epoch:", '%04d' % (epoch + 1), "loss=", "{:.9f}".format(value))

        # Write summary logs for each epoch.
        summary_str = session.run(merged_summary_op, feed_train)
        summary_writer.add_summary(summary_str, epoch)

        # Evaluate on the test set.
        # BUGFIX: the original ran `optimizer` here as well, which performed a
        # gradient step on the TEST data (data leakage). Only evaluate.
        loss_test, prediction_values_test = session.run(
            [loss, values], feed_dict=feed_test)

    # Fetch the learned parameters from the output layer.
    weight_values = session.run(weights)
    bias_values = session.run(biases)

    print("\nOptimization Finished\n")
    print("Train Accuracy: {:.2f}%".format(accuracy.eval(feed_train)*100))
    print("Test Accuracy: {:.2f}%".format(accuracy.eval(feed_test)*100))