4. Train

After creating the graph, we create a session and run the graph inside it to train the model.

In [18]:
with tf.Session() as session:

    # Initialize all graph variables (W, b, ...) before training.
    session.run(init)

    # Write the graph definition for TensorBoard visualization:
    #   tensorboard --logdir='logistic_regression'
    summary_writer = tf.summary.FileWriter('logistic_regression', session.graph)

    # Track the loss per epoch, plus the final weight and bias, for plotting later.
    loss_plot = []
    weight_final = []
    bias_final = []

    # Training loop: one full-batch gradient step per epoch.
    for epoch in range(num_epochs):

        # Feed the full training set to the graph placeholders.
        feed_dict_train = {data: X_train, target: y_train}
        _, c, prediction_values = session.run(
            [optimizer, loss, prediction], feed_dict=feed_dict_train)

        # Save the loss result for this epoch.
        loss_plot.append(c)

        # Display logs every 1000 epochs.
        if epoch % 1000 == 0:
            print("Epoch:", '%04d' % (epoch + 1), "loss=", "{:.9f}".format(c),
                  "W=", session.run(W), "b=", session.run(b))

        # Write the merged summaries for this epoch so TensorBoard can plot them.
        summary_str = session.run(merged_summary_op, feed_dict=feed_dict_train)
        summary_writer.add_summary(summary_str, epoch)

    # Store our final weight and bias. These must be fetched while the
    # session is still open; afterwards they are plain NumPy arrays.
    weight_final = session.run(W)
    bias_final = session.run(b)

    # Close the writer so all pending event records are flushed to disk;
    # without this, the last summaries may never appear in TensorBoard.
    summary_writer.close()

    print("\nOptimization Finished!\n")
    print ("Train Accuracy:", accuracy.eval({data: X_train, target: y_train}))
    print ("Test Accuracy:", accuracy.eval({data: X_test, target: y_test}))
Epoch: 0001 loss= 0.704871058 W= [[0.24544136]
 [0.59906495]] b= [[0.0196642]]
Epoch: 1001 loss= 0.029219950 W= [[-1.2218993]
 [ 4.0775237]] b= [[-9.449447]]
Epoch: 2001 loss= 0.016918099 W= [[-0.7046857]
 [ 4.3757663]] b= [[-12.317694]]
Epoch: 3001 loss= 0.012234053 W= [[-0.42154652]
 [ 4.602633  ]] b= [[-14.107158]]
Epoch: 4001 loss= 0.009715495 W= [[-0.22854297]
 [ 4.7850795 ]] b= [[-15.423222]]

Optimization Finished!

Train Accuracy: 1.0
Test Accuracy: 1.0