In [9]:
# Inspect the learned parameters of our model.
# For each Dense layer, get_weights() returns a [weights, biases] pair;
# unpack it once per layer instead of calling get_weights() twice.

# First hidden layer (comment previously mislabeled "Input layer")
weights0, biases0 = model.layers[0].get_weights()
print("First hidden layer weights",weights0.shape,":\n",weights0)
print("First hidden layer biases",biases0.shape,":\n",biases0)

# Second hidden layer
weights1, biases1 = model.layers[1].get_weights()
print("\nSecond hidden layer weights",weights1.shape,":\n",weights1)
print("Second hidden layer biases",biases1.shape,":\n",biases1)

# Output layer
weights2, biases2 = model.layers[2].get_weights()
print("\nOutput layer weights",weights2.shape,":\n",weights2)
print("Output layer biases",biases2.shape,":\n",biases2)
First hidden layer weights (2, 14) :
 [[ 3.18708643e-02  3.76880914e-01  3.34010243e-01  3.44292700e-01
   2.95141429e-01 -4.38812077e-01 -9.98875722e-02 -1.01917222e-01
   8.81723404e-01  8.01649585e-04  8.38082790e-01  2.49371871e-01
  -2.79023498e-01 -4.24151689e-01]
 [-5.62948525e-01 -4.53839749e-02  1.98968753e-01  4.54136908e-01
  -1.47138059e-01  1.57493204e-02 -1.24175236e-01  6.21757746e-01
   1.90287322e-01  2.40303844e-01  4.80566025e-01  6.19599819e-01
  -8.93215954e-01  3.60621035e-01]]
First hidden layer biases (14,) :
 [-2.4618995  -2.3834002  -1.9518676   2.317396   -2.1820586  -2.3574595
 -2.5948555  -0.89872456 -2.054627    2.9490285   1.6490346  -3.145205
  0.6876251  -1.7512023 ]

Second hidden layer weights (14, 8) :
 [[ 1.669014    1.6414063   0.9588886   3.164528   -0.02196597 -0.32785144
   2.1990228   0.91751957]
 [-4.003814   -1.1266284  -1.9269496  -3.1064603   1.265003   -0.24914873
   1.6380771   1.1263297 ]
 [-0.9020978  -3.574911   -2.7691925  -3.5347846   1.2291954  -0.20540085
   1.3661168   1.520551  ]
 [ 0.4559783   0.46875617  0.4568873   1.6658986   1.0361202   0.06128565
   0.17555745 -0.73306423]
 [-0.48316848  2.4034057   1.3663973  -4.7495275   0.93666404 -0.34562665
   2.495259    2.0074522 ]
 [ 3.4751034   2.882187    2.281648    4.1502266  -0.89567846 -0.7138774
  -2.2977114   1.7146146 ]
 [ 1.7384508   0.9689292   1.243842    0.7127554   0.48103505  1.9671367
   0.54398465  0.37887076]
 [ 0.9668143   0.4723948   0.70032674  1.6021389  -0.5229516   1.9490854
   0.43180677  0.29674414]
 [-2.8693395   0.19718704  0.69138134  2.2705007  -0.42728224  1.1143429
   0.04807813  0.27582458]
 [-2.6413574  -1.7130142  -1.6786942   0.62147456  0.9327566  -0.22635815
  -0.891202   -0.42524058]
 [-1.8769376   0.60468924 -0.05867555 -1.4351044   1.5068598   0.6448796
  -0.27241063 -0.09106335]
 [ 4.7218504   0.13688222  0.5859995  -5.164208   -0.1535627   0.19723855
   1.2152327   2.1972518 ]
 [ 0.21730302  0.12980174  0.0838487  -0.32059854 -3.9820185  -0.41070747
  -0.66641855 -0.25759152]
 [ 0.89308274  1.1042061   0.85377914 -3.845558    2.8300855  -1.5417657
   1.462573    2.1608164 ]]
Second hidden layer biases (8,) :
 [-1.8276167  -1.727654   -1.0377008  -0.15666379  0.38927427 -1.3372844
 -1.5511938  -1.2378241 ]

Output layer weights (8, 1) :
 [[-6.0222883]
 [-3.2982488]
 [-2.6216078]
 [ 4.985029 ]
 [-1.9720467]
 [ 2.5670073]
 [ 5.369243 ]
 [ 5.5304317]]
Output layer biases (1,) :
 [-4.600279]