def sigmoid(Z):
    """Element-wise logistic sigmoid activation.

    Args:
        Z: numpy array of pre-activation values.

    Returns:
        A tuple ``(A, cache)`` where ``A = 1 / (1 + exp(-Z))`` and
        ``cache`` is the input ``Z``, kept for the backward pass.
    """
    neg_exp = np.exp(-Z)
    activation = 1 / (1 + neg_exp)
    # The raw Z is cached so the backward pass can recompute the slope.
    return activation, Z
def relu(Z):
    """Element-wise rectified linear unit: max(0, Z).

    Args:
        Z: numpy array of pre-activation values.

    Returns:
        A tuple ``(A, cache)`` where ``A`` has negatives clipped to zero
        and ``cache`` is the input ``Z``, kept for the backward pass.
    """
    activation = np.maximum(Z, 0)
    return activation, Z
def tanh(Z):
A = (np.exp(Z)-np.exp(-Z))/(np.exp(Z)+np.exp(-Z))
cache = Z
return A, cache
def relu_backward(dA, Z):
    """Backward pass for the ReLU activation.

    Args:
        dA: gradient of the loss with respect to the activation output.
        Z: the pre-activation values cached by the forward pass.

    Returns:
        ``dZ``, the gradient with respect to ``Z``: equal to ``dA``
        wherever ``Z > 0`` and zero elsewhere.
    """
    # Work on a copy so the caller's dA is never mutated.
    dZ = np.asarray(dA).copy()
    # ReLU passes the gradient through only where the input was positive.
    inactive = Z <= 0
    dZ[inactive] = 0
    return dZ
def sigmoid_backward(dA, Z):
    """Backward pass for the sigmoid activation.

    Args:
        dA: gradient of the loss with respect to the activation output.
        Z: the pre-activation values cached by the forward pass.

    Returns:
        ``dZ = dA * s * (1 - s)`` where ``s = sigmoid(Z)``, the
        derivative of the logistic function applied via the chain rule.
    """
    # Recompute the forward activation from the cached Z.
    sig = 1 / (1 + np.exp(-Z))
    # d(sigmoid)/dZ = sig * (1 - sig); chain rule multiplies by dA.
    return dA * sig * (1 - sig)
def tanh_backward(dA, Z):
a, cache = tanh(Z)
dZ = dA * (1-a**2)
return dZ