DEEP LEARNING (Activation Functions)
WEEK 3
In [1]:
import numpy as np
In [2]:
# Activation functions
def softmax(x):
    # Subtract the row-wise max for numerical stability before exponentiating
    exp_x = np.exp(x - np.max(x, axis=1, keepdims=True))
    return exp_x / np.sum(exp_x, axis=1, keepdims=True)
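Subtracting the row-wise maximum keeps np.exp from overflowing on large logits; the shift cancels in the ratio, so the probabilities are unchanged. A quick check with illustrative large values (not from the original run):
# Large logits would overflow a naive np.exp(x); the shifted version is safe,
# and the result equals softmax of [1, 2, 3] since only differences matter.
big = np.array([[1000.0, 1001.0, 1002.0]])
print(softmax(big))  # [[0.09003057 0.24472847 0.66524096]]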
In [5]:
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
In [6]:
def dsigmoid(x):
    # Derivative of the sigmoid: sigmoid(x) * (1 - sigmoid(x))
    return sigmoid(x) * (1 - sigmoid(x))
In [7]:
print("\nSoftmax:")
print(softmax(x))
Softmax:
[[0.09003057 0.24472847 0.66524096]
[0.09003057 0.24472847 0.66524096]]
In [9]:
print("\nSigmoid:")
print(sigmoid(x))
Sigmoid:
[[0.73105858 0.88079708 0.95257413]
[0.98201379 0.99330715 0.99752738]]
In [10]:
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
x = np.arange(-5, 5, 0.01)
y = 1 / (1 + np.exp(-x))
plt.plot(x,y)
plt.title('Logistic Activation Function')
plt.xlabel('Input')
plt.ylabel('Output');
Sigmoid Function
In [2]:
z = sigmoid(x)  # sigmoid values over the x grid above
plt.plot(x, z)
plt.xlabel("x")
plt.ylabel("sigmoid(x)")
plt.show()
Implementation in Keras
In [6]:
import tensorflow as tf
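Keras exposes sigmoid as a built-in activation; a minimal sketch with illustrative values, matching the NumPy sigmoid above:
# Built-in Keras sigmoid applied to a constant tensor
t = tf.constant([1.0, 2.0, 3.0])
print(tf.keras.activations.sigmoid(t).numpy())  # [0.7310586 0.880797  0.95257413]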
Derivative of Sigmoid
In [29]:
dz = dsigmoid(x)  # sigmoid derivative over the x grid above
plt.plot(x, dz)
plt.xlabel("x")
plt.ylabel("dsigmoid(x)")
plt.show()
Tanh Function
In [8]:
# htan: hyperbolic tangent, range (-1, 1)
def htan(x):
    return np.tanh(x)

# htan derivative
def der_htan(x):
    return 1 - htan(x) * htan(x)
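Plotting tanh makes the zero-centered range visible; a minimal sketch in the style of the sigmoid plot above (the plotting code here is an assumption, not the original cell):
x = np.arange(-5, 5, 0.01)
plt.plot(x, htan(x))
plt.title('Tanh Activation Function')
plt.xlabel('Input')
plt.ylabel('Output')
plt.show()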
Observations:
The tanh output is zero-centered because its range is (-1, 1), i.e. -1 < output < 1. This makes
optimization easier, so in practice tanh is generally preferred over the sigmoid function.
Implementation in Keras
In [12]:
import tensorflow as tf
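As with sigmoid, tanh is available as a built-in Keras activation; a minimal sketch with illustrative values:
t = tf.constant([-1.0, 0.0, 1.0])
print(tf.keras.activations.tanh(t).numpy())  # [-0.7615942  0.         0.7615942]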
Softmax Function
In [14]:
def softmax(x):
    """Applies softmax to an input x."""
    e_x = np.exp(x)
    return e_x / e_x.sum()
In [16]:
x = np.array([1, 0, 3, 5])
y = softmax(x)
In [17]:
y, x / x.sum()
Out [17]:
(array([0.01578405, 0.00580663, 0.11662925, 0.86178007]),
array([0.11111111, 0. , 0.33333333, 0.55555556]))
In [18]:
plt.figure(figsize=(10,5))
Out [18]:
<Figure size 1000x500 with 0 Axes>
In [19]:
input_series = [0,5,10,15,20]
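A bar chart over input_series shows how softmax concentrates nearly all probability on the largest input; a minimal sketch (the plotting choices are assumptions, not the original cell):
probs = softmax(np.array(input_series, dtype=float))
plt.figure(figsize=(10, 5))
plt.bar(range(len(input_series)), probs)
plt.xticks(range(len(input_series)), input_series)
plt.xlabel('Input value')
plt.ylabel('Softmax probability')
plt.show()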
Implementation in Keras
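Keras also provides softmax as a built-in activation; a minimal sketch (note it expects at least a 2-D batch of logits):
logits = tf.constant([[1.0, 0.0, 3.0, 5.0]])
print(tf.keras.activations.softmax(logits).numpy())  # matches the NumPy result above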
ReLU Function
In [22]:
# ReLU: returns max(0, z) elementwise
def relu(z):
    return np.maximum(0, z)
In [27]:
x_data = np.linspace(-5, 5, 100)  # input grid for the plot
y_data = relu(x_data)
plt.plot(x_data, y_data)
plt.title('ReLU')
plt.legend(['ReLU'])
plt.grid()
plt.show()
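For completeness, ReLU follows the same built-in Keras pattern as the activations above; a minimal sketch:
t = tf.constant([-2.0, -1.0, 0.0, 1.0, 2.0])
print(tf.keras.activations.relu(t).numpy())  # [0. 0. 0. 1. 2.]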