DEEP LEARNING (Activation Functions)

DEEP LEARNING LAB

WEEK3

NAME: I.Aneesh Varma BRANCH: AIML


ROLL NO: 21R21A7326 DATE: 27/02/24

PROBLEM STATEMENT: Implementation of Activation functions


PROGRAM:
In [1]:

import numpy as np
In [2]:

# Define input data


data = np.array([[1, 2, 3],
                 [4, 5, 6],
                 [7, 8, 9]])
In [3]:

# Normalization of input data


def normalize(data):
    mean = np.mean(data, axis=0)
    std = np.std(data, axis=0)
    normalized_data = (data - mean) / std
    return normalized_data

normalized_data = normalize(data)
print("Normalized Data:")
print(normalized_data)
Normalized Data:
[[-1.22474487 -1.22474487 -1.22474487]
 [ 0.          0.          0.        ]
 [ 1.22474487  1.22474487  1.22474487]]
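As a quick sanity check (added here, not part of the original lab), each column of the normalized data should now have mean ≈ 0 and standard deviation ≈ 1.
In [ ]:

# Sanity check: column-wise mean ~0 and std ~1 after normalization
print(np.mean(normalized_data, axis=0))   # ~[0. 0. 0.]
print(np.std(normalized_data, axis=0))    # ~[1. 1. 1.]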
In [4]:

# Activation functions
def softmax(x):
    exp_x = np.exp(x - np.max(x, axis=1, keepdims=True))
    return exp_x / np.sum(exp_x, axis=1, keepdims=True)
In [5]:
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
In [6]:

def dsigmoid(x):
    return sigmoid(x) * (1 - sigmoid(x))
In [7]:

# Test activation functions


x = np.array([[1, 2, 3],
              [4, 5, 6]])
In [8]:

print("\nSoftmax:")
print(softmax(x))

Softmax:
[[0.09003057 0.24472847 0.66524096]
 [0.09003057 0.24472847 0.66524096]]
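Because this softmax subtracts the row-wise maximum before exponentiating (a standard numerical-stability trick that leaves the result unchanged), each output row should still sum to 1. A minimal check, assuming the softmax and x defined above:
In [ ]:

# Each softmax row is a probability distribution, so the rows sum to 1
print(np.sum(softmax(x), axis=1))   # expected: [1. 1.]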
In [9]:

print("\nSigmoid:")
print(sigmoid(x))

Sigmoid:
[[0.73105858 0.88079708 0.95257413]
 [0.98201379 0.99330715 0.99752738]]
In [10]:

print("\nDerivative of Sigmoid (dsigmoid):")


print(dsigmoid(x))

Derivative of Sigmoid (dsigmoid):
[[0.19661193 0.10499359 0.04517666]
 [0.01766271 0.00664806 0.00246651]]
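One way to validate dsigmoid (an added sketch, not in the original lab) is to compare it against a central finite difference of the sigmoid:
In [ ]:

# Finite-difference check of the sigmoid derivative
h = 1e-5
numeric = (sigmoid(x + h) - sigmoid(x - h)) / (2 * h)
print(np.allclose(dsigmoid(x), numeric))   # expected: True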
In [1]:

import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
x = np.arange(-5, 5, 0.01)
y = 1 / (1 + np.exp(-x))
plt.plot(x,y)
plt.title('Logistic Activation Function')
plt.xlabel('Input')
plt.ylabel('Output');
Sigmoid Function
In [2]:

import matplotlib.pyplot as plt


import numpy as np
import math
In [3]:

x = np.linspace(-20, 20, 200)


z = 1/(1 + np.exp(-x))
In [4]:

plt.plot(x, z)
plt.xlabel("x")
plt.ylabel("Sigmoid(X)")

plt.show()
In [5]:

x = np.linspace(-100, 100, 200)


z = 1/(1 + np.exp(-x))

plt.plot(x, z)
plt.xlabel("x")
plt.ylabel("Sigmoid(X)")

plt.show()
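On this wider input range the curve looks almost like a step function: the sigmoid saturates, and its gradient becomes vanishingly small for large |x|. A small added check of that saturation:
In [ ]:

# Sigmoid gradient shrinks rapidly away from 0 (saturation)
for v in [0.0, 5.0, 20.0]:
    s = 1 / (1 + np.exp(-v))
    print(v, s * (1 - s))   # 0.25 at v = 0, ~2e-9 at v = 20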
Implementation in Keras
In [6]:

import tensorflow as tf
In [7]:

a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)


b = tf.keras.activations.sigmoid(a)
b.numpy()
Out [7]:
array([2.0611537e-09, 2.6894143e-01, 5.0000000e-01, 7.3105860e-01,
       1.0000000e+00], dtype=float32)
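As an added cross-check, the Keras result agrees with the NumPy sigmoid used earlier:
In [ ]:

# Keras sigmoid should match 1 / (1 + exp(-x)) computed with NumPy
print(np.allclose(b.numpy(), 1 / (1 + np.exp(-a.numpy()))))   # expected: True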

dsigmoid
In [29]:

x = np.linspace(-20, 20, 200)


z = 1/(1 + np.exp(-x))
dz = z * (1 - z)
In [30]:

plt.plot(x, dz)
plt.xlabel("x")
plt.ylabel("dsigmoid(X)")

plt.show()
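The derivative peaks at x = 0, where sigmoid(0) = 0.5 and the derivative equals 0.5 * (1 - 0.5) = 0.25; a one-line confirmation (added sketch):
In [ ]:

# Maximum of the sigmoid derivative is 0.25, reached near x = 0
print(dz.max(), x[np.argmax(dz)])   # ~0.25 near x = 0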

Tanh Function
In [8]:

import matplotlib.pyplot as plt


import numpy as np
In [9]:

# Hyperbolic Tangent (htan) Activation Function


def htan(x):
    return (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))

# htan derivative
def der_htan(x):
    return 1 - htan(x) * htan(x)
In [10]:

# Generating data for Graph


x_data = np.linspace(-10,10,100)
y_data = htan(x_data)
dy_data = der_htan(x_data)
In [11]:

plt.plot(x_data, y_data, x_data, dy_data)


plt.title('htan Activation Function & Derivative')
plt.legend(['htan','der_htan'])
plt.grid()
plt.show()

Observations:
The tanh output is zero-centered because its range lies between -1 and 1, i.e. -1 < output < 1. This makes optimization easier, so in practice tanh is generally preferred over the sigmoid function.
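A small numeric illustration of the zero-centering claim (added sketch): over a symmetric input range, tanh outputs average to about 0, while sigmoid outputs average to about 0.5:
In [ ]:

# tanh outputs are centered around 0; sigmoid outputs are not
xs = np.linspace(-5, 5, 101)
print(np.mean(htan(xs)))                 # ~0
print(np.mean(1 / (1 + np.exp(-xs))))    # ~0.5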
Implementation in Keras
In [12]:

import tensorflow as tf
In [13]:

a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)


b = tf.keras.activations.tanh(a)
b.numpy()
Out [13]:
array([-0.9950547, -0.7615942,  0.       ,  0.7615942,  0.9950547],
      dtype=float32)
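As an added cross-check, the Keras tanh agrees with the htan implementation defined above:
In [ ]:

# Keras tanh should match the NumPy htan defined earlier
print(np.allclose(b.numpy(), htan(a.numpy())))   # expected: True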

Softmax Function
In [14]:

import matplotlib.pyplot as plt


import numpy as np
In [15]:

def softmax(x):
    """Applies softmax to an input x."""
    e_x = np.exp(x)
    return e_x / e_x.sum()
In [16]:

x = np.array([1, 0, 3, 5])
y = softmax(x)
In [17]:

y, x / x.sum()
Out [17]:
(array([0.01578405, 0.00580663, 0.11662925, 0.86178007]),
 array([0.11111111, 0.        , 0.33333333, 0.55555556]))
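The comparison shows that, because of the exponential, softmax concentrates far more of the mass on the largest entry than simple proportional scaling does. A related property (added sketch): softmax is invariant to adding the same constant to every input, which is why the earlier implementation could safely subtract the maximum:
In [ ]:

# Softmax is shift-invariant: adding a constant to all inputs leaves it unchanged
print(np.allclose(softmax(x), softmax(x + 10)))   # expected: True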
In [18]:

plt.figure(figsize=(10,5))
Out [18]:
<Figure size 1000x500 with 0 Axes>
In [19]:
input_series = [0,5,10,15,20]
In [20]:

# Apply softmax across the whole series (softmax of a single scalar is always 1.0)
output_series = softmax(np.array(input_series))


plt.plot(input_series, output_series)
plt.show()

Implementation in Keras
In [22]:

inputs = tf.random.normal(shape=(32, 10))


outputs = tf.keras.activations.softmax(inputs)
tf.reduce_sum(outputs[0, :])
Out [22]:
<tf.Tensor: shape=(), dtype=float32, numpy=1.0>
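The check above only sums the first row; as an added sketch, every row of the softmax output sums to 1:
In [ ]:

# All 32 rows of the softmax output sum to 1
print(np.allclose(tf.reduce_sum(outputs, axis=1).numpy(), 1.0))   # expected: True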

ReLU (Rectified Linear Unit)


In [26]:

# relu
def relu(z):
    return np.maximum(0, z)
In [27]:

# Generating data for Graph


x_data = np.linspace(-10,10,100)
y_data = relu(x_data)
In [28]:

plt.plot(x_data, y_data)
plt.title('Relu')
plt.legend(['Relu'])
plt.grid()
plt.show()
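For symmetry with the sigmoid and tanh sections, the ReLU derivative can be sketched the same way (added illustration; the derivative is 0 for z < 0 and 1 for z > 0, and is conventionally taken as 0 at z = 0):
In [ ]:

# ReLU derivative: 0 for negative inputs, 1 for positive inputs
def der_relu(z):
    return (z > 0).astype(float)

plt.plot(x_data, y_data, x_data, der_relu(x_data))
plt.title('ReLU Activation Function & Derivative')
plt.legend(['relu', 'der_relu'])
plt.grid()
plt.show()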

In [ ]:
