
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import mean_squared_error
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401, registers the 3D projection

X = np.array([32.50234527, 53.42680403, 61.53035803, 47.47563963, 59.81320787,
              55.14218841, 52.21179669, 39.29956669, 48.10504169, 52.55001444,
              45.41973014, 54.35163488, 44.1640495,  58.16847072, 56.72720806,
              48.95588857, 44.68719623, 60.29732685, 45.61864377, 38.81681754])

y = np.array([31.70700585, 68.77759598, 62.5623823,  71.54663223, 87.23092513,
              78.21151827, 79.64197305, 59.17148932, 75.3312423,  71.30087989,
              55.16567715, 82.47884676, 62.00892325, 75.39287043, 81.43619216,
              60.72360244, 82.89250373, 97.37989686, 48.84715332, 56.87721319])

plt.plot(X, y, 'o')
plt.xlabel('X')
plt.ylabel('Y')
plt.title('Input Data')
plt.show()
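# The scatter above looks roughly linear, so a single neuron with a linear
# activation (y_hat = w * x + b) is trained below by vanilla gradient descent.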

class Neuron:

    def __init__(self):
        self.w = None
        self.b = None

    def linear_activation(self, x):
        # Prediction of the linear model: y_hat = w * x + b.
        return (x * self.w) + self.b

    def mse(self, ew, eb, X, Y):
        # Loss at an arbitrary (weight, bias) pair; used to build the error surface.
        pred = X * ew + eb
        return mean_squared_error(Y, pred)

    def grad_w_mse(self, x, y):
        # Per-sample contribution to dL/dw (the 20 is the dataset size).
        y_pred = self.linear_activation(x)
        return -np.sum((y - y_pred) * x) / 20

    def grad_b_mse(self, x, y):
        # Per-sample contribution to dL/db.
        y_pred = self.linear_activation(x)
        return -np.sum(y - y_pred) / 20
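    # For reference: with the loss
    #     L(w, b) = (1/N) * sum_i (y_i - (w * x_i + b)) ** 2
    # the exact gradients are
    #     dL/dw = -(2/N) * sum_i (y_i - y_hat_i) * x_i
    #     dL/db = -(2/N) * sum_i (y_i - y_hat_i)
    # The two methods above drop the constant factor of 2, which only
    # rescales the effective learning rate.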

    def fit(self, X, Y, epochs=1000, learning_rate=0.01):
        self.w = 0.1
        self.b = 0.1
        prev_loss = 0
        loss = []
        weight = []
        bias = []

        for i in range(epochs):
            Y_pred = self.linear_activation(X)
            curr_loss = mean_squared_error(Y, Y_pred)
            # Stop early once the loss has effectively stopped improving.
            if abs(prev_loss - curr_loss) <= 0.00001:
                break
            dw = 0
            db = 0
            # Accumulate the full-batch gradient one sample at a time.
            for x, y in zip(X, Y):
                dw += self.grad_w_mse(x, y)
                db += self.grad_b_mse(x, y)
            self.w -= learning_rate * dw
            self.b -= learning_rate * db
            Y_pred = self.linear_activation(X)
            weight.append(self.w)
            bias.append(self.b)
            loss.append(mean_squared_error(Y, Y_pred))
            prev_loss = curr_loss
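        # For reference: because each per-sample gradient is already divided
        # by the dataset size, the loop above is equivalent to one vectorized
        # full-batch step:
        #     y_hat = self.linear_activation(X)
        #     dw = -np.sum((Y - y_hat) * X) / len(X)
        #     db = -np.sum(Y - y_hat) / len(X)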

        # Evaluate the loss over every (weight, bias) pair visited during training.
        error = np.zeros(shape=(len(weight), len(bias)))
        for i in range(len(weight)):
            for j in range(len(bias)):
                error[i][j] = self.mse(weight[i], bias[j], X, Y)

        levels = np.sort(np.array(loss))
        weight = np.array(weight)
        bias = np.array(bias)

        # plot_surface and contourf expect 2-D grids, and fig.gca(projection='3d')
        # is no longer supported, so build a meshgrid and use add_subplot instead.
        W, B = np.meshgrid(weight, bias)
        fig = plt.figure()
        axes = fig.add_subplot(projection='3d')
        axes.plot_surface(W, B, error.T)
        plt.title("3D - Weight, Bias, Error")
        plt.show()

        plt.contourf(W, B, error.T, levels, alpha=0.7)
        plt.title("Contour plot - Weight, Bias, Error")
        plt.show()

        # Trajectory of the (weight, bias) pairs visited during training.
        plt.plot(weight, bias, color='red', marker='o')
        plt.xlabel("Weight")
        plt.ylabel("Bias")
        plt.title("Weight Vs Bias")
        plt.show()

        plt.plot(weight, loss)
        plt.scatter(weight, loss, marker='o', color='red')
        plt.xlabel('Weight')
        plt.ylabel('Loss')
        plt.title("Weight Vs Loss for Vanilla Gradient Descent")
        plt.show()

sn = Neuron()
sn.fit(X, y, epochs=1000, learning_rate=0.0001)
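# Optional sanity check (a minimal sketch, not part of the original listing):
# compare the learned parameters with NumPy's closed-form least-squares fit.
# The slopes should roughly agree; the intercept converges more slowly at
# this small learning rate.
w_ls, b_ls = np.polyfit(X, y, 1)
print(f"gradient descent: w = {sn.w:.4f}, b = {sn.b:.4f}")
print(f"np.polyfit:       w = {w_ls:.4f}, b = {b_ls:.4f}")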

OUTPUT: five figures (input scatter, 3D weight/bias/error surface, contour plot, weight-vs-bias trajectory, weight-vs-loss curve); the images are not reproduced here.
