loadalgarve MLP

March 24, 2024

[ ]: import numpy as np
import pandas as pd
from scipy.io import loadmat

'''Construct a pandas DataFrame from a MATLAB .mat file'''

data_set = loadmat("LCAlgarve.mat")
# drop MATLAB metadata keys such as '__header__' and '__version__'
mat = {k: v for k, v in data_set.items() if k[0] != '_'}
# each variable is an (n, 1) array; take the first column as a Series
data = pd.DataFrame({k: pd.Series(v[:, 0]) for k, v in mat.items()})
data.to_csv("example.csv")
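
Before relying on the conversion, it can help to confirm what variables the .mat file actually holds; a quick sanity check, assuming LCAlgarve.mat sits in the working directory:

# list variable names and shapes stored in the .mat file
raw = loadmat("LCAlgarve.mat")
for key, value in raw.items():
    if not key.startswith('_'):  # skip MATLAB metadata entries
        print(key, getattr(value, 'shape', type(value)))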

[ ]: df = pd.read_csv('example.csv')
df.dtypes

[ ]: Unnamed: 0 int64
DayCode1 float64
DayCode2 float64
Occupation1 float64
Occupation2 int64
Power1 float64
Power2 float64
dtype: object

[ ]: df.shape

[ ]: (1751, 7)

[ ]: df.isnull().any()

[ ]: Unnamed: 0 False
DayCode1 True
DayCode2 False
Occupation1 True
Occupation2 False
Power1 True
Power2 False
dtype: bool
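
The household-1 columns (DayCode1, Occupation1, Power1) contain missing values, which is why the analysis below switches to the household-2 columns. If the household-1 series were needed instead, a minimal sketch of two common options (both assumptions, not steps taken in this notebook):

# option A: drop rows with a missing value in any household-1 column
df_clean = df.dropna(subset=['DayCode1', 'Occupation1', 'Power1'])

# option B: fill gaps in the power series by linear interpolation
df['Power1'] = df['Power1'].interpolate(method='linear')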

[ ]: df.head()

[ ]:    Unnamed: 0  DayCode1  DayCode2  Occupation1  Occupation2    Power1    Power2
     0           0       0.5       0.8          3.0            5  0.755691  0.747135
     1           1       0.5       0.8          3.0            5  0.339651  1.370311
     2           2       0.5       0.8          3.0            5  0.222168  0.759630
     3           3       0.5       0.8          3.0            5  0.404500  1.125687
     4           4       0.5       0.8          3.0            5  0.956347  1.910926


[ ]: df.duplicated()

[ ]: 0 False
1 False
2 False
3 False
4 False
…
1746 False
1747 False
1748 False
1749 False
1750 False
Length: 1751, dtype: bool

[ ]: # re-read the CSV; index_col=0 consumes the saved index instead of adding 'Unnamed: 0'
data1 = pd.read_csv('example.csv', index_col=0)

print(data1.dtypes)

DayCode1 float64
DayCode2 float64
Occupation1 float64
Occupation2 int64
Power1 float64
Power2 float64
dtype: object

[ ]: # scikit-learn utilities (the forecasting model below ends up using Keras instead)
import sklearn
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import classification_report, confusion_matrix

data1 = pd.read_csv('example.csv', index_col=0)

# keep only the household-2 columns, which contain no missing values
data2 = data1[['DayCode2', 'Occupation2', 'Power2']]
print(data2)

      DayCode2  Occupation2    Power2
0         0.80            5  0.747135
1 0.80 5 1.370311
2 0.80 5 0.759630
3 0.80 5 1.125687
4 0.80 5 1.910926
… … … …
1746 0.30 3 2.672848
1747 0.30 3 2.337520
1748 0.30 3 2.354926
1749 0.35 2 1.933089
1750 0.35 2 2.036769

[1751 rows x 3 columns]

[ ]: # Adapted from: https://gist.githubusercontent.com/rupak-roy/edd1afa3d07bb928f70df5a344a08c6a/raw/20dfe5f218815ad28e60db1d065e009045f3e0fa/Multi-Layer%20Perception%20(MLP)%20for%20time%20series%20forecasting

# flatten the (1751, 3) array row by row into one univariate sequence
dataset = data2.values
data = dataset.flatten()
raw_seq = data.tolist()
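
Note that flatten() walks the (1751, 3) array row by row, so raw_seq interleaves DayCode2, Occupation2 and Power2 values rather than forming a single homogeneous series; inspecting the first two rows makes this visible:

# first six elements = first two rows of (DayCode2, Occupation2, Power2)
print(raw_seq[:6])
# from the head shown above: [0.8, 5.0, 0.747135, 0.8, 5.0, 1.370311]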

[ ]: from numpy import array

# split a univariate sequence into supervised samples:
# each sample is n_steps inputs followed by the next value as the target
def split_sequence(sequence, n_steps):
    X, y = list(), list()
    for i in range(len(sequence)):
        # find the end of this pattern
        end_ix = i + n_steps
        # check if we are beyond the sequence
        if end_ix > len(sequence) - 1:
            break
        # input and output sample
        seq_x, seq_y = sequence[i:end_ix], sequence[end_ix]
        X.append(seq_x)
        y.append(seq_y)
    return array(X), array(y)
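
As a quick illustration of the sliding window (not part of the original notebook), applying split_sequence to a toy sequence:

# window length 3 over a six-element sequence
X_demo, y_demo = split_sequence([1, 2, 3, 4, 5, 6], 3)
print(X_demo)  # [[1 2 3], [2 3 4], [3 4 5]] (as a 3x3 array)
print(y_demo)  # [4 5 6]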

[ ]: from keras.models import Sequential
from keras.layers import Dense

# choose a number of time steps
n_steps = 3
# split into samples
X, y = split_sequence(raw_seq, n_steps)

# define model: one hidden layer of 150 ReLU units, one linear output unit
model = Sequential()
model.add(Dense(150, activation='relu', input_dim=n_steps))
model.add(Dense(1))
# note: 'accuracy' is not a meaningful metric for MSE regression;
# it is kept here only to match the training log below
model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])

C:\Users\iamro\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.11_qbz5n
2kfra8p0\LocalCache\local-packages\Python311\site-
packages\keras\src\layers\core\dense.py:88: UserWarning: Do not pass an
`input_shape`/`input_dim` argument to a layer. When using Sequential models,
prefer using an `Input(shape)` object as the first layer in the model instead.
super().__init__(activity_regularizer=activity_regularizer, **kwargs)
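
As the warning itself suggests, recent Keras versions prefer an explicit Input layer over input_dim. A minimal equivalent sketch (not part of the original run):

# equivalent definition that avoids the input_dim deprecation warning
from keras.models import Sequential
from keras.layers import Input, Dense

model = Sequential()
model.add(Input(shape=(n_steps,)))
model.add(Dense(150, activation='relu'))
model.add(Dense(1))
model.compile(optimizer='adam', loss='mse')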

[ ]: # fit model
history = model.fit(X, y, epochs=40, verbose=1)

Epoch 1/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 1s 943us/step - accuracy: 0.0552 - loss: 0.9641
Epoch 2/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0526 - loss: 0.1490
Epoch 3/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0535 - loss: 0.1530
Epoch 4/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0500 - loss: 0.1494
Epoch 5/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0551 - loss: 0.1623
Epoch 6/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0558 - loss: 0.1314
Epoch 7/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0516 - loss: 0.1423
Epoch 8/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0514 - loss: 0.1502
Epoch 9/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0533 - loss: 0.1519
Epoch 10/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0502 - loss: 0.1419
Epoch 11/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 964us/step - accuracy: 0.0518 - loss: 0.1426
Epoch 12/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0503 - loss: 0.1524
Epoch 13/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0518 - loss: 0.1734
Epoch 14/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0537 - loss: 0.1584
Epoch 15/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0563 - loss: 0.1523
Epoch 16/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0548 - loss: 0.1581
Epoch 17/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0540 - loss: 0.1571
Epoch 18/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0594 - loss: 0.1407
Epoch 19/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0548 - loss: 0.1636
Epoch 20/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0634 - loss: 0.1570
Epoch 21/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0587 - loss: 0.1582
Epoch 22/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0467 - loss: 0.1653
Epoch 23/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0564 - loss: 0.1517
Epoch 24/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0584 - loss: 0.1477
Epoch 25/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0610 - loss: 0.1595
Epoch 26/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0521 - loss: 0.1462
Epoch 27/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0508 - loss: 0.1465
Epoch 28/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0550 - loss: 0.1374
Epoch 29/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0515 - loss: 0.1387
Epoch 30/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0588 - loss: 0.1497
Epoch 31/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0568 - loss: 0.1446
Epoch 32/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0557 - loss: 0.1574
Epoch 33/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0518 - loss: 0.1488
Epoch 34/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0504 - loss: 0.1515
Epoch 35/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0552 - loss: 0.1662
Epoch 36/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0524 - loss: 0.1371
Epoch 37/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0539 - loss: 0.1671
Epoch 38/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0544 - loss: 0.1384
Epoch 39/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0488 - loss: 0.1528
Epoch 40/40
165/165 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step - accuracy: 0.0518 - loss: 0.1438

[ ]: from matplotlib import pyplot

# plot the training-loss and training-accuracy curves recorded by fit()
pyplot.plot(history.history['loss'], label='loss')
pyplot.plot(history.history['accuracy'], label='accuracy')
pyplot.legend()
pyplot.show()
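
With the model fitted, a next-step forecast can be produced from the last n_steps values of the sequence, following the out-of-sample pattern in the gist linked above; a minimal sketch:

# predict the value following the last n_steps entries of the sequence
x_input = array(raw_seq[-n_steps:]).reshape(1, n_steps)
yhat = model.predict(x_input, verbose=0)
print(yhat)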
