Image Classification - Rithvik

You might also like

Download as pdf or txt
Download as pdf or txt
You are on page 1of 20

IMAGE CLASSIFICATION_RITHVIK

March 23, 2020

[1]: import cv2


import numpy as np
import pandas as pd

import matplotlib.pyplot as plt


%matplotlib inline

import os
import random

# --- Dataset configuration ---
# Root folder holding one sub-directory per class.
TRAINDIR = 'C:/Users/ADMIN/Desktop/Snehil devops/train'

# Class names; the list index doubles as the integer label
# (0 = apple, 1 = orange).
CATEGORIES = ["apple", "orange"]

# Target image geometry: every picture is resized to 150x150, 3 channels.
nrows = 150
ncolumns = 150
channels = 3

train_data = []

def create_training_data():
    """Load every image under TRAINDIR into the module-level `train_data`.

    Each entry is ``[rgb_image, class_index]`` where the image has been
    resized to (nrows, ncolumns) and converted from OpenCV's BGR order
    to RGB so that matplotlib displays the true colours.
    """
    for category in CATEGORIES:
        # Path to the 'apple' or 'orange' directory.
        path = os.path.join(TRAINDIR, category)
        class_num = CATEGORIES.index(category)

        for image in os.listdir(path):  # every file in the class folder
            image_array1 = cv2.imread(os.path.join(path, image), cv2.IMREAD_COLOR)
            if image_array1 is None:
                # Skip unreadable / non-image files instead of crashing
                # inside cv2.resize on a None input.
                continue

            # Normalise all pictures to the same (150, 150) size.
            new_array1 = cv2.resize(image_array1, (nrows, ncolumns))

            # OpenCV loads BGR; convert for correct display colours.
            color_img = cv2.cvtColor(new_array1, cv2.COLOR_BGR2RGB)
            imgplot = plt.imshow(color_img)

            # Collect the sample together with its integer label.
            train_data.append([color_img, class_num])

create_training_data()

# Shuffle in place so apples and oranges are interleaved before splitting.
random.shuffle(train_data)

# Visual sanity check: show a couple of shuffled samples.
plt.imshow(train_data[1][0])
plt.show()

plt.imshow(train_data[2][0])
plt.show()

# Split the [image, label] pairs into parallel feature / label lists.
X = []
y = []

for feature, label in train_data:
    X.append(feature)
    y.append(label)

# Stack into one 4-D tensor (n_samples, 150, 150, 3) plus a label vector.
X = np.array(X).reshape(-1, nrows, ncolumns, channels)
y = np.array(y)

[9]: X[0]

[9]: array([[[255, 255, 255],


[255, 255, 255],
[255, 255, 255],
…,
[255, 255, 255],
[255, 255, 255],
[255, 255, 255]],

3
[[255, 255, 255],
[255, 255, 255],
[255, 255, 255],
…,
[255, 255, 255],
[255, 255, 255],
[255, 255, 255]],

[[255, 255, 255],


[255, 255, 255],
[255, 255, 255],
…,
[255, 255, 255],
[255, 255, 255],
[255, 255, 255]],

…,

[[255, 255, 255],


[255, 255, 255],
[255, 255, 255],
…,
[255, 255, 255],
[255, 255, 255],
[255, 255, 255]],

[[255, 255, 255],


[255, 255, 255],
[255, 255, 255],
…,
[255, 255, 255],
[255, 255, 255],
[255, 255, 255]],

[[255, 255, 255],


[255, 255, 255],
[255, 255, 255],
…,
[255, 255, 255],
[255, 255, 255],
[255, 255, 255]]], dtype=uint8)

[10]: y

[10]: array([0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0,
0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1])

4
# Show the first `columns` training images in a single row.
plt.figure(figsize=(20, 20))
columns = 5
for i in range(columns):
    # Subplot arguments must be integers: `5 / columns` is a float in
    # Python 3, so use floor division for the row count.
    plt.subplot(5 // columns + 1, columns, i + 1)
    plt.imshow(X[i])

import seaborn as sns

# Bar chart of the class balance (0 = apple, 1 = orange).  Pass the data
# via the `x=` keyword: positional use is deprecated in recent seaborn.
sns.countplot(x=y)
plt.title('Labels for apples and oranges')

[12]: Text(0.5, 1.0, 'Labels for apples and oranges')

[13]: X.shape

[13]: (40, 150, 150, 3)

5
[14]: y.shape

[14]: (40,)

from sklearn.model_selection import train_test_split

# Hold out 20% of the samples for validation; fixed seed so the split
# is reproducible across runs.
X_train, X_val, y_train, y_val = train_test_split(
    X, y, test_size=0.2, random_state=2)

print(X_train.shape)
print(X_val.shape)
print(y_train.shape)
print(y_val.shape)

(32, 150, 150, 3)


(8, 150, 150, 3)
(32,)
(8,)

# Get the length of the train and validation data.
ntrain = len(X_train)
nval = len(X_val)

# We will use a batch size of 32.  Note: batch size is conventionally a
# power of 2 (4, 8, 16, 32, 64, ...).
batch_size = 32

print(ntrain)
print(nval)

32
8

from keras import layers
from keras import models
from keras import optimizers
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing.image import img_to_array, load_img

# Small VGG-style CNN: four conv/max-pool stages that shrink 150x150
# down to 7x7, followed by a dense classifier head.
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu',
                        input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Flatten())
model.add(layers.Dropout(0.5))  # Dropout for regularization.
model.add(layers.Dense(512, activation='relu'))
# Single sigmoid unit at the end because we have just two classes.
model.add(layers.Dense(1, activation='sigmoid'))

Using TensorFlow backend.

# Print the layer-by-layer architecture and parameter counts.
model.summary()

Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_1 (Conv2D) (None, 148, 148, 32) 896
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 74, 74, 32) 0
_________________________________________________________________
conv2d_2 (Conv2D) (None, 72, 72, 64) 18496
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 36, 36, 64) 0
_________________________________________________________________
conv2d_3 (Conv2D) (None, 34, 34, 128) 73856
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 17, 17, 128) 0
_________________________________________________________________
conv2d_4 (Conv2D) (None, 15, 15, 128) 147584
_________________________________________________________________
max_pooling2d_4 (MaxPooling2 (None, 7, 7, 128) 0
_________________________________________________________________
flatten_1 (Flatten) (None, 6272) 0
_________________________________________________________________
dropout_1 (Dropout) (None, 6272) 0
_________________________________________________________________
dense_1 (Dense) (None, 512) 3211776
_________________________________________________________________
dense_2 (Dense) (None, 1) 513
=================================================================
Total params: 3,453,121
Trainable params: 3,453,121
Non-trainable params: 0
_________________________________________________________________

# Binary cross-entropy with RMSprop.  Use the `learning_rate` keyword:
# the `lr` alias is deprecated in modern Keras releases.
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=1e-4),
              metrics=['acc'])

7
# Augment the training images (rotation, shifts, shear, zoom, flips) and
# rescale pixel values to [0, 1].  Validation data is only rescaled —
# augmentation would distort the metric.
train_datagen = ImageDataGenerator(rescale=1 / 255,
                                   rotation_range=40,
                                   width_shift_range=0.2,
                                   height_shift_range=0.2,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True)

val_datagen = ImageDataGenerator(rescale=1 / 255)

train_generator = train_datagen.flow(X_train, y_train, batch_size=batch_size)
val_generator = val_datagen.flow(X_val, y_val, batch_size=batch_size)

# Train for 64 epochs.  Guard both step counts with max(1, ...): with
# only 8 validation samples and batch_size 32, `nval // batch_size`
# evaluates to 0, which would skip validation entirely.
history = model.fit_generator(train_generator,
                              steps_per_epoch=max(1, ntrain // batch_size),
                              epochs=64,
                              validation_data=val_generator,
                              validation_steps=max(1, nval // batch_size))

Epoch 1/64
1/1 [==============================] - 6s 6s/step - loss: 0.7065 - acc: 0.4375 -
val_loss: 0.8212 - val_acc: 0.2500
Epoch 2/64
1/1 [==============================] - 1s 1s/step - loss: 0.6678 - acc: 0.5625 -
val_loss: 0.7575 - val_acc: 0.2500
Epoch 3/64
1/1 [==============================] - 1s 1s/step - loss: 0.6678 - acc: 0.5625 -
val_loss: 0.7680 - val_acc: 0.2500
Epoch 4/64
1/1 [==============================] - 1s 1s/step - loss: 0.6510 - acc: 0.5625 -
val_loss: 0.7596 - val_acc: 0.2500
Epoch 5/64
1/1 [==============================] - 1s 1s/step - loss: 0.6300 - acc: 0.5625 -
val_loss: 0.7385 - val_acc: 0.2500
Epoch 6/64
1/1 [==============================] - 1s 1s/step - loss: 0.6239 - acc: 0.5938 -
val_loss: 0.6869 - val_acc: 0.3750
Epoch 7/64
1/1 [==============================] - 1s 1s/step - loss: 0.6036 - acc: 0.6875 -
val_loss: 0.6855 - val_acc: 0.2500
Epoch 8/64
1/1 [==============================] - 1s 1s/step - loss: 0.5691 - acc: 0.6875 -
val_loss: 0.5079 - val_acc: 1.0000
Epoch 9/64
1/1 [==============================] - 1s 1s/step - loss: 0.5438 - acc: 0.9062 -
val_loss: 0.9853 - val_acc: 0.2500
Epoch 10/64

8
1/1 [==============================] - 1s 1s/step - loss: 0.6351 - acc: 0.5625 -
val_loss: 0.3528 - val_acc: 0.7500
Epoch 11/64
1/1 [==============================] - 1s 1s/step - loss: 0.7234 - acc: 0.4375 -
val_loss: 0.9200 - val_acc: 0.2500
Epoch 12/64
1/1 [==============================] - 1s 1s/step - loss: 0.5870 - acc: 0.5625 -
val_loss: 0.4810 - val_acc: 1.0000
Epoch 13/64
1/1 [==============================] - 1s 1s/step - loss: 0.4991 - acc: 0.9688 -
val_loss: 0.5023 - val_acc: 1.0000
Epoch 14/64
1/1 [==============================] - 1s 1s/step - loss: 0.4579 - acc: 0.9375 -
val_loss: 0.4560 - val_acc: 1.0000
Epoch 15/64
1/1 [==============================] - 1s 1s/step - loss: 0.4477 - acc: 0.9375 -
val_loss: 0.4165 - val_acc: 1.0000
Epoch 16/64
1/1 [==============================] - 1s 1s/step - loss: 0.4100 - acc: 0.9375 -
val_loss: 0.2992 - val_acc: 1.0000
Epoch 17/64
1/1 [==============================] - 1s 1s/step - loss: 0.3705 - acc: 1.0000 -
val_loss: 0.3474 - val_acc: 1.0000
Epoch 18/64
1/1 [==============================] - 1s 1s/step - loss: 0.3358 - acc: 0.9375 -
val_loss: 0.1423 - val_acc: 1.0000
Epoch 19/64
1/1 [==============================] - 1s 1s/step - loss: 0.4137 - acc: 0.8438 -
val_loss: 1.2224 - val_acc: 0.2500
Epoch 20/64
1/1 [==============================] - 1s 1s/step - loss: 0.6372 - acc: 0.5625 -
val_loss: 0.1364 - val_acc: 1.0000
Epoch 21/64
1/1 [==============================] - 1s 1s/step - loss: 0.4004 - acc: 0.8750 -
val_loss: 0.4918 - val_acc: 0.7500
Epoch 22/64
1/1 [==============================] - 1s 1s/step - loss: 0.3285 - acc: 0.8750 -
val_loss: 0.1318 - val_acc: 1.0000
Epoch 23/64
1/1 [==============================] - 1s 1s/step - loss: 0.2680 - acc: 0.9688 -
val_loss: 0.2443 - val_acc: 1.0000
Epoch 24/64
1/1 [==============================] - 1s 1s/step - loss: 0.2337 - acc: 0.9375 -
val_loss: 0.1242 - val_acc: 1.0000
Epoch 25/64
1/1 [==============================] - 1s 1s/step - loss: 0.2146 - acc: 1.0000 -
val_loss: 0.1625 - val_acc: 1.0000
Epoch 26/64

9
1/1 [==============================] - 1s 1s/step - loss: 0.1888 - acc: 0.9688 -
val_loss: 0.0721 - val_acc: 1.0000
Epoch 27/64
1/1 [==============================] - 1s 1s/step - loss: 0.1924 - acc: 1.0000 -
val_loss: 0.2031 - val_acc: 1.0000
Epoch 28/64
1/1 [==============================] - 1s 1s/step - loss: 0.1690 - acc: 0.9375 -
val_loss: 0.0373 - val_acc: 1.0000
Epoch 29/64
1/1 [==============================] - 1s 1s/step - loss: 0.2340 - acc: 0.9375 -
val_loss: 0.5769 - val_acc: 0.6250
Epoch 30/64
1/1 [==============================] - 2s 2s/step - loss: 0.3332 - acc: 0.8125 -
val_loss: 0.0345 - val_acc: 1.0000
Epoch 31/64
1/1 [==============================] - 1s 1s/step - loss: 0.2581 - acc: 0.9062 -
val_loss: 0.2331 - val_acc: 1.0000
Epoch 32/64
1/1 [==============================] - 1s 1s/step - loss: 0.1766 - acc: 0.9688 -
val_loss: 0.0436 - val_acc: 1.0000
Epoch 33/64
1/1 [==============================] - 1s 1s/step - loss: 0.0993 - acc: 1.0000 -
val_loss: 0.0478 - val_acc: 1.0000
Epoch 34/64
1/1 [==============================] - 1s 1s/step - loss: 0.1268 - acc: 0.9688 -
val_loss: 0.0373 - val_acc: 1.0000
Epoch 35/64
1/1 [==============================] - 1s 1s/step - loss: 0.1125 - acc: 0.9688 -
val_loss: 0.0281 - val_acc: 1.0000
Epoch 36/64
1/1 [==============================] - 1s 1s/step - loss: 0.0974 - acc: 1.0000 -
val_loss: 0.0399 - val_acc: 1.0000
Epoch 37/64
1/1 [==============================] - 1s 1s/step - loss: 0.0825 - acc: 1.0000 -
val_loss: 0.0229 - val_acc: 1.0000
Epoch 38/64
1/1 [==============================] - 1s 1s/step - loss: 0.0852 - acc: 1.0000 -
val_loss: 0.0247 - val_acc: 1.0000
Epoch 39/64
1/1 [==============================] - 1s 1s/step - loss: 0.0736 - acc: 1.0000 -
val_loss: 0.0178 - val_acc: 1.0000
Epoch 40/64
1/1 [==============================] - 1s 1s/step - loss: 0.0507 - acc: 1.0000 -
val_loss: 0.0230 - val_acc: 1.0000
Epoch 41/64
1/1 [==============================] - 1s 1s/step - loss: 0.1101 - acc: 0.9688 -
val_loss: 0.0066 - val_acc: 1.0000
Epoch 42/64

10
1/1 [==============================] - 1s 1s/step - loss: 0.0733 - acc: 1.0000 -
val_loss: 0.1129 - val_acc: 1.0000
Epoch 43/64
1/1 [==============================] - 1s 1s/step - loss: 0.1748 - acc: 0.9688 -
val_loss: 0.0071 - val_acc: 1.0000
Epoch 44/64
1/1 [==============================] - 1s 1s/step - loss: 0.2756 - acc: 0.8125 -
val_loss: 0.6086 - val_acc: 0.6250
Epoch 45/64
1/1 [==============================] - 1s 1s/step - loss: 0.4446 - acc: 0.7188 -
val_loss: 0.0070 - val_acc: 1.0000
Epoch 46/64
1/1 [==============================] - 1s 1s/step - loss: 0.0929 - acc: 1.0000 -
val_loss: 0.0168 - val_acc: 1.0000
Epoch 47/64
1/1 [==============================] - 1s 1s/step - loss: 0.0604 - acc: 1.0000 -
val_loss: 0.0117 - val_acc: 1.0000
Epoch 48/64
1/1 [==============================] - 1s 1s/step - loss: 0.0405 - acc: 1.0000 -
val_loss: 0.0113 - val_acc: 1.0000
Epoch 49/64
1/1 [==============================] - 1s 1s/step - loss: 0.0416 - acc: 1.0000 -
val_loss: 0.0101 - val_acc: 1.0000
Epoch 50/64
1/1 [==============================] - 1s 1s/step - loss: 0.0385 - acc: 1.0000 -
val_loss: 0.0082 - val_acc: 1.0000
Epoch 51/64
1/1 [==============================] - 1s 1s/step - loss: 0.0562 - acc: 0.9688 -
val_loss: 0.0057 - val_acc: 1.0000
Epoch 52/64
1/1 [==============================] - 1s 1s/step - loss: 0.0362 - acc: 1.0000 -
val_loss: 0.0069 - val_acc: 1.0000
Epoch 53/64
1/1 [==============================] - 1s 1s/step - loss: 0.0429 - acc: 1.0000 -
val_loss: 0.0074 - val_acc: 1.0000
Epoch 54/64
1/1 [==============================] - 1s 1s/step - loss: 0.0314 - acc: 1.0000 -
val_loss: 0.0062 - val_acc: 1.0000
Epoch 55/64
1/1 [==============================] - 1s 1s/step - loss: 0.0538 - acc: 0.9688 -
val_loss: 0.0033 - val_acc: 1.0000
Epoch 56/64
1/1 [==============================] - 1s 1s/step - loss: 0.0391 - acc: 1.0000 -
val_loss: 0.0060 - val_acc: 1.0000
Epoch 57/64
1/1 [==============================] - 1s 1s/step - loss: 0.0334 - acc: 1.0000 -
val_loss: 0.0031 - val_acc: 1.0000
Epoch 58/64

11
1/1 [==============================] - 1s 1s/step - loss: 0.0322 - acc: 1.0000 -
val_loss: 0.0054 - val_acc: 1.0000
Epoch 59/64
1/1 [==============================] - 1s 1s/step - loss: 0.0662 - acc: 0.9688 -
val_loss: 0.0015 - val_acc: 1.0000
Epoch 60/64
1/1 [==============================] - 1s 1s/step - loss: 0.0732 - acc: 1.0000 -
val_loss: 0.0736 - val_acc: 1.0000
Epoch 61/64
1/1 [==============================] - 1s 1s/step - loss: 0.1606 - acc: 0.9375 -
val_loss: 0.0015 - val_acc: 1.0000
Epoch 62/64
1/1 [==============================] - 1s 1s/step - loss: 0.0647 - acc: 1.0000 -
val_loss: 0.0077 - val_acc: 1.0000
Epoch 63/64
1/1 [==============================] - 1s 1s/step - loss: 0.0510 - acc: 0.9688 -
val_loss: 0.0018 - val_acc: 1.0000
Epoch 64/64
1/1 [==============================] - 1s 1s/step - loss: 0.0409 - acc: 1.0000 -
val_loss: 0.0014 - val_acc: 1.0000

# Let's plot the train and validation curves.

# Get the per-epoch metrics from the history object.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(1, len(acc) + 1)

# Train and validation accuracy (typo 'accurarcy' fixed).
plt.plot(epochs, acc, 'b', label='Training accuracy')
plt.plot(epochs, val_acc, 'r', label='Validation accuracy')
plt.title('Training and Validation accuracy')
plt.legend()

plt.figure()
# Train and validation loss.
plt.plot(epochs, loss, 'b', label='Training loss')
plt.plot(epochs, val_loss, 'r', label='Validation loss')
plt.title('Training and Validation loss')
plt.legend()

plt.show()

12
# --- Second experiment: a different training folder, same two classes ---
TRAINDIR2 = 'C:/Users/ADMIN/Desktop/Snehil devops/train2'
CATEGORIES = ["apple", "orange"]

# Same target geometry as the first experiment.
nrows = 150
ncolumns = 150
channels = 3

train_data = []

def create_training_data():
    """Load every image under TRAINDIR2 into the module-level `train_data`.

    Each entry is ``[image, class_index]`` resized to (nrows, ncolumns).
    NOTE(review): unlike the first pipeline, no BGR->RGB conversion is
    done here, so these images remain in OpenCV's BGR order; the
    prediction cell later converts them before display — confirm this
    asymmetry is intentional.
    """
    for category in CATEGORIES:
        # Path to the 'apple' or 'orange' directory.
        path = os.path.join(TRAINDIR2, category)
        class_num = CATEGORIES.index(category)

        for image in os.listdir(path):  # every file in the class folder
            image_array1 = cv2.imread(os.path.join(path, image))
            if image_array1 is None:
                # Skip unreadable / non-image files instead of crashing
                # inside cv2.resize on a None input.
                continue

            # Normalise all pictures to the same (150, 150) size.
            new_array1 = cv2.resize(image_array1, (nrows, ncolumns))

            # Collect the sample together with its integer label.
            train_data.append([new_array1, class_num])

create_training_data()

# Shuffle, then rebuild the feature / label arrays for the new dataset.
random.shuffle(train_data)

X = []
y = []

for feature, label in train_data:
    X.append(feature)
    y.append(label)

# Stack into one 4-D tensor (n_samples, 150, 150, 3) plus a label vector.
X = np.array(X).reshape(-1, nrows, ncolumns, channels)
y = np.array(y)

from sklearn.model_selection import train_test_split

# Same 80/20 split and random seed as the first experiment.
X_train, X_val, y_train, y_val = train_test_split(
    X, y, test_size=0.2, random_state=2)

from keras import layers
from keras import models
from keras import optimizers
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing.image import img_to_array, load_img

# Fresh model for the second dataset — same VGG-style architecture as
# the first experiment (four conv/max-pool stages + dense head).
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu',
                        input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Flatten())
model.add(layers.Dropout(0.5))  # Dropout for regularization.
model.add(layers.Dense(512, activation='relu'))
# Single sigmoid unit at the end because we have just two classes.
model.add(layers.Dense(1, activation='sigmoid'))

# Binary cross-entropy with RMSprop.  Use the `learning_rate` keyword:
# the `lr` alias is deprecated in modern Keras releases.
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=1e-4),
              metrics=['acc'])

# Same augmentation recipe as the first experiment: augment + rescale
# for training, rescale-only for validation.
train_datagen = ImageDataGenerator(rescale=1 / 255,
                                   rotation_range=40,
                                   width_shift_range=0.2,
                                   height_shift_range=0.2,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True)

val_datagen = ImageDataGenerator(rescale=1 / 255)

train_generator = train_datagen.flow(X_train, y_train, batch_size=batch_size)
val_generator = val_datagen.flow(X_val, y_val, batch_size=batch_size)

# Train the second model for 64 epochs.
# NOTE(review): `ntrain` / `nval` still hold the sizes of the FIRST
# dataset at this point, so recompute the step counts from the current
# split; guard with max(1, ...) so a small split never yields 0 steps.
history = model.fit_generator(train_generator,
                              steps_per_epoch=max(1, len(X_train) // batch_size),
                              epochs=64,
                              validation_data=val_generator,
                              validation_steps=max(1, len(X_val) // batch_size))

Epoch 1/64
1/1 [==============================] - 2s 2s/step - loss: 0.7488 - acc:
0.0000e+00 - val_loss: 0.7260 - val_acc: 0.0000e+00
Epoch 2/64
1/1 [==============================] - 0s 322ms/step - loss: 0.6702 - acc:
0.7500 - val_loss: 0.7659 - val_acc: 0.0000e+00
Epoch 3/64
1/1 [==============================] - 0s 238ms/step - loss: 0.7086 - acc:
0.2500 - val_loss: 0.7933 - val_acc: 0.0000e+00
Epoch 4/64

15
1/1 [==============================] - 0s 228ms/step - loss: 0.7221 - acc:
0.2500 - val_loss: 0.8002 - val_acc: 0.0000e+00
Epoch 5/64
1/1 [==============================] - 0s 223ms/step - loss: 0.6538 - acc:
0.5000 - val_loss: 0.8223 - val_acc: 0.0000e+00
Epoch 6/64
1/1 [==============================] - 0s 236ms/step - loss: 0.6598 - acc:
0.5000 - val_loss: 0.8112 - val_acc: 0.0000e+00
Epoch 7/64
1/1 [==============================] - 0s 220ms/step - loss: 0.6797 - acc:
0.5000 - val_loss: 0.8222 - val_acc: 0.0000e+00
Epoch 8/64
1/1 [==============================] - 0s 236ms/step - loss: 0.6600 - acc:
0.5000 - val_loss: 0.8145 - val_acc: 0.0000e+00
Epoch 9/64
1/1 [==============================] - 0s 227ms/step - loss: 0.6658 - acc:
0.5000 - val_loss: 0.8071 - val_acc: 0.0000e+00
Epoch 10/64
1/1 [==============================] - 0s 200ms/step - loss: 0.6317 - acc:
0.5000 - val_loss: 0.8206 - val_acc: 0.0000e+00
Epoch 11/64
1/1 [==============================] - 0s 211ms/step - loss: 0.6144 - acc:
0.5000 - val_loss: 0.8757 - val_acc: 0.0000e+00
Epoch 12/64
1/1 [==============================] - 0s 244ms/step - loss: 0.5905 - acc:
0.5000 - val_loss: 0.8940 - val_acc: 0.0000e+00
Epoch 13/64
1/1 [==============================] - 0s 226ms/step - loss: 0.5772 - acc:
0.5000 - val_loss: 0.8304 - val_acc: 0.0000e+00
Epoch 14/64
1/1 [==============================] - 0s 230ms/step - loss: 0.6076 - acc:
0.7500 - val_loss: 0.8506 - val_acc: 0.0000e+00
Epoch 15/64
1/1 [==============================] - 0s 231ms/step - loss: 0.5957 - acc:
0.7500 - val_loss: 0.8438 - val_acc: 0.0000e+00
Epoch 16/64
1/1 [==============================] - 0s 209ms/step - loss: 0.5621 - acc:
1.0000 - val_loss: 0.9591 - val_acc: 0.0000e+00
Epoch 17/64
1/1 [==============================] - 0s 221ms/step - loss: 0.5479 - acc:
0.7500 - val_loss: 0.8745 - val_acc: 0.0000e+00
Epoch 18/64
1/1 [==============================] - 0s 192ms/step - loss: 0.5567 - acc:
1.0000 - val_loss: 0.9368 - val_acc: 0.0000e+00
Epoch 19/64
1/1 [==============================] - 0s 230ms/step - loss: 0.5336 - acc:
0.5000 - val_loss: 0.7476 - val_acc: 0.0000e+00
Epoch 20/64

16
1/1 [==============================] - 0s 226ms/step - loss: 0.5187 - acc:
1.0000 - val_loss: 1.0511 - val_acc: 0.0000e+00
Epoch 21/64
1/1 [==============================] - 0s 227ms/step - loss: 0.5668 - acc:
0.5000 - val_loss: 0.6944 - val_acc: 0.0000e+00
Epoch 22/64
1/1 [==============================] - 0s 210ms/step - loss: 0.5034 - acc:
1.0000 - val_loss: 0.9773 - val_acc: 0.0000e+00
Epoch 23/64
1/1 [==============================] - 0s 230ms/step - loss: 0.4580 - acc:
0.7500 - val_loss: 0.7391 - val_acc: 0.0000e+00
Epoch 24/64
1/1 [==============================] - 0s 225ms/step - loss: 0.4911 - acc:
1.0000 - val_loss: 0.9671 - val_acc: 0.0000e+00
Epoch 25/64
1/1 [==============================] - 0s 229ms/step - loss: 0.4118 - acc:
1.0000 - val_loss: 0.6861 - val_acc: 1.0000
Epoch 26/64
1/1 [==============================] - 0s 233ms/step - loss: 0.4328 - acc:
1.0000 - val_loss: 0.8052 - val_acc: 0.0000e+00
Epoch 27/64
1/1 [==============================] - 0s 210ms/step - loss: 0.3462 - acc:
1.0000 - val_loss: 0.6915 - val_acc: 1.0000
Epoch 28/64
1/1 [==============================] - 0s 216ms/step - loss: 0.3413 - acc:
1.0000 - val_loss: 0.8950 - val_acc: 0.0000e+00
Epoch 29/64
1/1 [==============================] - 0s 214ms/step - loss: 0.3386 - acc:
1.0000 - val_loss: 0.7495 - val_acc: 0.0000e+00
Epoch 30/64
1/1 [==============================] - 0s 233ms/step - loss: 0.2826 - acc:
1.0000 - val_loss: 0.6020 - val_acc: 1.0000
Epoch 31/64
1/1 [==============================] - 0s 238ms/step - loss: 0.2306 - acc:
1.0000 - val_loss: 0.8484 - val_acc: 0.0000e+00
Epoch 32/64
1/1 [==============================] - 0s 219ms/step - loss: 0.2038 - acc:
1.0000 - val_loss: 0.2980 - val_acc: 1.0000
Epoch 33/64
1/1 [==============================] - 0s 223ms/step - loss: 0.2371 - acc:
1.0000 - val_loss: 1.2804 - val_acc: 0.0000e+00
Epoch 34/64
1/1 [==============================] - 0s 227ms/step - loss: 0.2799 - acc:
1.0000 - val_loss: 0.3936 - val_acc: 1.0000
Epoch 35/64
1/1 [==============================] - 0s 220ms/step - loss: 0.3017 - acc:
0.7500 - val_loss: 1.4285 - val_acc: 0.0000e+00
Epoch 36/64

17
1/1 [==============================] - 0s 213ms/step - loss: 0.3286 - acc:
0.7500 - val_loss: 0.3543 - val_acc: 1.0000
Epoch 37/64
1/1 [==============================] - 0s 230ms/step - loss: 0.2262 - acc:
1.0000 - val_loss: 0.8729 - val_acc: 0.0000e+00
Epoch 38/64
1/1 [==============================] - 0s 240ms/step - loss: 0.1715 - acc:
1.0000 - val_loss: 0.5858 - val_acc: 1.0000
Epoch 39/64
1/1 [==============================] - 0s 223ms/step - loss: 0.1511 - acc:
1.0000 - val_loss: 0.4047 - val_acc: 1.0000
Epoch 40/64
1/1 [==============================] - 0s 213ms/step - loss: 0.1389 - acc:
1.0000 - val_loss: 0.6890 - val_acc: 1.0000
Epoch 41/64
1/1 [==============================] - 0s 220ms/step - loss: 0.0760 - acc:
1.0000 - val_loss: 0.5406 - val_acc: 1.0000
Epoch 42/64
1/1 [==============================] - 0s 228ms/step - loss: 0.1174 - acc:
1.0000 - val_loss: 0.3511 - val_acc: 1.0000
Epoch 43/64
1/1 [==============================] - 0s 204ms/step - loss: 0.0817 - acc:
1.0000 - val_loss: 0.3587 - val_acc: 1.0000
Epoch 44/64
1/1 [==============================] - 0s 220ms/step - loss: 0.1399 - acc:
1.0000 - val_loss: 0.7814 - val_acc: 0.0000e+00
Epoch 45/64
1/1 [==============================] - 0s 209ms/step - loss: 0.1517 - acc:
1.0000 - val_loss: 0.6013 - val_acc: 1.0000
Epoch 46/64
1/1 [==============================] - 0s 244ms/step - loss: 0.1596 - acc:
1.0000 - val_loss: 0.7223 - val_acc: 0.0000e+00
Epoch 47/64
1/1 [==============================] - 0s 226ms/step - loss: 0.0931 - acc:
1.0000 - val_loss: 0.2536 - val_acc: 1.0000
Epoch 48/64
1/1 [==============================] - 0s 221ms/step - loss: 0.0430 - acc:
1.0000 - val_loss: 0.2913 - val_acc: 1.0000
Epoch 49/64
1/1 [==============================] - 0s 243ms/step - loss: 0.0561 - acc:
1.0000 - val_loss: 0.6297 - val_acc: 1.0000
Epoch 50/64
1/1 [==============================] - 0s 217ms/step - loss: 0.0457 - acc:
1.0000 - val_loss: 0.1851 - val_acc: 1.0000
Epoch 51/64
1/1 [==============================] - 0s 201ms/step - loss: 0.3246 - acc:
0.7500 - val_loss: 3.0876 - val_acc: 0.0000e+00
Epoch 52/64

18
1/1 [==============================] - 0s 223ms/step - loss: 0.4525 - acc:
0.7500 - val_loss: 0.4010 - val_acc: 1.0000
Epoch 53/64
1/1 [==============================] - 0s 230ms/step - loss: 0.0689 - acc:
1.0000 - val_loss: 0.4990 - val_acc: 1.0000
Epoch 54/64
1/1 [==============================] - 0s 229ms/step - loss: 0.1680 - acc:
1.0000 - val_loss: 1.5214 - val_acc: 0.0000e+00
Epoch 55/64
1/1 [==============================] - 0s 208ms/step - loss: 0.2285 - acc:
0.7500 - val_loss: 0.2791 - val_acc: 1.0000
Epoch 56/64
1/1 [==============================] - 0s 231ms/step - loss: 0.1566 - acc:
1.0000 - val_loss: 1.1356 - val_acc: 0.0000e+00
Epoch 57/64
1/1 [==============================] - 0s 227ms/step - loss: 0.0868 - acc:
1.0000 - val_loss: 0.4983 - val_acc: 1.0000
Epoch 58/64
1/1 [==============================] - 0s 215ms/step - loss: 0.0646 - acc:
1.0000 - val_loss: 0.2248 - val_acc: 1.0000
Epoch 59/64
1/1 [==============================] - 0s 232ms/step - loss: 0.2712 - acc:
0.7500 - val_loss: 2.0532 - val_acc: 0.0000e+00
Epoch 60/64
1/1 [==============================] - 0s 242ms/step - loss: 0.2136 - acc:
0.7500 - val_loss: 0.4599 - val_acc: 1.0000
Epoch 61/64
1/1 [==============================] - 0s 214ms/step - loss: 0.0810 - acc:
1.0000 - val_loss: 0.5290 - val_acc: 1.0000
Epoch 62/64
1/1 [==============================] - 0s 227ms/step - loss: 0.0383 - acc:
1.0000 - val_loss: 0.3421 - val_acc: 1.0000
Epoch 63/64
1/1 [==============================] - 0s 239ms/step - loss: 0.0513 - acc:
1.0000 - val_loss: 0.4359 - val_acc: 1.0000
Epoch 64/64
1/1 [==============================] - 0s 235ms/step - loss: 0.0359 - acc:
1.0000 - val_loss: 0.3430 - val_acc: 1.0000

# Generator for inference: only rescale to [0, 1], no augmentation.
test_datagen = ImageDataGenerator(rescale=1. / 255)

# Predict and display the first 10 images with their predicted labels.
i = 0
text_labels = []
plt.figure(figsize=(30, 20))
for batch in test_datagen.flow(X, batch_size=1):
    pred = model.predict(batch)
    # Sigmoid output: index the scalar explicitly instead of relying on
    # the truthiness of a (1, 1) array; > 0.5 means class 1 ('orange').
    if pred[0][0] > 0.5:
        text_labels.append('orange')
    else:
        text_labels.append('apple')
    # Subplot arguments must be integers — `5 / columns` is a float in
    # Python 3, so use floor division (grid is 2 rows x 5 columns).
    plt.subplot(5 // columns + 1, columns, i + 1)
    plt.title('This is a ' + text_labels[i])
    # Images were stored in BGR order; convert for correct colours.
    color_img = cv2.cvtColor(batch[0], cv2.COLOR_BGR2RGB)
    imgplot = plt.imshow(color_img)
    i += 1
    if i % 10 == 0:  # stop after 10 images — the generator loops forever
        break
plt.show()

[ ]:

20

You might also like