!jupyter nbconvert --to html /content/CIFAR10_T.ipynb
[NbConvertApp] Converting notebook /content/CIFAR10_T.ipynb to html
[NbConvertApp] Writing 919494 bytes to /content/CIFAR10_T.html
import tensorflow as tf

# Check whether a GPU is available
if tf.test.gpu_device_name():
    print('GPU device found:', tf.test.gpu_device_name())
else:
    print("No GPU found")
GPU device found: /device:GPU:0
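tf.test.gpu_device_name() works but is deprecated in recent TensorFlow 2.x releases; a minimal sketch of the newer device query, shown only as an alternative:
# Newer-style check (TF 2.x): list every GPU visible to TensorFlow
gpus = tf.config.list_physical_devices('GPU')
print("GPUs visible:", gpus)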
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import numpy as np
from tensorflow.keras.regularizers import l2
from tensorflow.keras.preprocessing.image import ImageDataGenerator
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
# Hold out 15% of the training images for validation
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size=0.15, random_state=42)
print("Training set shape:", x_train.shape, y_train.shape)
print("Vaidation set shape:", x_val.shape, y_val.shape)
print("Test set shape:", x_test.shape, y_test.shape)
Training set shape: (42500, 32, 32, 3) (42500, 1)
Validation set shape: (7500, 32, 32, 3) (7500, 1)
Test set shape: (10000, 32, 32, 3) (10000, 1)
Normalizing the images:
x_train, x_test, x_val = x_train / 255.0, x_test / 255.0, x_val / 255.0
print(f"shape of one image:{x_train[0].shape}")
shape of one image:(32, 32, 3)
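A quick sanity check, not in the original run: after dividing by 255.0 the pixel values should lie in [0, 1].
# Verify the scaling: expected output is 0.0 1.0
print(x_train.min(), x_train.max())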
# Visualizing an image:
plt.imshow(x_train[1, :, :, :])
<matplotlib.image.AxesImage at 0x7fcb484eeb00>
Number of classes:
num_classes = len(np.unique(y_train))
print(f"number of classes:{num_classes}")
number of classes:10
Building a VGG-like model:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, BatchNormalization, Activation, MaxPooling2D, GlobalAveragePooling2D, Dense, Dropout, Flatten
from tensorflow.keras.regularizers import l2
np.random.seed(42)
tf.random.set_seed(42)
model = Sequential([
Conv2D(32, kernel_size=(3, 3), kernel_initializer="he_normal", padding="same", kernel_regularizer=l2(0.01), input_shape=[32, 32, 3]),
BatchNormalization(),
Activation("relu"),
Conv2D(32, kernel_size=(3, 3), kernel_initializer="he_normal", padding="same"),
BatchNormalization(),
Activation("relu"),
MaxPooling2D(2),
Dropout(0.2),
Conv2D(64, 3, kernel_initializer="he_normal", padding="same"),
BatchNormalization(),
Activation("relu"),
Conv2D(64, 3, kernel_initializer="he_normal", padding="same"),
BatchNormalization(),
Activation("relu"),
MaxPooling2D(2),
Dropout(0.3),
Conv2D(128, 3, kernel_initializer="he_normal", padding="same"),
BatchNormalization(),
Activation("relu"),
Conv2D(128, 3, kernel_initializer="he_normal", padding="same"),
BatchNormalization(),
Activation("relu"),
MaxPooling2D(2),
Dropout(0.4),
Flatten(),
Dense(512, kernel_initializer="he_normal"),
BatchNormalization(),
Activation("relu"),
Dense(256, kernel_initializer="he_normal"),
BatchNormalization(),
Activation("relu"),
Dropout(0.5),
Dense(10, activation="softmax")
])
model.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_18 (Conv2D) (None, 32, 32, 32) 896 batch_normalization_24 (Ba (None, 32, 32, 32) 128 tchNormalization) activation_24 (Activation) (None, 32, 32, 32) 0 conv2d_19 (Conv2D) (None, 32, 32, 32) 9248 batch_normalization_25 (Ba (None, 32, 32, 32) 128 tchNormalization) activation_25 (Activation) (None, 32, 32, 32) 0 max_pooling2d_9 (MaxPoolin (None, 16, 16, 32) 0 g2D) dropout_12 (Dropout) (None, 16, 16, 32) 0 conv2d_20 (Conv2D) (None, 16, 16, 64) 18496 batch_normalization_26 (Ba (None, 16, 16, 64) 256 tchNormalization) activation_26 (Activation) (None, 16, 16, 64) 0 conv2d_21 (Conv2D) (None, 16, 16, 64) 36928 batch_normalization_27 (Ba (None, 16, 16, 64) 256 tchNormalization) activation_27 (Activation) (None, 16, 16, 64) 0 max_pooling2d_10 (MaxPooli (None, 8, 8, 64) 0 ng2D) dropout_13 (Dropout) (None, 8, 8, 64) 0 conv2d_22 (Conv2D) (None, 8, 8, 128) 73856 batch_normalization_28 (Ba (None, 8, 8, 128) 512 tchNormalization) activation_28 (Activation) (None, 8, 8, 128) 0 conv2d_23 (Conv2D) (None, 8, 8, 128) 147584 batch_normalization_29 (Ba (None, 8, 8, 128) 512 tchNormalization) activation_29 (Activation) (None, 8, 8, 128) 0 max_pooling2d_11 (MaxPooli (None, 4, 4, 128) 0 ng2D) dropout_14 (Dropout) (None, 4, 4, 128) 0 flatten_3 (Flatten) (None, 2048) 0 dense_9 (Dense) (None, 512) 1049088 batch_normalization_30 (Ba (None, 512) 2048 tchNormalization) activation_30 (Activation) (None, 512) 0 dense_10 (Dense) (None, 256) 131328 batch_normalization_31 (Ba (None, 256) 1024 tchNormalization) activation_31 (Activation) (None, 256) 0 dropout_15 (Dropout) (None, 256) 0 dense_11 (Dense) (None, 10) 2570 ================================================================= Total params: 1474858 (5.63 MB) Trainable params: 1472426 (5.62 MB) Non-trainable params: 2432 (9.50 KB) _________________________________________________________________
model.compile(loss="sparse_categorical_crossentropy",
optimizer= tf.keras.optimizers.Adam(learning_rate=0.001),
metrics=["accuracy"])
x_train.shape
(42500, 32, 32, 3)
from tensorflow.keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='val_loss', patience=35, restore_best_weights=True)
history = model.fit(
x_train, y_train,
epochs=100,
validation_data=(x_val, y_val),
batch_size=128,
callbacks=[early_stopping]
)
Epoch 1/100 - 14s 28ms/step - loss: 2.1815 - accuracy: 0.3922 - val_loss: 2.8111 - val_accuracy: 0.2499
Epoch 2/100 - 8s 25ms/step - loss: 1.4477 - accuracy: 0.5716 - val_loss: 1.2840 - val_accuracy: 0.6105
Epoch 3/100 - 8s 25ms/step - loss: 1.1598 - accuracy: 0.6451 - val_loss: 1.1491 - val_accuracy: 0.6399
Epoch 4/100 - 9s 26ms/step - loss: 1.0213 - accuracy: 0.6816 - val_loss: 1.0683 - val_accuracy: 0.6592
Epoch 5/100 - 8s 25ms/step - loss: 0.8983 - accuracy: 0.7168 - val_loss: 0.8265 - val_accuracy: 0.7425
Epoch 6/100 - 8s 25ms/step - loss: 0.8242 - accuracy: 0.7352 - val_loss: 0.8740 - val_accuracy: 0.7173
Epoch 7/100 - 9s 27ms/step - loss: 0.7621 - accuracy: 0.7563 - val_loss: 1.0051 - val_accuracy: 0.6903
Epoch 8/100 - 8s 25ms/step - loss: 0.7125 - accuracy: 0.7684 - val_loss: 0.7809 - val_accuracy: 0.7411
Epoch 9/100 - 9s 26ms/step - loss: 0.6820 - accuracy: 0.7790 - val_loss: 0.8986 - val_accuracy: 0.7044
Epoch 10/100 - 9s 26ms/step - loss: 0.6544 - accuracy: 0.7852 - val_loss: 0.9095 - val_accuracy: 0.7028
Epoch 11/100 - 8s 25ms/step - loss: 0.6070 - accuracy: 0.8005 - val_loss: 0.7156 - val_accuracy: 0.7688
Epoch 12/100 - 9s 27ms/step - loss: 0.5785 - accuracy: 0.8097 - val_loss: 0.7096 - val_accuracy: 0.7720
Epoch 13/100 - 8s 25ms/step - loss: 0.5339 - accuracy: 0.8253 - val_loss: 0.6502 - val_accuracy: 0.7912
Epoch 14/100 - 9s 26ms/step - loss: 0.5173 - accuracy: 0.8277 - val_loss: 0.6417 - val_accuracy: 0.7953
Epoch 15/100 - 9s 26ms/step - loss: 0.5021 - accuracy: 0.8330 - val_loss: 0.6824 - val_accuracy: 0.7837
Epoch 16/100 - 8s 25ms/step - loss: 0.4731 - accuracy: 0.8437 - val_loss: 0.6999 - val_accuracy: 0.7752
Epoch 17/100 - 9s 27ms/step - loss: 0.4620 - accuracy: 0.8447 - val_loss: 0.7171 - val_accuracy: 0.7724
Epoch 18/100 - 9s 26ms/step - loss: 0.4566 - accuracy: 0.8466 - val_loss: 0.6102 - val_accuracy: 0.8045
Epoch 19/100 - 9s 26ms/step - loss: 0.4146 - accuracy: 0.8619 - val_loss: 0.5261 - val_accuracy: 0.8336
Epoch 20/100 - 9s 27ms/step - loss: 0.3878 - accuracy: 0.8710 - val_loss: 0.6228 - val_accuracy: 0.8037
Epoch 21/100 - 8s 25ms/step - loss: 0.3746 - accuracy: 0.8740 - val_loss: 0.6115 - val_accuracy: 0.8111
Epoch 22/100 - 8s 25ms/step - loss: 0.3586 - accuracy: 0.8814 - val_loss: 0.5792 - val_accuracy: 0.8199
Epoch 23/100 - 9s 27ms/step - loss: 0.3435 - accuracy: 0.8854 - val_loss: 0.6382 - val_accuracy: 0.8011
Epoch 24/100 - 8s 24ms/step - loss: 0.3676 - accuracy: 0.8766 - val_loss: 0.5218 - val_accuracy: 0.8411
Epoch 25/100 - 9s 26ms/step - loss: 0.3589 - accuracy: 0.8793 - val_loss: 0.5062 - val_accuracy: 0.8411
Epoch 26/100 - 9s 26ms/step - loss: 0.3387 - accuracy: 0.8851 - val_loss: 0.5017 - val_accuracy: 0.8435
Epoch 27/100 - 9s 26ms/step - loss: 0.3075 - accuracy: 0.8971 - val_loss: 0.5137 - val_accuracy: 0.8453
Epoch 28/100 - 9s 27ms/step - loss: 0.2909 - accuracy: 0.9027 - val_loss: 0.5809 - val_accuracy: 0.8229
Epoch 29/100 - 8s 25ms/step - loss: 0.3175 - accuracy: 0.8945 - val_loss: 0.5979 - val_accuracy: 0.8225
Epoch 30/100 - 8s 25ms/step - loss: 0.2883 - accuracy: 0.9033 - val_loss: 0.7254 - val_accuracy: 0.7865
Epoch 31/100 - 9s 27ms/step - loss: 0.2854 - accuracy: 0.9050 - val_loss: 0.5319 - val_accuracy: 0.8425
Epoch 32/100 - 8s 25ms/step - loss: 0.2778 - accuracy: 0.9066 - val_loss: 0.9148 - val_accuracy: 0.7479
Epoch 33/100 - 9s 26ms/step - loss: 0.2726 - accuracy: 0.9098 - val_loss: 0.6182 - val_accuracy: 0.8241
Epoch 34/100 - 9s 26ms/step - loss: 0.2392 - accuracy: 0.9211 - val_loss: 0.5985 - val_accuracy: 0.8277
Epoch 35/100 - 8s 25ms/step - loss: 0.2473 - accuracy: 0.9175 - val_loss: 0.5731 - val_accuracy: 0.8360
Epoch 36/100 - 9s 27ms/step - loss: 0.2661 - accuracy: 0.9110 - val_loss: 0.7691 - val_accuracy: 0.7935
Epoch 37/100 - 8s 24ms/step - loss: 0.2675 - accuracy: 0.9100 - val_loss: 0.6339 - val_accuracy: 0.8225
Epoch 38/100 - 9s 26ms/step - loss: 0.2303 - accuracy: 0.9228 - val_loss: 0.5416 - val_accuracy: 0.8485
Epoch 39/100 - 9s 26ms/step - loss: 0.2317 - accuracy: 0.9239 - val_loss: 0.7458 - val_accuracy: 0.8037
Epoch 40/100 - 8s 25ms/step - loss: 0.2499 - accuracy: 0.9169 - val_loss: 0.5168 - val_accuracy: 0.8524
Epoch 41/100 - 9s 27ms/step - loss: 0.2130 - accuracy: 0.9303 - val_loss: 0.5442 - val_accuracy: 0.8504
Epoch 42/100 - 8s 25ms/step - loss: 0.2189 - accuracy: 0.9263 - val_loss: 0.5850 - val_accuracy: 0.8375
Epoch 43/100 - 8s 25ms/step - loss: 0.2254 - accuracy: 0.9256 - val_loss: 0.5458 - val_accuracy: 0.8507
Epoch 44/100 - 9s 26ms/step - loss: 0.2183 - accuracy: 0.9285 - val_loss: 0.5655 - val_accuracy: 0.8492
Epoch 45/100 - 8s 25ms/step - loss: 0.1747 - accuracy: 0.9429 - val_loss: 0.6605 - val_accuracy: 0.8287
Epoch 46/100 - 8s 25ms/step - loss: 0.2272 - accuracy: 0.9254 - val_loss: 0.6354 - val_accuracy: 0.8277
Epoch 47/100 - 8s 25ms/step - loss: 0.2424 - accuracy: 0.9208 - val_loss: 0.5102 - val_accuracy: 0.8583
Epoch 48/100 - 9s 26ms/step - loss: 0.1800 - accuracy: 0.9407 - val_loss: 1.0850 - val_accuracy: 0.7360
Epoch 49/100 - 9s 26ms/step - loss: 0.2413 - accuracy: 0.9213 - val_loss: 0.5361 - val_accuracy: 0.8544
Epoch 50/100 - 8s 25ms/step - loss: 0.1766 - accuracy: 0.9426 - val_loss: 0.6326 - val_accuracy: 0.8303
Epoch 51/100 - 9s 26ms/step - loss: 0.1938 - accuracy: 0.9362 - val_loss: 0.5227 - val_accuracy: 0.8619
Epoch 52/100 - 9s 26ms/step - loss: 0.1586 - accuracy: 0.9491 - val_loss: 0.8107 - val_accuracy: 0.7984
Epoch 53/100 - 8s 25ms/step - loss: 0.1693 - accuracy: 0.9461 - val_loss: 0.6900 - val_accuracy: 0.8260
Epoch 54/100 - 9s 27ms/step - loss: 0.2002 - accuracy: 0.9368 - val_loss: 0.5650 - val_accuracy: 0.8500
Epoch 55/100 - 9s 26ms/step - loss: 0.1837 - accuracy: 0.9415 - val_loss: 0.6433 - val_accuracy: 0.8263
Epoch 56/100 - 9s 26ms/step - loss: 0.2074 - accuracy: 0.9337 - val_loss: 0.6380 - val_accuracy: 0.8251
Epoch 57/100 - 9s 26ms/step - loss: 0.1710 - accuracy: 0.9445 - val_loss: 0.5740 - val_accuracy: 0.8481
Epoch 58/100 - 8s 25ms/step - loss: 0.2147 - accuracy: 0.9313 - val_loss: 0.5711 - val_accuracy: 0.8491
Epoch 59/100 - 9s 26ms/step - loss: 0.1899 - accuracy: 0.9393 - val_loss: 0.5822 - val_accuracy: 0.8487
Epoch 60/100 - 9s 26ms/step - loss: 0.1500 - accuracy: 0.9516 - val_loss: 0.5655 - val_accuracy: 0.8564
Epoch 61/100 - 8s 24ms/step - loss: 0.1332 - accuracy: 0.9573 - val_loss: 0.5880 - val_accuracy: 0.8573
print(history.history)
{'loss': [2.1814823150634766, 1.447716474533081, 1.159835696220398, 1.0213079452514648, 0.8983412981033325, 0.8241708874702454, 0.7620853781700134, 0.7125028967857361, 0.6819764375686646, 0.654421865940094, 0.606985867023468, 0.5785194635391235, 0.5338768362998962, 0.517296314239502, 0.5021288394927979, 0.4731230139732361, 0.46196967363357544, 0.456638365983963, 0.4146466553211212, 0.38777464628219604, 0.37456390261650085, 0.3585664629936218, 0.34347325563430786, 0.36756110191345215, 0.35886356234550476, 0.3386956751346588, 0.30754387378692627, 0.2909289002418518, 0.3175148665904999, 0.2883400022983551, 0.2853758633136749, 0.27776485681533813, 0.27255940437316895, 0.2392112761735916, 0.2473367601633072, 0.26606637239456177, 0.26751887798309326, 0.23032227158546448, 0.23169395327568054, 0.24985891580581665, 0.21300002932548523, 0.21887780725955963, 0.22535985708236694, 0.21827474236488342, 0.174684539437294, 0.22717952728271484, 0.24235911667346954, 0.18002931773662567, 0.24132470786571503, 0.17658685147762299, 0.19376330077648163, 0.1585652381181717, 0.16934652626514435, 0.20016571879386902, 0.1836690604686737, 0.20739488303661346, 0.17102877795696259, 0.21471071243286133, 0.18991082906723022, 0.1500278264284134, 0.13319918513298035], 'accuracy': [0.3921647071838379, 0.571647047996521, 0.645129382610321, 0.6815764904022217, 0.7167529463768005, 0.7351529598236084, 0.7563058733940125, 0.7684000134468079, 0.778988242149353, 0.7851529121398926, 0.8004705905914307, 0.8096941113471985, 0.8252705931663513, 0.8276705741882324, 0.8330117464065552, 0.8436706066131592, 0.8446588516235352, 0.8465882539749146, 0.8618588447570801, 0.8709882497787476, 0.8739529252052307, 0.8813647031784058, 0.8854352831840515, 0.8765646815299988, 0.879341185092926, 0.8850823640823364, 0.8970588445663452, 0.9027293920516968, 0.8944705724716187, 0.9032941460609436, 0.9049646854400635, 0.9066352844238281, 0.9098117351531982, 0.9211294054985046, 0.9174588322639465, 0.9110117554664612, 0.9099764823913574, 0.9228470325469971, 0.9239059090614319, 0.9168941378593445, 0.9303058981895447, 0.9263294339179993, 0.9255529642105103, 0.9284706115722656, 0.9428706169128418, 0.9253647327423096, 0.9207529425621033, 0.9407294392585754, 0.921294093132019, 0.9426352977752686, 0.9362353086471558, 0.9490588307380676, 0.9460706114768982, 0.9367764592170715, 0.9414588212966919, 0.9337411522865295, 0.9445411562919617, 0.9313176274299622, 0.9392706155776978, 0.9516235589981079, 0.9572705626487732], 'val_loss': [2.811070203781128, 1.2840464115142822, 1.1491369009017944, 1.068296194076538, 0.8264782428741455, 0.8739816546440125, 1.0051121711730957, 0.7809421420097351, 0.8986237645149231, 0.9094749093055725, 0.7156433463096619, 0.709630012512207, 0.650221586227417, 0.6417231559753418, 0.682414710521698, 0.6998876333236694, 0.7170525789260864, 0.6101602911949158, 0.5261286497116089, 0.6227561235427856, 0.6115197539329529, 0.5791767835617065, 0.6381644010543823, 0.5218157172203064, 0.5061997771263123, 0.5017004013061523, 0.5136963129043579, 0.5809248685836792, 0.59792560338974, 0.72543865442276, 0.5319222211837769, 0.9148004055023193, 0.6181521415710449, 0.5984808206558228, 0.5731435418128967, 0.769096851348877, 0.6339108943939209, 0.5415687561035156, 0.745840311050415, 0.5168358683586121, 0.5441645383834839, 0.585003674030304, 0.5457754135131836, 0.5655258893966675, 0.6604974865913391, 0.6353902816772461, 0.5101633667945862, 1.0849993228912354, 0.5360724925994873, 0.6326237916946411, 0.5226666331291199, 0.8107460141181946, 0.6900234818458557, 
0.5649811625480652, 0.6433318257331848, 0.6380101442337036, 0.5740437507629395, 0.571129322052002, 0.5821999311447144, 0.5654805302619934, 0.5880140662193298], 'val_accuracy': [0.24986666440963745, 0.6105333566665649, 0.6398666501045227, 0.6592000126838684, 0.7425333261489868, 0.7173333168029785, 0.6902666687965393, 0.7410666942596436, 0.7044000029563904, 0.7027999758720398, 0.7688000202178955, 0.7720000147819519, 0.7911999821662903, 0.7953333258628845, 0.7837333083152771, 0.7752000093460083, 0.7724000215530396, 0.8045333623886108, 0.8335999846458435, 0.8037333488464355, 0.8110666871070862, 0.8198666572570801, 0.8010666370391846, 0.8410666584968567, 0.8410666584968567, 0.8434666395187378, 0.8453333377838135, 0.8229333162307739, 0.8225333094596863, 0.7865333557128906, 0.8425333499908447, 0.747866690158844, 0.8241333365440369, 0.8277333378791809, 0.8360000252723694, 0.7934666872024536, 0.8225333094596863, 0.8485333323478699, 0.8037333488464355, 0.852400004863739, 0.8503999710083008, 0.8374666571617126, 0.8506666421890259, 0.8492000102996826, 0.8286666870117188, 0.8277333378791809, 0.8582666516304016, 0.7360000014305115, 0.8543999791145325, 0.8302666544914246, 0.8618666529655457, 0.7983999848365784, 0.8259999752044678, 0.8500000238418579, 0.8262666463851929, 0.8250666856765747, 0.8481333255767822, 0.8490666747093201, 0.8486666679382324, 0.8564000129699707, 0.8573333621025085]}
fig, ax = plt.subplots(figsize=(12, 7))
ax.plot(history.history["loss"], label="train_loss")
ax.plot(history.history["accuracy"], label="train_accuracy")
ax.plot(history.history["val_loss"], label="val_loss")
ax.plot(history.history["val_accuracy"], label="val_accuracy")
ax.legend()
<matplotlib.legend.Legend at 0x7f67b9c20190>
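Loss and accuracy live on different scales, so a variant with separate panels can be easier to read; a sketch of the same curves, not part of the original run:
# Same history, split into a loss panel and an accuracy panel
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
ax1.plot(history.history["loss"], label="train_loss")
ax1.plot(history.history["val_loss"], label="val_loss")
ax1.legend()
ax2.plot(history.history["accuracy"], label="train_accuracy")
ax2.plot(history.history["val_accuracy"], label="val_accuracy")
ax2.legend()
plt.show()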
model.evaluate(x_test,y_test)
313/313 [==============================] - 1s 4ms/step - loss: 0.5215 - accuracy: 0.8369
[0.5214540362358093, 0.836899995803833]
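An optional extra step, not in the original run: per-class test metrics via scikit-learn (already imported above for train_test_split), assuming the standard CIFAR-10 label order:
# Per-class precision/recall/F1 on the test set (sketch)
from sklearn.metrics import classification_report
y_pred = np.argmax(model.predict(x_test), axis=1)
print(classification_report(y_test.ravel(), y_pred))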
Training again with categorical_crossentropy as the loss, one-hot labels, and data augmentation:
# One-hot encode labels
from tensorflow.keras.utils import to_categorical
encoded_train_labels = to_categorical(y_train)
encoded_val_labels = to_categorical(y_val)
encoded_test_labels = to_categorical(y_test)
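to_categorical maps each integer id to a one-hot row, so the label arrays gain a class dimension; a quick check, added for illustration:
# Labels go from (N, 1) integer ids to (N, 10) one-hot rows
print(encoded_train_labels.shape)   # (42500, 10)
print(encoded_train_labels[0])      # a single 1.0 at the true class index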
tf.random.set_seed(42)
# Create an instance of ImageDataGenerator for data augmentation
datagen = ImageDataGenerator(
rotation_range=10,
width_shift_range=0.1,
height_shift_range=0.1,
horizontal_flip=True,
vertical_flip=False,
fill_mode='nearest'
)
# Flow the training data through the data generator
train_generator = datagen.flow(x_train, encoded_train_labels, batch_size=128)
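A quick way to eyeball what the generator produces (a sketch, not in the original run; the augmented images vary from call to call):
# Pull one augmented batch and show the first few images
aug_images, aug_labels = next(train_generator)
fig, axes = plt.subplots(1, 5, figsize=(10, 2))
for ax, img in zip(axes, aug_images[:5]):
    ax.imshow(img)
    ax.axis("off")
plt.show()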
initial_learning_rate = 0.001
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate, decay_steps=10000, decay_rate=0.9, staircase=True)
model.compile(loss="categorical_crossentropy",
optimizer=tf.keras.optimizers.Adam(learning_rate=lr_schedule),
metrics=["accuracy"])
tf.random.set_seed(42)
# Early stopping callback
from tensorflow.keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='val_loss', patience=30, restore_best_weights=True)
# Training the model
history = model.fit(
train_generator,
steps_per_epoch=len(x_train) // 128,
epochs=100,
validation_data=(x_val, encoded_val_labels),
callbacks=[early_stopping]
)
Epoch 1/100 - 35s 88ms/step - loss: 2.2503 - accuracy: 0.3636 - val_loss: 2.4740 - val_accuracy: 0.2751
Epoch 2/100 - 28s 86ms/step - loss: 1.6354 - accuracy: 0.5013 - val_loss: 1.4476 - val_accuracy: 0.5596
Epoch 3/100 - 28s 86ms/step - loss: 1.3748 - accuracy: 0.5713 - val_loss: 1.2575 - val_accuracy: 0.6137
Epoch 4/100 - 29s 87ms/step - loss: 1.1999 - accuracy: 0.6220 - val_loss: 1.2061 - val_accuracy: 0.6209
Epoch 5/100 - 29s 87ms/step - loss: 1.0889 - accuracy: 0.6513 - val_loss: 1.2372 - val_accuracy: 0.6001
Epoch 6/100 - 38s 115ms/step - loss: 1.0084 - accuracy: 0.6781 - val_loss: 0.9363 - val_accuracy: 0.7037
Epoch 7/100 - 29s 87ms/step - loss: 0.9518 - accuracy: 0.6928 - val_loss: 0.9280 - val_accuracy: 0.7045
Epoch 8/100 - 30s 91ms/step - loss: 0.8750 - accuracy: 0.7135 - val_loss: 1.1263 - val_accuracy: 0.6623
Epoch 9/100 - 30s 92ms/step - loss: 0.8599 - accuracy: 0.7216 - val_loss: 0.9867 - val_accuracy: 0.6864
Epoch 10/100 - 31s 93ms/step - loss: 0.8087 - accuracy: 0.7353 - val_loss: 0.8491 - val_accuracy: 0.7235
Epoch 11/100 - 30s 90ms/step - loss: 0.7795 - accuracy: 0.7437 - val_loss: 0.8315 - val_accuracy: 0.7308
Epoch 12/100 - 29s 88ms/step - loss: 0.7608 - accuracy: 0.7500 - val_loss: 1.1223 - val_accuracy: 0.6516
Epoch 13/100 - 29s 88ms/step - loss: 0.7363 - accuracy: 0.7578 - val_loss: 1.0212 - val_accuracy: 0.6684
Epoch 14/100 - 30s 90ms/step - loss: 0.7015 - accuracy: 0.7692 - val_loss: 0.9009 - val_accuracy: 0.7007
Epoch 15/100 - 29s 88ms/step - loss: 0.6979 - accuracy: 0.7697 - val_loss: 0.8599 - val_accuracy: 0.7293
Epoch 16/100 - 29s 87ms/step - loss: 0.6862 - accuracy: 0.7723 - val_loss: 0.8373 - val_accuracy: 0.7313
Epoch 17/100 - 28s 85ms/step - loss: 0.7094 - accuracy: 0.7640 - val_loss: 0.6445 - val_accuracy: 0.7871
Epoch 18/100 - 28s 85ms/step - loss: 0.6457 - accuracy: 0.7886 - val_loss: 0.6968 - val_accuracy: 0.7748
Epoch 19/100 - 29s 86ms/step - loss: 0.6494 - accuracy: 0.7848 - val_loss: 1.6434 - val_accuracy: 0.5668
Epoch 20/100 - 28s 86ms/step - loss: 0.6462 - accuracy: 0.7849 - val_loss: 0.6053 - val_accuracy: 0.8029
Epoch 21/100 - 29s 88ms/step - loss: 0.6023 - accuracy: 0.7991 - val_loss: 0.6696 - val_accuracy: 0.7768
Epoch 22/100 - 29s 87ms/step - loss: 0.5983 - accuracy: 0.7990 - val_loss: 0.5745 - val_accuracy: 0.8157
Epoch 23/100 - 30s 90ms/step - loss: 0.5723 - accuracy: 0.8089 - val_loss: 0.6432 - val_accuracy: 0.7947
Epoch 24/100 - 29s 88ms/step - loss: 0.5705 - accuracy: 0.8090 - val_loss: 0.6200 - val_accuracy: 0.7965
Epoch 25/100 - 30s 89ms/step - loss: 0.5616 - accuracy: 0.8132 - val_loss: 0.6248 - val_accuracy: 0.7921
Epoch 26/100 - 30s 90ms/step - loss: 0.5511 - accuracy: 0.8137 - val_loss: 0.5401 - val_accuracy: 0.8241
Epoch 27/100 - 29s 86ms/step - loss: 0.5419 - accuracy: 0.8181 - val_loss: 0.5843 - val_accuracy: 0.8083
Epoch 28/100 - 29s 86ms/step - loss: 0.5365 - accuracy: 0.8216 - val_loss: 0.5099 - val_accuracy: 0.8335
Epoch 29/100 - 29s 86ms/step - loss: 0.5279 - accuracy: 0.8232 - val_loss: 0.7367 - val_accuracy: 0.7585
Epoch 30/100 - 30s 90ms/step - loss: 0.5282 - accuracy: 0.8245 - val_loss: 0.5212 - val_accuracy: 0.8281
Epoch 31/100 - 29s 87ms/step - loss: 0.5049 - accuracy: 0.8304 - val_loss: 0.4803 - val_accuracy: 0.8405
Epoch 32/100 - 29s 87ms/step - loss: 0.4922 - accuracy: 0.8340 - val_loss: 0.5274 - val_accuracy: 0.8249
Epoch 33/100 - 29s 88ms/step - loss: 0.5156 - accuracy: 0.8288 - val_loss: 0.5499 - val_accuracy: 0.8180
Epoch 34/100 - 29s 86ms/step - loss: 0.4879 - accuracy: 0.8362 - val_loss: 0.4948 - val_accuracy: 0.8340
Epoch 35/100 - 29s 87ms/step - loss: 0.4813 - accuracy: 0.8393 - val_loss: 0.5486 - val_accuracy: 0.8232
Epoch 36/100 - 29s 87ms/step - loss: 0.4820 - accuracy: 0.8370 - val_loss: 0.4943 - val_accuracy: 0.8405
Epoch 37/100 - 29s 86ms/step - loss: 0.4696 - accuracy: 0.8420 - val_loss: 0.4421 - val_accuracy: 0.8528
Epoch 38/100 - 29s 87ms/step - loss: 0.4553 - accuracy: 0.8460 - val_loss: 0.6157 - val_accuracy: 0.8068
Epoch 39/100 - 29s 86ms/step - loss: 0.4540 - accuracy: 0.8481 - val_loss: 0.5530 - val_accuracy: 0.8193
Epoch 40/100 - 28s 85ms/step - loss: 0.4684 - accuracy: 0.8423 - val_loss: 0.4231 - val_accuracy: 0.8603
Epoch 41/100 - 30s 89ms/step - loss: 0.4471 - accuracy: 0.8519 - val_loss: 0.5048 - val_accuracy: 0.8357
Epoch 42/100 - 29s 86ms/step - loss: 0.4449 - accuracy: 0.8493 - val_loss: 0.5347 - val_accuracy: 0.8316
Epoch 43/100 - 29s 86ms/step - loss: 0.4466 - accuracy: 0.8504 - val_loss: 0.5400 - val_accuracy: 0.8236
Epoch 44/100 - 29s 86ms/step - loss: 0.4576 - accuracy: 0.8446 - val_loss: 0.4800 - val_accuracy: 0.8460
Epoch 45/100 - 28s 86ms/step - loss: 0.4538 - accuracy: 0.8468 - val_loss: 0.4544 - val_accuracy: 0.8544
Epoch 46/100 - 28s 85ms/step - loss: 0.4319 - accuracy: 0.8543 - val_loss: 0.5253 - val_accuracy: 0.8331
Epoch 47/100 - 29s 87ms/step - loss: 0.4254 - accuracy: 0.8578 - val_loss: 0.4923 - val_accuracy: 0.8419
Epoch 48/100 - 29s 87ms/step - loss: 0.4493 - accuracy: 0.8496 - val_loss: 0.5229 - val_accuracy: 0.8324
Epoch 49/100 - 29s 87ms/step - loss: 0.4170 - accuracy: 0.8593 - val_loss: 0.4393 - val_accuracy: 0.8569
Epoch 50/100 - 29s 88ms/step - loss: 0.4137 - accuracy: 0.8594 - val_loss: 0.4549 - val_accuracy: 0.8473
Epoch 51/100 - 29s 88ms/step - loss: 0.4208 - accuracy: 0.8596 - val_loss: 0.4924 - val_accuracy: 0.8413
Epoch 52/100 - 29s 87ms/step - loss: 0.4202 - accuracy: 0.8596 - val_loss: 0.4178 - val_accuracy: 0.8693
Epoch 53/100 - 29s 87ms/step - loss: 0.4065 - accuracy: 0.8624 - val_loss: 0.4333 - val_accuracy: 0.8601
Epoch 54/100 - 29s 86ms/step - loss: 0.3995 - accuracy: 0.8655 - val_loss: 0.4129 - val_accuracy: 0.8656
Epoch 55/100 - 29s 87ms/step - loss: 0.4124 - accuracy: 0.8612 - val_loss: 0.3922 - val_accuracy: 0.8727
Epoch 56/100 - 29s 86ms/step - loss: 0.3979 - accuracy: 0.8651 - val_loss: 0.4538 - val_accuracy: 0.8555
Epoch 57/100 - 29s 88ms/step - loss: 0.4276 - accuracy: 0.8558 - val_loss: 0.4020 - val_accuracy: 0.8696
Epoch 58/100 - 29s 87ms/step - loss: 0.3846 - accuracy: 0.8701 - val_loss: 0.4396 - val_accuracy: 0.8577
Epoch 59/100 - 30s 91ms/step - loss: 0.3890 - accuracy: 0.8693 - val_loss: 0.4552 - val_accuracy: 0.8551
Epoch 60/100 - 28s 85ms/step - loss: 0.3756 - accuracy: 0.8726 - val_loss: 0.4478 - val_accuracy: 0.8541
Epoch 61/100 - 29s 88ms/step - loss: 0.4166 - accuracy: 0.8607 - val_loss: 0.4986 - val_accuracy: 0.8405
Epoch 62/100 - 32s 96ms/step - loss: 0.3849 - accuracy: 0.8699 - val_loss: 0.4237 - val_accuracy: 0.8651
Epoch 63/100 - 29s 87ms/step - loss: 0.3927 - accuracy: 0.8679 - val_loss: 0.4153 - val_accuracy: 0.8689
Epoch 64/100 - 29s 88ms/step - loss: 0.3759 - accuracy: 0.8745 - val_loss: 0.4198 - val_accuracy: 0.8648
Epoch 65/100 - 31s 92ms/step - loss: 0.3685 - accuracy: 0.8752 - val_loss: 0.4057 - val_accuracy: 0.8699
Epoch 66/100 - 30s 90ms/step - loss: 0.3613 - accuracy: 0.8775 - val_loss: 0.3952 - val_accuracy: 0.8759
Epoch 67/100 - 30s 90ms/step - loss: 0.3657 - accuracy: 0.8777 - val_loss: 0.4230 - val_accuracy: 0.8675
Epoch 68/100 - 31s 93ms/step - loss: 0.3623 - accuracy: 0.8781 - val_loss: 0.4624 - val_accuracy: 0.8581
Epoch 69/100 - 30s 90ms/step - loss: 0.3569 - accuracy: 0.8802 - val_loss: 0.4630 - val_accuracy: 0.8537
Epoch 70/100 - 30s 89ms/step - loss: 0.3601 - accuracy: 0.8787 - val_loss: 0.4172 - val_accuracy: 0.8701
Epoch 71/100 - 30s 89ms/step - loss: 0.3577 - accuracy: 0.8792 - val_loss: 0.4655 - val_accuracy: 0.8505
Epoch 72/100 - 31s 93ms/step - loss: 0.3514 - accuracy: 0.8820 - val_loss: 0.4812 - val_accuracy: 0.8499
Epoch 73/100 - 33s 101ms/step - loss: 0.3500 - accuracy: 0.8812 - val_loss: 0.4419 - val_accuracy: 0.8644
Epoch 74/100 - 31s 95ms/step - loss: 0.3579 - accuracy: 0.8806 - val_loss: 0.4917 - val_accuracy: 0.8465
Epoch 75/100 - 31s 94ms/step - loss: 0.3519 - accuracy: 0.8819 - val_loss: 0.6016 - val_accuracy: 0.8209
Epoch 76/100 - 32s 97ms/step - loss: 0.3529 - accuracy: 0.8821 - val_loss: 0.4192 - val_accuracy: 0.8655
Epoch 77/100 - 32s 95ms/step - loss: 0.3446 - accuracy: 0.8854 - val_loss: 0.3970 - val_accuracy: 0.8725
Epoch 78/100 - 29s 89ms/step - loss: 0.3479 - accuracy: 0.8824 - val_loss: 0.4055 - val_accuracy: 0.8693
Epoch 79/100 - 28s 85ms/step - loss: 0.3460 - accuracy: 0.8842 - val_loss: 0.4221 - val_accuracy: 0.8687
Epoch 80/100 - 29s 87ms/step - loss: 0.3414 - accuracy: 0.8854 - val_loss: 0.4875 - val_accuracy: 0.8533
Epoch 81/100 - 28s 85ms/step - loss: 0.3375 - accuracy: 0.8859 - val_loss: 0.3575 - val_accuracy: 0.8836
Epoch 82/100 - 28s 86ms/step - loss: 0.3413 - accuracy: 0.8856 - val_loss: 0.4031 - val_accuracy: 0.8723
Epoch 83/100 - 28s 85ms/step - loss: 0.3303 - accuracy: 0.8889 - val_loss: 0.3889 - val_accuracy: 0.8717
Epoch 84/100 - 28s 83ms/step - loss: 0.3458 - accuracy: 0.8834 - val_loss: 0.4282 - val_accuracy: 0.8631
Epoch 85/100 - 28s 84ms/step - loss: 0.3376 - accuracy: 0.8861 - val_loss: 0.4299 - val_accuracy: 0.8684
Epoch 86/100 - 28s 83ms/step - loss: 0.3398 - accuracy: 0.8858 - val_loss: 0.8184 - val_accuracy: 0.7811
Epoch 87/100 - 29s 86ms/step - loss: 0.3369 - accuracy: 0.8868 - val_loss: 0.3671 - val_accuracy: 0.8813
Epoch 88/100 - 27s 83ms/step - loss: 0.3202 - accuracy: 0.8923 - val_loss: 0.3939 - val_accuracy: 0.8752
Epoch 89/100 - 27s 82ms/step - loss: 0.3300 - accuracy: 0.8897 - val_loss: 0.4502 - val_accuracy: 0.8623
Epoch 90/100 - 28s 84ms/step - loss: 0.3222 - accuracy: 0.8917 - val_loss: 0.5465 - val_accuracy: 0.8371
Epoch 91/100 - 29s 88ms/step - loss: 0.3308 - accuracy: 0.8872 - val_loss: 0.4462 - val_accuracy: 0.8609
Epoch 92/100 - 28s 84ms/step - loss: 0.3228 - accuracy: 0.8915 - val_loss: 0.4621 - val_accuracy: 0.8571
Epoch 93/100 - 29s 88ms/step - loss: 0.3181 - accuracy: 0.8931 - val_loss: 0.4314 - val_accuracy: 0.8676
Epoch 94/100 - 29s 87ms/step - loss: 0.3145 - accuracy: 0.8952 - val_loss: 0.3759 - val_accuracy: 0.8801
Epoch 95/100 - 28s 85ms/step - loss: 0.3075 - accuracy: 0.8947 - val_loss: 0.3806 - val_accuracy: 0.8815
Epoch 96/100 - 28s 85ms/step - loss: 0.3123 - accuracy: 0.8954 - val_loss: 0.4212 - val_accuracy: 0.8668
Epoch 97/100 - 29s 86ms/step - loss: 0.3338 - accuracy: 0.8883 - val_loss: 0.3813 - val_accuracy: 0.8801
Epoch 98/100 - 28s 86ms/step - loss: 0.3095 - accuracy: 0.8961 - val_loss: 0.3924 - val_accuracy: 0.8763
Epoch 99/100 - 28s 85ms/step - loss: 0.3078 - accuracy: 0.8958 - val_loss: 0.4343 - val_accuracy: 0.8652
Epoch 100/100 - 29s 86ms/step - loss: 0.3009 - accuracy: 0.9003 - val_loss: 0.4108 - val_accuracy: 0.8743
print(history.history)
fig, ax = plt.subplots(figsize=(12, 7))
ax.plot(history.history["loss"], label="train_loss")
ax.plot(history.history["accuracy"], label="train_accuracy")
ax.plot(history.history["val_loss"], label="val_loss")
ax.plot(history.history["val_accuracy"], label="val_accuracy")
ax.legend()
{'loss': [2.250312328338623, 1.635426640510559, 1.3747870922088623, 1.1999064683914185, 1.0889379978179932, 1.008361577987671, 0.9517942667007446, 0.8749510645866394, 0.8598518967628479, 0.8086517453193665, 0.7794846892356873, 0.7607797980308533, 0.7363200783729553, 0.701499879360199, 0.6978700160980225, 0.6862258315086365, 0.7094489932060242, 0.6456923484802246, 0.6493508219718933, 0.6461933851242065, 0.6022962331771851, 0.598313570022583, 0.5722615122795105, 0.5705021023750305, 0.5615644454956055, 0.5510632991790771, 0.5419420599937439, 0.5365434288978577, 0.5278522372245789, 0.528217077255249, 0.5049289464950562, 0.49220168590545654, 0.515593945980072, 0.48791414499282837, 0.4813007712364197, 0.48197004199028015, 0.4695904850959778, 0.45528560876846313, 0.4539899528026581, 0.4683857262134552, 0.447124719619751, 0.44485294818878174, 0.4465904235839844, 0.4576266407966614, 0.4538140296936035, 0.43187621235847473, 0.4253907799720764, 0.4493212103843689, 0.41696351766586304, 0.4137227237224579, 0.4208303689956665, 0.42015963792800903, 0.4065101444721222, 0.39946648478507996, 0.4123947024345398, 0.3979446589946747, 0.4275928735733032, 0.38464123010635376, 0.3890003561973572, 0.3756003677845001, 0.4165921211242676, 0.3848891258239746, 0.39267030358314514, 0.37594956159591675, 0.36852702498435974, 0.3613286018371582, 0.3657245635986328, 0.36228862404823303, 0.35691651701927185, 0.36010923981666565, 0.3576708734035492, 0.35143858194351196, 0.35002702474594116, 0.35785502195358276, 0.3519352078437805, 0.35294410586357117, 0.3445744216442108, 0.34792473912239075, 0.3460395634174347, 0.3414057493209839, 0.3374975621700287, 0.3413466811180115, 0.3303438425064087, 0.3458380103111267, 0.3376101851463318, 0.3398320972919464, 0.3369210660457611, 0.32021379470825195, 0.33002305030822754, 0.32222989201545715, 0.33083459734916687, 0.3227861821651459, 0.3181193172931671, 0.314506471157074, 0.3074586093425751, 0.3123388886451721, 0.3338242769241333, 0.309494286775589, 0.30777233839035034, 0.30093634128570557], 'accuracy': [0.3635655641555786, 0.5012980103492737, 0.571344256401062, 0.6220381259918213, 0.6513263583183289, 0.6780657172203064, 0.6928160190582275, 0.713466465473175, 0.7215614318847656, 0.7353441119194031, 0.7436986565589905, 0.750023603439331, 0.7577881813049316, 0.7692108154296875, 0.7696828246116638, 0.7722788453102112, 0.7639715075492859, 0.7886103987693787, 0.784787118434906, 0.7849051356315613, 0.799136221408844, 0.7990182042121887, 0.8088832497596741, 0.8089540004730225, 0.8132021427154541, 0.8136741518974304, 0.8180873990058899, 0.8216038942337036, 0.823208749294281, 0.8244595527648926, 0.83038330078125, 0.8340177536010742, 0.8288256525993347, 0.8361889719963074, 0.8393042683601379, 0.8369914293289185, 0.8419710993766785, 0.8459832072257996, 0.8481308221817017, 0.8423486948013306, 0.8519068956375122, 0.8493108749389648, 0.8504201173782349, 0.8446379899978638, 0.8468092083930969, 0.8542905449867249, 0.8577834367752075, 0.8496412634849548, 0.8592938780784607, 0.8593882918357849, 0.8596478700637817, 0.8595534563064575, 0.8624327182769775, 0.8654772043228149, 0.8612291216850281, 0.8651232123374939, 0.855777382850647, 0.8700792789459229, 0.8692532777786255, 0.8726281523704529, 0.8606626987457275, 0.8699377179145813, 0.8678608536720276, 0.8744925856590271, 0.8751770257949829, 0.8774898648262024, 0.8777258396148682, 0.8781270384788513, 0.8801566958427429, 0.8786934614181519, 0.8792362809181213, 0.881973922252655, 0.8812187314033508, 0.8806287050247192, 0.8818795680999756, 0.8820683360099792, 
0.8854432106018066, 0.8823515772819519, 0.884168803691864, 0.8854196071624756, 0.8858916163444519, 0.8855612277984619, 0.8888652920722961, 0.883389949798584, 0.8861276507377625, 0.8857735991477966, 0.8867884278297424, 0.8922637701034546, 0.8897148966789246, 0.8916501402854919, 0.8872132301330566, 0.8915321230888367, 0.8931369781494141, 0.8951666355133057, 0.8947182297706604, 0.8954498171806335, 0.8882516622543335, 0.8961342573165894, 0.8957566022872925, 0.9003115296363831], 'val_loss': [2.473998546600342, 1.4475561380386353, 1.257472276687622, 1.2061374187469482, 1.2372276782989502, 0.9362546801567078, 0.9279881715774536, 1.1263319253921509, 0.9867005348205566, 0.8490851521492004, 0.8315287828445435, 1.1222822666168213, 1.0212199687957764, 0.9008691906929016, 0.8598985075950623, 0.8373152613639832, 0.6445251703262329, 0.6968488097190857, 1.643428921699524, 0.6052645444869995, 0.6696333289146423, 0.5744549036026001, 0.6432464122772217, 0.6199572682380676, 0.6247666478157043, 0.5400819778442383, 0.5843408107757568, 0.5098875761032104, 0.7366741895675659, 0.5212010145187378, 0.4803029000759125, 0.5274070501327515, 0.5499218106269836, 0.4947817325592041, 0.5486045479774475, 0.4942537248134613, 0.4421330988407135, 0.615730881690979, 0.5530040264129639, 0.4230826795101166, 0.5047702193260193, 0.5346550941467285, 0.539975643157959, 0.47997742891311646, 0.4544055759906769, 0.5253060460090637, 0.49231868982315063, 0.522911548614502, 0.4392528235912323, 0.4549023509025574, 0.492374449968338, 0.41777172684669495, 0.43328341841697693, 0.4128981828689575, 0.39222753047943115, 0.4537571668624878, 0.4019990861415863, 0.4395880401134491, 0.45517584681510925, 0.4477686285972595, 0.49864912033081055, 0.42370039224624634, 0.4152754843235016, 0.41976115107536316, 0.40574538707733154, 0.39517277479171753, 0.4229939579963684, 0.4624478816986084, 0.4629608988761902, 0.4171876311302185, 0.46552011370658875, 0.4811772406101227, 0.4419132173061371, 0.491688072681427, 0.6016214489936829, 0.4192078709602356, 0.396955281496048, 0.40548720955848694, 0.42207199335098267, 0.48745277523994446, 0.3575287163257599, 0.4030650854110718, 0.3888617753982544, 0.4281570017337799, 0.42988696694374084, 0.8184012770652771, 0.3671082556247711, 0.39391207695007324, 0.45020416378974915, 0.5465275645256042, 0.4462036192417145, 0.46211153268814087, 0.43140852451324463, 0.3758588135242462, 0.3805990517139435, 0.42120444774627686, 0.3813328444957733, 0.39239054918289185, 0.4343244135379791, 0.41076478362083435], 'val_accuracy': [0.27506667375564575, 0.5595999956130981, 0.6137333512306213, 0.6209333539009094, 0.6001333594322205, 0.7037333250045776, 0.7045333385467529, 0.6622666716575623, 0.6863999962806702, 0.723466694355011, 0.7307999730110168, 0.6516000032424927, 0.66839998960495, 0.7006666660308838, 0.7293333411216736, 0.731333315372467, 0.787066638469696, 0.7748000025749207, 0.5667999982833862, 0.8029333353042603, 0.7767999768257141, 0.8157333135604858, 0.7946666479110718, 0.7965333461761475, 0.7921333312988281, 0.8241333365440369, 0.8082666397094727, 0.833466649055481, 0.7585333585739136, 0.8281333446502686, 0.8405333161354065, 0.8249333500862122, 0.8180000185966492, 0.8339999914169312, 0.823199987411499, 0.8405333161354065, 0.8528000116348267, 0.8068000078201294, 0.8193333148956299, 0.8602666854858398, 0.8357333540916443, 0.83160001039505, 0.8235999941825867, 0.8460000157356262, 0.8543999791145325, 0.8330666422843933, 0.841866672039032, 0.8324000239372253, 0.8569333553314209, 0.8473333120346069, 0.8413333296775818, 0.8693333268165588, 
0.8601333498954773, 0.8655999898910522, 0.8726666569709778, 0.8554666638374329, 0.8695999979972839, 0.8577333092689514, 0.8550666570663452, 0.8541333079338074, 0.8405333161354065, 0.865066647529602, 0.8689333200454712, 0.864799976348877, 0.869866669178009, 0.8758666515350342, 0.8674666881561279, 0.8581333160400391, 0.8537333607673645, 0.8701333403587341, 0.8505333065986633, 0.8498666882514954, 0.8644000291824341, 0.8465333580970764, 0.8209333419799805, 0.8654666543006897, 0.8725333213806152, 0.8693333268165588, 0.8686666488647461, 0.8533333539962769, 0.8835999965667725, 0.8722666501998901, 0.8717333078384399, 0.8630666732788086, 0.868399977684021, 0.7810666561126709, 0.8813333511352539, 0.8751999735832214, 0.8622666597366333, 0.837066650390625, 0.8609333038330078, 0.8570666909217834, 0.8676000237464905, 0.880133330821991, 0.8814666867256165, 0.8668000102043152, 0.880133330821991, 0.8762666583061218, 0.8651999831199646, 0.8742666840553284]}
<matplotlib.legend.Legend at 0x7fcb3c5a77c0>
model.evaluate(x_test, encoded_test_labels)
313/313 [==============================] - 2s 5ms/step - loss: 0.4472 - accuracy: 0.8665
[0.44718217849731445, 0.8665000200271606]
x_test.shape
(10000, 32, 32, 3)
x1 = x_test[615]
x1.shape
(32, 32, 3)
plt.figure(figsize=(3, 3), dpi=50)
plt.imshow(x1)
<matplotlib.image.AxesImage at 0x7fcb3c40bdf0>
# Add a batch dimension: the model expects (batch_size, height, width, channels)
x1 = x1.reshape((1, 32, 32, 3))
prediction = model.predict(x1)
1/1 [==============================] - 0s 94ms/step
print(prediction)
[[2.1478393e-06 1.8128403e-05 2.6587983e-05 3.9318818e-04 1.3967255e-07 1.7272690e-05 3.3286619e-07 1.5380884e-07 9.9286360e-01 6.6784704e-03]]
class_labels = ["Airplane", "Automobile", "Bird", "Cat", "Deer", "Dog", "Frog", "Horse", "Ship", "Truck"]
predicted_class = np.argmax(prediction)
predicted_label = class_labels[predicted_class]
print("Predicted class:", predicted_class)
print("Predicted label:", predicted_label)
Predicted class: 8
Predicted label: Ship
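The full probability vector carries more information than the argmax alone; a small sketch, not in the original run, printing the three most likely classes:
# Top-3 classes by predicted probability
top3 = np.argsort(prediction[0])[::-1][:3]
for i in top3:
    print(f"{class_labels[i]}: {prediction[0][i]:.4f}")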
Now I evaluate with another test image:
x2 = x_test[437]
plt.imshow(x2)
<matplotlib.image.AxesImage at 0x7fcb3c312620>
x2 = x2.reshape((1, 32, 32, 3))
prediction = model.predict(x2)
print(prediction)
1/1 [==============================] - 0s 28ms/step
[[3.4435237e-09 5.8759206e-11 3.9931943e-04 2.5441786e-03 5.0257989e-03 9.8802364e-01 3.9799828e-03 2.7241738e-05 4.7917020e-10 4.8024917e-09]]
predicted_class = np.argmax(prediction)
print(predicted_class)
5
predicted_label = class_labels[predicted_class]
print("Predicted class:", predicted_class)
print("Predicted label:", predicted_label)
Predicted class: 5
Predicted label: Dog