! jupyter nbconvert --to html ///content/Copy_of_Resnet_CIFAR10.ipynb
[NbConvertApp] Converting notebook ///content/Copy_of_Resnet_CIFAR10.ipynb to html [NbConvertApp] Writing 1871917 bytes to /content/Copy_of_Resnet_CIFAR10.html
import tensorflow as tf

# Report whether TensorFlow can see a GPU.
# NOTE(review): tf.test.gpu_device_name() is deprecated in favour of
# tf.config.list_physical_devices('GPU'); kept for identical output.
gpu_name = tf.test.gpu_device_name()  # hoisted: original called it twice
if gpu_name:
    print('GPU device found:', gpu_name)
else:
    print("No GPU found")
No GPU found
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import numpy as np
from keras.initializers import he_normal
from tensorflow.keras.regularizers import l2
from keras.preprocessing.image import ImageDataGenerator
# CIFAR-10: 50,000 training and 10,000 test images, each 32x32x3 uint8.
(X_train, Y_train), (X_test, Y_test) = keras.datasets.cifar10.load_data()
Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz 170498071/170498071 [==============================] - 6s 0us/step
# Further split the training set into training and validation sets
# (15% validation, fixed seed for reproducibility).
X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=0.15, random_state=42)
# input preprocessing: https://keras.io/api/applications/resnet/#resnet50-function
# Sanity-check the split sizes before preprocessing.
print("Training set shape:", X_train.shape, Y_train.shape)
print("Validation set shape:", X_val.shape, Y_val.shape)  # fixed "Vaidation" typo
print("Test set shape:", X_test.shape, Y_test.shape)
Training set shape: (42500, 32, 32, 3) (42500, 1) Vaidation set shape: (7500, 32, 32, 3) (7500, 1) Test set shape: (10000, 32, 32, 3) (10000, 1)
num_classes = 10  # CIFAR-10 class count (renamed from the misspelled `num_lasses`)


def preprocess_data(X, Y):
    """Apply ResNet50 input preprocessing and one-hot encode the labels.

    Args:
        X: image array of shape (n, 32, 32, 3).
        Y: integer label array of shape (n, 1).

    Returns:
        Tuple (x, y): preprocessed images and one-hot labels of shape (n, num_classes).
    """
    x = keras.applications.resnet.preprocess_input(X)
    y = keras.utils.to_categorical(Y, num_classes)
    return x, y


X_train, Y_train = preprocess_data(X_train, Y_train)
X_val, Y_val = preprocess_data(X_val, Y_val)
X_test, Y_test = preprocess_data(X_test, Y_test)
Y_train[0]  # inspect the one-hot encoding of the first training label
array([1., 0., 0., 0., 0., 0., 0., 0., 0., 0.], dtype=float32)
# Confirm preprocessing: labels are now one-hot with 10 columns.
print("Training set shape:", X_train.shape, Y_train.shape)
print("Validation set shape:", X_val.shape, Y_val.shape)  # fixed "Vaidation" typo
print("Test set shape:", X_test.shape, Y_test.shape)
Training set shape: (42500, 32, 32, 3) (42500, 10) Vaidation set shape: (7500, 32, 32, 3) (7500, 10) Test set shape: (10000, 32, 32, 3) (10000, 10)
# Base model: ResNet50 pretrained on ImageNet, without the classification
# head; input_shape=(224,224,3), so the 32x32 CIFAR images must be resized
# before being fed in (done below with a Lambda layer).
ResNet_base = keras.applications.ResNet50(include_top=False,weights='imagenet',input_shape=(224,224,3))
# Freeze all layers so only the new classification head is trained.
ResNet_base.trainable = False
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5 94765736/94765736 [==============================] - 0s 0us/step
ResNet_base.summary()  # ~23.6M params, all non-trainable after freezing
Model: "resnet50" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_1 (InputLayer) [(None, 224, 224, 3)] 0 [] conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_1[0][0]'] conv1_conv (Conv2D) (None, 112, 112, 64) 9472 ['conv1_pad[0][0]'] conv1_bn (BatchNormalizati (None, 112, 112, 64) 256 ['conv1_conv[0][0]'] on) conv1_relu (Activation) (None, 112, 112, 64) 0 ['conv1_bn[0][0]'] pool1_pad (ZeroPadding2D) (None, 114, 114, 64) 0 ['conv1_relu[0][0]'] pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] conv2_block1_1_conv (Conv2 (None, 56, 56, 64) 4160 ['pool1_pool[0][0]'] D) conv2_block1_1_bn (BatchNo (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] rmalization) conv2_block1_1_relu (Activ (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] ation) conv2_block1_2_conv (Conv2 (None, 56, 56, 64) 36928 ['conv2_block1_1_relu[0][0]'] D) conv2_block1_2_bn (BatchNo (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] rmalization) conv2_block1_2_relu (Activ (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] ation) conv2_block1_0_conv (Conv2 (None, 56, 56, 256) 16640 ['pool1_pool[0][0]'] D) conv2_block1_3_conv (Conv2 (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] D) conv2_block1_0_bn (BatchNo (None, 56, 56, 256) 1024 ['conv2_block1_0_conv[0][0]'] rmalization) conv2_block1_3_bn (BatchNo (None, 56, 56, 256) 1024 ['conv2_block1_3_conv[0][0]'] rmalization) conv2_block1_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_bn[0][0]', 'conv2_block1_3_bn[0][0]'] conv2_block1_out (Activati (None, 56, 56, 256) 0 ['conv2_block1_add[0][0]'] on) conv2_block2_1_conv (Conv2 (None, 56, 56, 64) 16448 ['conv2_block1_out[0][0]'] D) conv2_block2_1_bn (BatchNo (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] rmalization) conv2_block2_1_relu (Activ (None, 56, 56, 64) 0 
['conv2_block2_1_bn[0][0]'] ation) conv2_block2_2_conv (Conv2 (None, 56, 56, 64) 36928 ['conv2_block2_1_relu[0][0]'] D) conv2_block2_2_bn (BatchNo (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] rmalization) conv2_block2_2_relu (Activ (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] ation) conv2_block2_3_conv (Conv2 (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] D) conv2_block2_3_bn (BatchNo (None, 56, 56, 256) 1024 ['conv2_block2_3_conv[0][0]'] rmalization) conv2_block2_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', 'conv2_block2_3_bn[0][0]'] conv2_block2_out (Activati (None, 56, 56, 256) 0 ['conv2_block2_add[0][0]'] on) conv2_block3_1_conv (Conv2 (None, 56, 56, 64) 16448 ['conv2_block2_out[0][0]'] D) conv2_block3_1_bn (BatchNo (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] rmalization) conv2_block3_1_relu (Activ (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] ation) conv2_block3_2_conv (Conv2 (None, 56, 56, 64) 36928 ['conv2_block3_1_relu[0][0]'] D) conv2_block3_2_bn (BatchNo (None, 56, 56, 64) 256 ['conv2_block3_2_conv[0][0]'] rmalization) conv2_block3_2_relu (Activ (None, 56, 56, 64) 0 ['conv2_block3_2_bn[0][0]'] ation) conv2_block3_3_conv (Conv2 (None, 56, 56, 256) 16640 ['conv2_block3_2_relu[0][0]'] D) conv2_block3_3_bn (BatchNo (None, 56, 56, 256) 1024 ['conv2_block3_3_conv[0][0]'] rmalization) conv2_block3_add (Add) (None, 56, 56, 256) 0 ['conv2_block2_out[0][0]', 'conv2_block3_3_bn[0][0]'] conv2_block3_out (Activati (None, 56, 56, 256) 0 ['conv2_block3_add[0][0]'] on) conv3_block1_1_conv (Conv2 (None, 28, 28, 128) 32896 ['conv2_block3_out[0][0]'] D) conv3_block1_1_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] rmalization) conv3_block1_1_relu (Activ (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] ation) conv3_block1_2_conv (Conv2 (None, 28, 28, 128) 147584 ['conv3_block1_1_relu[0][0]'] D) conv3_block1_2_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] rmalization) conv3_block1_2_relu 
(Activ (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] ation) conv3_block1_0_conv (Conv2 (None, 28, 28, 512) 131584 ['conv2_block3_out[0][0]'] D) conv3_block1_3_conv (Conv2 (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] D) conv3_block1_0_bn (BatchNo (None, 28, 28, 512) 2048 ['conv3_block1_0_conv[0][0]'] rmalization) conv3_block1_3_bn (BatchNo (None, 28, 28, 512) 2048 ['conv3_block1_3_conv[0][0]'] rmalization) conv3_block1_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_bn[0][0]', 'conv3_block1_3_bn[0][0]'] conv3_block1_out (Activati (None, 28, 28, 512) 0 ['conv3_block1_add[0][0]'] on) conv3_block2_1_conv (Conv2 (None, 28, 28, 128) 65664 ['conv3_block1_out[0][0]'] D) conv3_block2_1_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] rmalization) conv3_block2_1_relu (Activ (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] ation) conv3_block2_2_conv (Conv2 (None, 28, 28, 128) 147584 ['conv3_block2_1_relu[0][0]'] D) conv3_block2_2_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] rmalization) conv3_block2_2_relu (Activ (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] ation) conv3_block2_3_conv (Conv2 (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] D) conv3_block2_3_bn (BatchNo (None, 28, 28, 512) 2048 ['conv3_block2_3_conv[0][0]'] rmalization) conv3_block2_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', 'conv3_block2_3_bn[0][0]'] conv3_block2_out (Activati (None, 28, 28, 512) 0 ['conv3_block2_add[0][0]'] on) conv3_block3_1_conv (Conv2 (None, 28, 28, 128) 65664 ['conv3_block2_out[0][0]'] D) conv3_block3_1_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] rmalization) conv3_block3_1_relu (Activ (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] ation) conv3_block3_2_conv (Conv2 (None, 28, 28, 128) 147584 ['conv3_block3_1_relu[0][0]'] D) conv3_block3_2_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] rmalization) conv3_block3_2_relu (Activ (None, 28, 28, 128) 0 
['conv3_block3_2_bn[0][0]'] ation) conv3_block3_3_conv (Conv2 (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] D) conv3_block3_3_bn (BatchNo (None, 28, 28, 512) 2048 ['conv3_block3_3_conv[0][0]'] rmalization) conv3_block3_add (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', 'conv3_block3_3_bn[0][0]'] conv3_block3_out (Activati (None, 28, 28, 512) 0 ['conv3_block3_add[0][0]'] on) conv3_block4_1_conv (Conv2 (None, 28, 28, 128) 65664 ['conv3_block3_out[0][0]'] D) conv3_block4_1_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] rmalization) conv3_block4_1_relu (Activ (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] ation) conv3_block4_2_conv (Conv2 (None, 28, 28, 128) 147584 ['conv3_block4_1_relu[0][0]'] D) conv3_block4_2_bn (BatchNo (None, 28, 28, 128) 512 ['conv3_block4_2_conv[0][0]'] rmalization) conv3_block4_2_relu (Activ (None, 28, 28, 128) 0 ['conv3_block4_2_bn[0][0]'] ation) conv3_block4_3_conv (Conv2 (None, 28, 28, 512) 66048 ['conv3_block4_2_relu[0][0]'] D) conv3_block4_3_bn (BatchNo (None, 28, 28, 512) 2048 ['conv3_block4_3_conv[0][0]'] rmalization) conv3_block4_add (Add) (None, 28, 28, 512) 0 ['conv3_block3_out[0][0]', 'conv3_block4_3_bn[0][0]'] conv3_block4_out (Activati (None, 28, 28, 512) 0 ['conv3_block4_add[0][0]'] on) conv4_block1_1_conv (Conv2 (None, 14, 14, 256) 131328 ['conv3_block4_out[0][0]'] D) conv4_block1_1_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] rmalization) conv4_block1_1_relu (Activ (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] ation) conv4_block1_2_conv (Conv2 (None, 14, 14, 256) 590080 ['conv4_block1_1_relu[0][0]'] D) conv4_block1_2_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] rmalization) conv4_block1_2_relu (Activ (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] ation) conv4_block1_0_conv (Conv2 (None, 14, 14, 1024) 525312 ['conv3_block4_out[0][0]'] D) conv4_block1_3_conv (Conv2 (None, 14, 14, 1024) 263168 ['conv4_block1_2_relu[0][0]'] D) 
conv4_block1_0_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block1_0_conv[0][0]'] rmalization) conv4_block1_3_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block1_3_conv[0][0]'] rmalization) conv4_block1_add (Add) (None, 14, 14, 1024) 0 ['conv4_block1_0_bn[0][0]', 'conv4_block1_3_bn[0][0]'] conv4_block1_out (Activati (None, 14, 14, 1024) 0 ['conv4_block1_add[0][0]'] on) conv4_block2_1_conv (Conv2 (None, 14, 14, 256) 262400 ['conv4_block1_out[0][0]'] D) conv4_block2_1_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] rmalization) conv4_block2_1_relu (Activ (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] ation) conv4_block2_2_conv (Conv2 (None, 14, 14, 256) 590080 ['conv4_block2_1_relu[0][0]'] D) conv4_block2_2_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] rmalization) conv4_block2_2_relu (Activ (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] ation) conv4_block2_3_conv (Conv2 (None, 14, 14, 1024) 263168 ['conv4_block2_2_relu[0][0]'] D) conv4_block2_3_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block2_3_conv[0][0]'] rmalization) conv4_block2_add (Add) (None, 14, 14, 1024) 0 ['conv4_block1_out[0][0]', 'conv4_block2_3_bn[0][0]'] conv4_block2_out (Activati (None, 14, 14, 1024) 0 ['conv4_block2_add[0][0]'] on) conv4_block3_1_conv (Conv2 (None, 14, 14, 256) 262400 ['conv4_block2_out[0][0]'] D) conv4_block3_1_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] rmalization) conv4_block3_1_relu (Activ (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] ation) conv4_block3_2_conv (Conv2 (None, 14, 14, 256) 590080 ['conv4_block3_1_relu[0][0]'] D) conv4_block3_2_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] rmalization) conv4_block3_2_relu (Activ (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] ation) conv4_block3_3_conv (Conv2 (None, 14, 14, 1024) 263168 ['conv4_block3_2_relu[0][0]'] D) conv4_block3_3_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block3_3_conv[0][0]'] rmalization) conv4_block3_add 
(Add) (None, 14, 14, 1024) 0 ['conv4_block2_out[0][0]', 'conv4_block3_3_bn[0][0]'] conv4_block3_out (Activati (None, 14, 14, 1024) 0 ['conv4_block3_add[0][0]'] on) conv4_block4_1_conv (Conv2 (None, 14, 14, 256) 262400 ['conv4_block3_out[0][0]'] D) conv4_block4_1_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] rmalization) conv4_block4_1_relu (Activ (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] ation) conv4_block4_2_conv (Conv2 (None, 14, 14, 256) 590080 ['conv4_block4_1_relu[0][0]'] D) conv4_block4_2_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] rmalization) conv4_block4_2_relu (Activ (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] ation) conv4_block4_3_conv (Conv2 (None, 14, 14, 1024) 263168 ['conv4_block4_2_relu[0][0]'] D) conv4_block4_3_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block4_3_conv[0][0]'] rmalization) conv4_block4_add (Add) (None, 14, 14, 1024) 0 ['conv4_block3_out[0][0]', 'conv4_block4_3_bn[0][0]'] conv4_block4_out (Activati (None, 14, 14, 1024) 0 ['conv4_block4_add[0][0]'] on) conv4_block5_1_conv (Conv2 (None, 14, 14, 256) 262400 ['conv4_block4_out[0][0]'] D) conv4_block5_1_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] rmalization) conv4_block5_1_relu (Activ (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] ation) conv4_block5_2_conv (Conv2 (None, 14, 14, 256) 590080 ['conv4_block5_1_relu[0][0]'] D) conv4_block5_2_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] rmalization) conv4_block5_2_relu (Activ (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] ation) conv4_block5_3_conv (Conv2 (None, 14, 14, 1024) 263168 ['conv4_block5_2_relu[0][0]'] D) conv4_block5_3_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block5_3_conv[0][0]'] rmalization) conv4_block5_add (Add) (None, 14, 14, 1024) 0 ['conv4_block4_out[0][0]', 'conv4_block5_3_bn[0][0]'] conv4_block5_out (Activati (None, 14, 14, 1024) 0 ['conv4_block5_add[0][0]'] on) conv4_block6_1_conv (Conv2 (None, 14, 14, 256) 
262400 ['conv4_block5_out[0][0]'] D) conv4_block6_1_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] rmalization) conv4_block6_1_relu (Activ (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] ation) conv4_block6_2_conv (Conv2 (None, 14, 14, 256) 590080 ['conv4_block6_1_relu[0][0]'] D) conv4_block6_2_bn (BatchNo (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] rmalization) conv4_block6_2_relu (Activ (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] ation) conv4_block6_3_conv (Conv2 (None, 14, 14, 1024) 263168 ['conv4_block6_2_relu[0][0]'] D) conv4_block6_3_bn (BatchNo (None, 14, 14, 1024) 4096 ['conv4_block6_3_conv[0][0]'] rmalization) conv4_block6_add (Add) (None, 14, 14, 1024) 0 ['conv4_block5_out[0][0]', 'conv4_block6_3_bn[0][0]'] conv4_block6_out (Activati (None, 14, 14, 1024) 0 ['conv4_block6_add[0][0]'] on) conv5_block1_1_conv (Conv2 (None, 7, 7, 512) 524800 ['conv4_block6_out[0][0]'] D) conv5_block1_1_bn (BatchNo (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] rmalization) conv5_block1_1_relu (Activ (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] ation) conv5_block1_2_conv (Conv2 (None, 7, 7, 512) 2359808 ['conv5_block1_1_relu[0][0]'] D) conv5_block1_2_bn (BatchNo (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] rmalization) conv5_block1_2_relu (Activ (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] ation) conv5_block1_0_conv (Conv2 (None, 7, 7, 2048) 2099200 ['conv4_block6_out[0][0]'] D) conv5_block1_3_conv (Conv2 (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] D) conv5_block1_0_bn (BatchNo (None, 7, 7, 2048) 8192 ['conv5_block1_0_conv[0][0]'] rmalization) conv5_block1_3_bn (BatchNo (None, 7, 7, 2048) 8192 ['conv5_block1_3_conv[0][0]'] rmalization) conv5_block1_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_bn[0][0]', 'conv5_block1_3_bn[0][0]'] conv5_block1_out (Activati (None, 7, 7, 2048) 0 ['conv5_block1_add[0][0]'] on) conv5_block2_1_conv (Conv2 (None, 7, 7, 512) 1049088 ['conv5_block1_out[0][0]'] D) 
conv5_block2_1_bn (BatchNo (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] rmalization) conv5_block2_1_relu (Activ (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] ation) conv5_block2_2_conv (Conv2 (None, 7, 7, 512) 2359808 ['conv5_block2_1_relu[0][0]'] D) conv5_block2_2_bn (BatchNo (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] rmalization) conv5_block2_2_relu (Activ (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] ation) conv5_block2_3_conv (Conv2 (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] D) conv5_block2_3_bn (BatchNo (None, 7, 7, 2048) 8192 ['conv5_block2_3_conv[0][0]'] rmalization) conv5_block2_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', 'conv5_block2_3_bn[0][0]'] conv5_block2_out (Activati (None, 7, 7, 2048) 0 ['conv5_block2_add[0][0]'] on) conv5_block3_1_conv (Conv2 (None, 7, 7, 512) 1049088 ['conv5_block2_out[0][0]'] D) conv5_block3_1_bn (BatchNo (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] rmalization) conv5_block3_1_relu (Activ (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] ation) conv5_block3_2_conv (Conv2 (None, 7, 7, 512) 2359808 ['conv5_block3_1_relu[0][0]'] D) conv5_block3_2_bn (BatchNo (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] rmalization) conv5_block3_2_relu (Activ (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] ation) conv5_block3_3_conv (Conv2 (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] D) conv5_block3_3_bn (BatchNo (None, 7, 7, 2048) 8192 ['conv5_block3_3_conv[0][0]'] rmalization) conv5_block3_add (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', 'conv5_block3_3_bn[0][0]'] conv5_block3_out (Activati (None, 7, 7, 2048) 0 ['conv5_block3_add[0][0]'] on) ================================================================================================== Total params: 23587712 (89.98 MB) Trainable params: 0 (0.00 Byte) Non-trainable params: 23587712 (89.98 MB) __________________________________________________________________________________________________
from tensorflow.keras.utils import plot_model

# Save an architecture diagram of the base model to model.png.
plot_model(ResNet_base, to_file='model.png')

# List layer indices and names (used later to decide how many layers to
# unfreeze for fine-tuning).
for i, layer in enumerate(ResNet_base.layers):
    print(i, layer.name)
0 input_1 1 conv1_pad 2 conv1_conv 3 conv1_bn 4 conv1_relu 5 pool1_pad 6 pool1_pool 7 conv2_block1_1_conv 8 conv2_block1_1_bn 9 conv2_block1_1_relu 10 conv2_block1_2_conv 11 conv2_block1_2_bn 12 conv2_block1_2_relu 13 conv2_block1_0_conv 14 conv2_block1_3_conv 15 conv2_block1_0_bn 16 conv2_block1_3_bn 17 conv2_block1_add 18 conv2_block1_out 19 conv2_block2_1_conv 20 conv2_block2_1_bn 21 conv2_block2_1_relu 22 conv2_block2_2_conv 23 conv2_block2_2_bn 24 conv2_block2_2_relu 25 conv2_block2_3_conv 26 conv2_block2_3_bn 27 conv2_block2_add 28 conv2_block2_out 29 conv2_block3_1_conv 30 conv2_block3_1_bn 31 conv2_block3_1_relu 32 conv2_block3_2_conv 33 conv2_block3_2_bn 34 conv2_block3_2_relu 35 conv2_block3_3_conv 36 conv2_block3_3_bn 37 conv2_block3_add 38 conv2_block3_out 39 conv3_block1_1_conv 40 conv3_block1_1_bn 41 conv3_block1_1_relu 42 conv3_block1_2_conv 43 conv3_block1_2_bn 44 conv3_block1_2_relu 45 conv3_block1_0_conv 46 conv3_block1_3_conv 47 conv3_block1_0_bn 48 conv3_block1_3_bn 49 conv3_block1_add 50 conv3_block1_out 51 conv3_block2_1_conv 52 conv3_block2_1_bn 53 conv3_block2_1_relu 54 conv3_block2_2_conv 55 conv3_block2_2_bn 56 conv3_block2_2_relu 57 conv3_block2_3_conv 58 conv3_block2_3_bn 59 conv3_block2_add 60 conv3_block2_out 61 conv3_block3_1_conv 62 conv3_block3_1_bn 63 conv3_block3_1_relu 64 conv3_block3_2_conv 65 conv3_block3_2_bn 66 conv3_block3_2_relu 67 conv3_block3_3_conv 68 conv3_block3_3_bn 69 conv3_block3_add 70 conv3_block3_out 71 conv3_block4_1_conv 72 conv3_block4_1_bn 73 conv3_block4_1_relu 74 conv3_block4_2_conv 75 conv3_block4_2_bn 76 conv3_block4_2_relu 77 conv3_block4_3_conv 78 conv3_block4_3_bn 79 conv3_block4_add 80 conv3_block4_out 81 conv4_block1_1_conv 82 conv4_block1_1_bn 83 conv4_block1_1_relu 84 conv4_block1_2_conv 85 conv4_block1_2_bn 86 conv4_block1_2_relu 87 conv4_block1_0_conv 88 conv4_block1_3_conv 89 conv4_block1_0_bn 90 conv4_block1_3_bn 91 conv4_block1_add 92 conv4_block1_out 93 conv4_block2_1_conv 94 
conv4_block2_1_bn 95 conv4_block2_1_relu 96 conv4_block2_2_conv 97 conv4_block2_2_bn 98 conv4_block2_2_relu 99 conv4_block2_3_conv 100 conv4_block2_3_bn 101 conv4_block2_add 102 conv4_block2_out 103 conv4_block3_1_conv 104 conv4_block3_1_bn 105 conv4_block3_1_relu 106 conv4_block3_2_conv 107 conv4_block3_2_bn 108 conv4_block3_2_relu 109 conv4_block3_3_conv 110 conv4_block3_3_bn 111 conv4_block3_add 112 conv4_block3_out 113 conv4_block4_1_conv 114 conv4_block4_1_bn 115 conv4_block4_1_relu 116 conv4_block4_2_conv 117 conv4_block4_2_bn 118 conv4_block4_2_relu 119 conv4_block4_3_conv 120 conv4_block4_3_bn 121 conv4_block4_add 122 conv4_block4_out 123 conv4_block5_1_conv 124 conv4_block5_1_bn 125 conv4_block5_1_relu 126 conv4_block5_2_conv 127 conv4_block5_2_bn 128 conv4_block5_2_relu 129 conv4_block5_3_conv 130 conv4_block5_3_bn 131 conv4_block5_add 132 conv4_block5_out 133 conv4_block6_1_conv 134 conv4_block6_1_bn 135 conv4_block6_1_relu 136 conv4_block6_2_conv 137 conv4_block6_2_bn 138 conv4_block6_2_relu 139 conv4_block6_3_conv 140 conv4_block6_3_bn 141 conv4_block6_add 142 conv4_block6_out 143 conv5_block1_1_conv 144 conv5_block1_1_bn 145 conv5_block1_1_relu 146 conv5_block1_2_conv 147 conv5_block1_2_bn 148 conv5_block1_2_relu 149 conv5_block1_0_conv 150 conv5_block1_3_conv 151 conv5_block1_0_bn 152 conv5_block1_3_bn 153 conv5_block1_add 154 conv5_block1_out 155 conv5_block2_1_conv 156 conv5_block2_1_bn 157 conv5_block2_1_relu 158 conv5_block2_2_conv 159 conv5_block2_2_bn 160 conv5_block2_2_relu 161 conv5_block2_3_conv 162 conv5_block2_3_bn 163 conv5_block2_add 164 conv5_block2_out 165 conv5_block3_1_conv 166 conv5_block3_1_bn 167 conv5_block3_1_relu 168 conv5_block3_2_conv 169 conv5_block3_2_bn 170 conv5_block3_2_relu 171 conv5_block3_3_conv 172 conv5_block3_3_bn 173 conv5_block3_add 174 conv5_block3_out
len(ResNet_base.layers)  # 175 layers in total
175
# Transfer-learning head: resize CIFAR images to ResNet50's expected 224x224,
# run the frozen base in inference mode, then pool + dropout + softmax.
inputs = tf.keras.Input(shape=(32, 32, 3))
# Renamed from `input`, which shadowed the builtin.
resized = tf.keras.layers.Lambda(lambda image: tf.image.resize(image, (224, 224)))(inputs)
# training=False keeps BatchNormalization layers in inference mode.
x = ResNet_base(resized, training=False)
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = tf.keras.layers.Dropout(0.3)(x)
outputs = tf.keras.layers.Dense(10, activation='softmax')(x)
model = tf.keras.Model(inputs, outputs)

# Ensure the base stays frozen (feature extraction only).
ResNet_base.trainable = False

model.compile(loss="categorical_crossentropy",
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              metrics=["accuracy"])
# Fix the random seed so the training run is reproducible.
tf.random.set_seed(42)

# Train the classification head for 10 epochs on the preprocessed arrays.
history = model.fit(
    X_train,
    Y_train,
    validation_data=(X_val, Y_val),
    batch_size=128,
    epochs=10,
    verbose=1,
)
Epoch 1/10 333/333 [==============================] - 139s 409ms/step - loss: 0.5527 - accuracy: 0.8105 - val_loss: 0.3218 - val_accuracy: 0.8896 Epoch 2/10 333/333 [==============================] - 135s 405ms/step - loss: 0.3419 - accuracy: 0.8804 - val_loss: 0.2790 - val_accuracy: 0.9013 Epoch 3/10 333/333 [==============================] - 135s 405ms/step - loss: 0.3100 - accuracy: 0.8919 - val_loss: 0.2712 - val_accuracy: 0.9057 Epoch 4/10 333/333 [==============================] - 135s 405ms/step - loss: 0.2890 - accuracy: 0.8975 - val_loss: 0.2837 - val_accuracy: 0.9027 Epoch 5/10 333/333 [==============================] - 135s 405ms/step - loss: 0.2820 - accuracy: 0.9013 - val_loss: 0.2715 - val_accuracy: 0.9084 Epoch 6/10 333/333 [==============================] - 135s 405ms/step - loss: 0.2749 - accuracy: 0.9026 - val_loss: 0.2636 - val_accuracy: 0.9111 Epoch 7/10 333/333 [==============================] - 135s 406ms/step - loss: 0.2687 - accuracy: 0.9056 - val_loss: 0.2794 - val_accuracy: 0.9068 Epoch 8/10 333/333 [==============================] - 135s 406ms/step - loss: 0.2697 - accuracy: 0.9062 - val_loss: 0.2625 - val_accuracy: 0.9117 Epoch 9/10 333/333 [==============================] - 134s 404ms/step - loss: 0.2651 - accuracy: 0.9076 - val_loss: 0.2610 - val_accuracy: 0.9065 Epoch 10/10 333/333 [==============================] - 135s 406ms/step - loss: 0.2630 - accuracy: 0.9084 - val_loss: 0.2552 - val_accuracy: 0.9156
# Plot training curves; loss and accuracy are drawn on the same axis.
fig, ax = plt.subplots(figsize=(12, 7))
ax.plot(history.history["loss"], label="train_loss")
ax.plot(history.history["accuracy"], label="train_accuracy")
ax.plot(history.history["val_loss"], label="val_loss")
ax.plot(history.history["val_accuracy"], label="val_accuracy")  # unified quoting
ax.set_xlabel("epoch")  # added: x-axis was unlabeled
ax.legend()
<matplotlib.legend.Legend at 0x7af5bc790b50>
model.evaluate(X_test, Y_test)  # final [loss, accuracy] on the held-out test set
313/313 [==============================] - 30s 95ms/step - loss: 0.2748 - accuracy: 0.9086
[0.2748320698738098, 0.9085999727249146]
# Reload a fresh ImageNet-pretrained base for the fine-tuning experiment.
# NOTE: unlike the first run, `trainable` is not set to False here; per-layer
# trainability is configured explicitly further below.
ResNet_base = keras.applications.ResNet50(include_top=False,weights='imagenet',input_shape=(224,224,3))
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5 94765736/94765736 [==============================] - 0s 0us/step
# Reload the raw data (the arrays above were already preprocessed in place).
(X_train, Y_train), (X_test, Y_test) = keras.datasets.cifar10.load_data()
# Further split the training set into training and validation sets
# (same 15% split and seed as before).
X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=0.15, random_state=42)
num_classes = 10  # CIFAR-10 class count (renamed from the misspelled `num_lasses`)


def preprocess_data(X, Y):
    """Apply ResNet50 input preprocessing and one-hot encode the labels.

    Args:
        X: image array of shape (n, 32, 32, 3).
        Y: integer label array of shape (n, 1).

    Returns:
        Tuple (x, y): preprocessed images and one-hot labels of shape (n, num_classes).
    """
    x = keras.applications.resnet.preprocess_input(X)
    y = keras.utils.to_categorical(Y, num_classes)
    return x, y


X_train, Y_train = preprocess_data(X_train, Y_train)
X_val, Y_val = preprocess_data(X_val, Y_val)
X_test, Y_test = preprocess_data(X_test, Y_test)
tf.random.set_seed(42)

# Data augmentation: mild geometric jitter plus horizontal mirroring.
# Vertical flips are disabled (they would distort CIFAR semantics).
datagen = ImageDataGenerator(
    rotation_range=10,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    vertical_flip=False,
    fill_mode='nearest',
)

# Wrap the training arrays in an augmented batch iterator.
train_generator = datagen.flow(X_train, Y_train, batch_size=128)
# Same architecture as before, but with a higher dropout rate (0.4) for the
# fine-tuning run.
inputs = tf.keras.Input(shape=(32, 32, 3))
# Renamed from `input`, which shadowed the builtin.
resized = tf.keras.layers.Lambda(lambda image: tf.image.resize(image, (224, 224)))(inputs)
# training=False keeps BatchNormalization layers in inference mode even when
# some base layers are made trainable below.
x = ResNet_base(resized, training=False)
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = tf.keras.layers.Dropout(0.4)(x)
outputs = tf.keras.layers.Dense(10, activation='softmax')(x)
model = tf.keras.Model(inputs, outputs)

# Number of layers from the end to make trainable
num_train_layers = 10
num_layers_untrain = len(ResNet_base.layers) - num_train_layers
# Single pass replaces the original pair of slice loops: freeze everything
# before the cutoff, unfreeze everything after it.
for i, layer in enumerate(ResNet_base.layers):
    layer.trainable = i >= num_layers_untrain
# Very small starting LR with staircase exponential decay — appropriate when
# fine-tuning pretrained weights.
initial_learning_rate = 1e-5
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate,
    decay_steps=10000,
    decay_rate=0.9,
    staircase=True,
)

model.compile(
    loss="categorical_crossentropy",
    optimizer=tf.keras.optimizers.Adam(learning_rate=lr_schedule),
    metrics=["accuracy"],
)
from tensorflow.keras.callbacks import EarlyStopping

# Set a seed for reproducibility
tf.random.set_seed(42)

# Stop training once val_loss has not improved for 30 epochs, and roll back
# to the best weights observed.
early_stopping = EarlyStopping(monitor='val_loss', patience=30, restore_best_weights=True)

# Train on augmented batches from the generator. The previous `batch_size=128`
# argument is removed: Model.fit ignores batch_size when the input is a
# generator — the generator's own batch_size=128 is what actually applies.
history = model.fit(
    train_generator,
    epochs=150,
    validation_data=(X_val, Y_val),
    verbose=1,
    callbacks=[early_stopping],  # include the Early Stopping callback
)
Epoch 1/150 333/333 [==============================] - 192s 528ms/step - loss: 1.4933 - accuracy: 0.5129 - val_loss: 0.4770 - val_accuracy: 0.8343 Epoch 2/150 333/333 [==============================] - 175s 525ms/step - loss: 0.6729 - accuracy: 0.7734 - val_loss: 0.3727 - val_accuracy: 0.8691 Epoch 3/150 333/333 [==============================] - 155s 466ms/step - loss: 0.5491 - accuracy: 0.8104 - val_loss: 0.3272 - val_accuracy: 0.8843 Epoch 4/150 333/333 [==============================] - 174s 524ms/step - loss: 0.4804 - accuracy: 0.8345 - val_loss: 0.3134 - val_accuracy: 0.8911 Epoch 5/150 333/333 [==============================] - 175s 524ms/step - loss: 0.4424 - accuracy: 0.8473 - val_loss: 0.2924 - val_accuracy: 0.8983 Epoch 6/150 333/333 [==============================] - 174s 524ms/step - loss: 0.4081 - accuracy: 0.8601 - val_loss: 0.2758 - val_accuracy: 0.9023 Epoch 7/150 333/333 [==============================] - 174s 524ms/step - loss: 0.3815 - accuracy: 0.8662 - val_loss: 0.2690 - val_accuracy: 0.9047 Epoch 8/150 333/333 [==============================] - 174s 524ms/step - loss: 0.3644 - accuracy: 0.8746 - val_loss: 0.2612 - val_accuracy: 0.9089 Epoch 9/150 333/333 [==============================] - 155s 465ms/step - loss: 0.3489 - accuracy: 0.8789 - val_loss: 0.2515 - val_accuracy: 0.9123 Epoch 10/150 333/333 [==============================] - 174s 524ms/step - loss: 0.3303 - accuracy: 0.8848 - val_loss: 0.2486 - val_accuracy: 0.9119 Epoch 11/150 333/333 [==============================] - 174s 524ms/step - loss: 0.3166 - accuracy: 0.8906 - val_loss: 0.2358 - val_accuracy: 0.9173 Epoch 12/150 333/333 [==============================] - 155s 465ms/step - loss: 0.3032 - accuracy: 0.8936 - val_loss: 0.2349 - val_accuracy: 0.9163 Epoch 13/150 333/333 [==============================] - 155s 464ms/step - loss: 0.2908 - accuracy: 0.8994 - val_loss: 0.2373 - val_accuracy: 0.9169 Epoch 14/150 333/333 [==============================] - 174s 523ms/step - loss: 
0.2754 - accuracy: 0.9036 - val_loss: 0.2314 - val_accuracy: 0.9197 Epoch 15/150 333/333 [==============================] - 175s 524ms/step - loss: 0.2744 - accuracy: 0.9048 - val_loss: 0.2216 - val_accuracy: 0.9255 Epoch 16/150 333/333 [==============================] - 175s 524ms/step - loss: 0.2624 - accuracy: 0.9091 - val_loss: 0.2170 - val_accuracy: 0.9257 Epoch 17/150 333/333 [==============================] - 175s 524ms/step - loss: 0.2531 - accuracy: 0.9106 - val_loss: 0.2163 - val_accuracy: 0.9264 Epoch 18/150 333/333 [==============================] - 175s 524ms/step - loss: 0.2453 - accuracy: 0.9126 - val_loss: 0.2084 - val_accuracy: 0.9283 Epoch 19/150 333/333 [==============================] - 155s 465ms/step - loss: 0.2374 - accuracy: 0.9161 - val_loss: 0.2137 - val_accuracy: 0.9287 Epoch 20/150 333/333 [==============================] - 174s 524ms/step - loss: 0.2346 - accuracy: 0.9187 - val_loss: 0.2076 - val_accuracy: 0.9301 Epoch 21/150 333/333 [==============================] - 155s 465ms/step - loss: 0.2217 - accuracy: 0.9217 - val_loss: 0.2120 - val_accuracy: 0.9287 Epoch 22/150 333/333 [==============================] - 174s 523ms/step - loss: 0.2173 - accuracy: 0.9242 - val_loss: 0.2097 - val_accuracy: 0.9293 Epoch 23/150 333/333 [==============================] - 174s 524ms/step - loss: 0.2060 - accuracy: 0.9264 - val_loss: 0.2104 - val_accuracy: 0.9307 Epoch 24/150 333/333 [==============================] - 174s 524ms/step - loss: 0.2017 - accuracy: 0.9285 - val_loss: 0.2081 - val_accuracy: 0.9340 Epoch 25/150 333/333 [==============================] - 175s 524ms/step - loss: 0.1906 - accuracy: 0.9333 - val_loss: 0.2013 - val_accuracy: 0.9323 Epoch 26/150 333/333 [==============================] - 174s 524ms/step - loss: 0.1906 - accuracy: 0.9321 - val_loss: 0.2009 - val_accuracy: 0.9345 Epoch 27/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1870 - accuracy: 0.9347 - val_loss: 0.2090 - val_accuracy: 0.9299 Epoch 
28/150 333/333 [==============================] - 155s 464ms/step - loss: 0.1811 - accuracy: 0.9359 - val_loss: 0.2043 - val_accuracy: 0.9320 Epoch 29/150 333/333 [==============================] - 175s 524ms/step - loss: 0.1748 - accuracy: 0.9384 - val_loss: 0.2044 - val_accuracy: 0.9341 Epoch 30/150 333/333 [==============================] - 175s 524ms/step - loss: 0.1663 - accuracy: 0.9408 - val_loss: 0.1977 - val_accuracy: 0.9361 Epoch 31/150 333/333 [==============================] - 175s 524ms/step - loss: 0.1566 - accuracy: 0.9432 - val_loss: 0.2096 - val_accuracy: 0.9340 Epoch 32/150 333/333 [==============================] - 175s 525ms/step - loss: 0.1583 - accuracy: 0.9441 - val_loss: 0.1981 - val_accuracy: 0.9363 Epoch 33/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1528 - accuracy: 0.9468 - val_loss: 0.2080 - val_accuracy: 0.9335 Epoch 34/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1498 - accuracy: 0.9463 - val_loss: 0.1957 - val_accuracy: 0.9384 Epoch 35/150 333/333 [==============================] - 174s 523ms/step - loss: 0.1463 - accuracy: 0.9483 - val_loss: 0.1953 - val_accuracy: 0.9372 Epoch 36/150 333/333 [==============================] - 155s 466ms/step - loss: 0.1408 - accuracy: 0.9491 - val_loss: 0.2131 - val_accuracy: 0.9365 Epoch 37/150 333/333 [==============================] - 174s 523ms/step - loss: 0.1367 - accuracy: 0.9505 - val_loss: 0.2084 - val_accuracy: 0.9343 Epoch 38/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1381 - accuracy: 0.9517 - val_loss: 0.1968 - val_accuracy: 0.9392 Epoch 39/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1270 - accuracy: 0.9543 - val_loss: 0.1989 - val_accuracy: 0.9373 Epoch 40/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1303 - accuracy: 0.9535 - val_loss: 0.2066 - val_accuracy: 0.9352 Epoch 41/150 333/333 [==============================] - 174s 524ms/step - loss: 
0.1272 - accuracy: 0.9548 - val_loss: 0.2068 - val_accuracy: 0.9372 Epoch 42/150 333/333 [==============================] - 175s 525ms/step - loss: 0.1218 - accuracy: 0.9565 - val_loss: 0.2106 - val_accuracy: 0.9343 Epoch 43/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1182 - accuracy: 0.9584 - val_loss: 0.2034 - val_accuracy: 0.9381 Epoch 44/150 333/333 [==============================] - 174s 523ms/step - loss: 0.1162 - accuracy: 0.9586 - val_loss: 0.2084 - val_accuracy: 0.9365 Epoch 45/150 333/333 [==============================] - 175s 524ms/step - loss: 0.1152 - accuracy: 0.9597 - val_loss: 0.2052 - val_accuracy: 0.9389 Epoch 46/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1072 - accuracy: 0.9616 - val_loss: 0.2072 - val_accuracy: 0.9372 Epoch 47/150 333/333 [==============================] - 174s 523ms/step - loss: 0.1104 - accuracy: 0.9607 - val_loss: 0.1988 - val_accuracy: 0.9392 Epoch 48/150 333/333 [==============================] - 175s 524ms/step - loss: 0.1040 - accuracy: 0.9622 - val_loss: 0.2016 - val_accuracy: 0.9401 Epoch 49/150 333/333 [==============================] - 174s 524ms/step - loss: 0.1007 - accuracy: 0.9647 - val_loss: 0.2092 - val_accuracy: 0.9385 Epoch 50/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1005 - accuracy: 0.9641 - val_loss: 0.2682 - val_accuracy: 0.9219 Epoch 51/150 333/333 [==============================] - 155s 465ms/step - loss: 0.1017 - accuracy: 0.9636 - val_loss: 0.2076 - val_accuracy: 0.9388 Epoch 52/150 333/333 [==============================] - 155s 465ms/step - loss: 0.0934 - accuracy: 0.9672 - val_loss: 0.2088 - val_accuracy: 0.9383 Epoch 53/150 333/333 [==============================] - 155s 465ms/step - loss: 0.0934 - accuracy: 0.9663 - val_loss: 0.2108 - val_accuracy: 0.9373 Epoch 54/150 333/333 [==============================] - 154s 463ms/step - loss: 0.0935 - accuracy: 0.9660 - val_loss: 0.2125 - val_accuracy: 0.9387 Epoch 
55/150 333/333 [==============================] - 155s 465ms/step - loss: 0.0874 - accuracy: 0.9696 - val_loss: 0.2063 - val_accuracy: 0.9396 Epoch 56/150 333/333 [==============================] - 174s 524ms/step - loss: 0.0853 - accuracy: 0.9696 - val_loss: 0.2098 - val_accuracy: 0.9391 Epoch 57/150 333/333 [==============================] - 155s 465ms/step - loss: 0.0856 - accuracy: 0.9694 - val_loss: 0.2184 - val_accuracy: 0.9388 Epoch 58/150 333/333 [==============================] - 154s 464ms/step - loss: 0.0848 - accuracy: 0.9705 - val_loss: 0.2092 - val_accuracy: 0.9397 Epoch 59/150 333/333 [==============================] - 174s 522ms/step - loss: 0.0822 - accuracy: 0.9715 - val_loss: 0.2054 - val_accuracy: 0.9416 Epoch 60/150 333/333 [==============================] - 155s 465ms/step - loss: 0.0809 - accuracy: 0.9708 - val_loss: 0.2102 - val_accuracy: 0.9409 Epoch 61/150 333/333 [==============================] - 174s 523ms/step - loss: 0.0805 - accuracy: 0.9711 - val_loss: 0.2103 - val_accuracy: 0.9404 Epoch 62/150 333/333 [==============================] - 175s 524ms/step - loss: 0.0781 - accuracy: 0.9724 - val_loss: 0.2128 - val_accuracy: 0.9393 Epoch 63/150 333/333 [==============================] - 174s 524ms/step - loss: 0.0759 - accuracy: 0.9727 - val_loss: 0.2153 - val_accuracy: 0.9395 Epoch 64/150 333/333 [==============================] - 174s 524ms/step - loss: 0.0714 - accuracy: 0.9750 - val_loss: 0.2179 - val_accuracy: 0.9395 Epoch 65/150 333/333 [==============================] - 175s 525ms/step - loss: 0.0727 - accuracy: 0.9746 - val_loss: 0.2138 - val_accuracy: 0.9399
# Plot the training curves: loss and accuracy for both train and validation.
# Added axis labels and a title — the original figure had unlabeled axes,
# which is confusing since loss and accuracy share one y-axis.
fig, ax = plt.subplots(figsize=(12, 7))
ax.plot(history.history["loss"], label="train_loss")
ax.plot(history.history["accuracy"], label="train_accuracy")
ax.plot(history.history["val_loss"], label="val_loss")
ax.plot(history.history["val_accuracy"], label="val_accuracy")
ax.set_xlabel("Epoch")
ax.set_ylabel("Loss / Accuracy")
ax.set_title("Training history")
ax.legend()
<matplotlib.legend.Legend at 0x7e5ab062ab90>