"No one is harder on a talented person than the person themselves" - Linda Wilkinson ; "Trust your guts and don't follow the herd" ; "Validate direction not destination" ;

December 16, 2019

Day #305 - Loading from HDF5 Weights Files and Saved H5 Models

We will look at
  • Vanilla model trained from scratch
  • Loading pre-existing HDF5 weights and continuing training
  • Loading a pre-existing H5 model and continuing training
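The difference between the two persistence styles, in a minimal sketch (demo_weights.hdf5 and demo_model.h5 are placeholder file names, not part of the main example): save_weights/load_weights store only the layer weights, so the architecture has to be rebuilt in code before loading, while save/load_model store architecture, weights and optimizer state together.

from keras.models import Sequential, load_model
from keras.layers import Dense

def build_demo():
    m = Sequential()
    m.add(Dense(4, activation='relu', input_shape=(8,)))
    m.add(Dense(1, activation='sigmoid'))
    m.compile(loss='binary_crossentropy', optimizer='adam')
    return m

# Weights only: rebuild the architecture, then load the weights into it
demo = build_demo()
demo.save_weights('demo_weights.hdf5')
rebuilt = build_demo()
rebuilt.load_weights('demo_weights.hdf5')

# Full model: load_model returns a compiled model, ready to continue training
demo.save('demo_model.h5')
restored = load_model('demo_model.h5')

The full MNIST example below walks through all three options.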

from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential, load_model
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
from keras.callbacks import ModelCheckpoint, CSVLogger, EarlyStopping
import os
batch_size = 128
num_classes = 10
epochs = 5
log_file_path = r'E:\Landmark\mnist_training_log.log'
model_save_path = r'E:\Landmark\mnist.h5'
weights_filepath = r'E:\Landmark\mnist-weights-improvement-{epoch:02d}.hdf5'
pre_weights_path = r'E:\Landmark\mnist-weights-improvement-04.hdf5'
pre_model_h5_path = r'E:\Landmark\mnist.h5'
# input image dimensions
img_rows, img_cols = 28, 28
# the data, split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if K.image_data_format() == 'channels_first':
    x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
    x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
    x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)
# Scale pixel values to the [0, 1] range
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
def CreateModel():
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='softmax'))
    return model
def LoadModelfromH5(model_h5_path):
    if os.path.exists(model_h5_path):
        print('Loading model definition and weights from H5')
        return load_model(model_h5_path)
    # Fall back to a fresh model if the H5 file does not exist yet
    print('H5 file not found, creating a new model')
    return CreateModel()
def LoadModelWeights(pre_weights_path):
    model = CreateModel()
    model.load_weights(pre_weights_path)
    return model
# Option 1 - vanilla model, train from scratch
#model = CreateModel()
# Option 2 - continue from saved weights
#model = LoadModelWeights(pre_weights_path)
# Option 3 - continue from a saved H5 model
model = LoadModelfromH5(pre_model_h5_path)
# Compile is required for Options 1 and 2; note that for Option 3 it replaces
# the optimizer state that load_model restored from the H5 file
model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])
# Add EarlyStopping, ModelCheckpoint and CSVLogger callbacks
# Stop once val_loss has not improved for 5 consecutive epochs
early_stop = EarlyStopping(monitor='val_loss', patience=5, verbose=1)
# Save weights only on epochs where val_loss improves
checkpoint = ModelCheckpoint(weights_filepath, monitor='val_loss', verbose=1,
                             save_best_only=True, mode='auto')
# Log per-epoch metrics to a CSV file for plotting later
csv_logger = CSVLogger(log_file_path, append=False)
callbacks_list = [checkpoint, early_stop, csv_logger]
model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test),
          callbacks=callbacks_list)
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
model.save(model_save_path)
import pandas as pd
import matplotlib.pyplot as plt
# Plot the loss curves from the CSVLogger output
file_name = log_file_path
df = pd.read_csv(file_name)
print(df.head())
training_loss = df['loss']
test_loss = df['val_loss']
print(training_loss)
print(test_loss)
epoch_count = range(1, len(training_loss) + 1)
plt.plot(epoch_count, training_loss, 'r--')
plt.plot(epoch_count, test_loss, 'b-')
plt.legend(['Training Loss', 'Test Loss'])
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
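
As an aside, the same curves can be plotted without the log file at all: fit() returns a History object whose history dict holds the per-epoch metrics. A sketch, assuming the fit() call above had been assigned to a variable:

history = model.fit(x_train, y_train,
                    batch_size=batch_size,
                    epochs=epochs,
                    verbose=1,
                    validation_data=(x_test, y_test),
                    callbacks=callbacks_list)
plt.plot(history.history['loss'], 'r--')
plt.plot(history.history['val_loss'], 'b-')
plt.legend(['Training Loss', 'Test Loss'])
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()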
Results

Option #1 - Vanilla model trained from scratch
Option #2 - Continue training from saved weights (see the sketch below)
Option #3 - Continue training from the saved H5 model file
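
One practical note on Option #2: the epoch number in pre_weights_path is hard-coded to 04. A small sketch for picking up whichever improvement file the checkpoint callback wrote last (the filenames sort lexicographically, so the highest epoch comes last):

import glob

candidates = sorted(glob.glob(r'E:\Landmark\mnist-weights-improvement-*.hdf5'))
if candidates:
    model = LoadModelWeights(candidates[-1])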


Happy Learning!!!
