"""Train a small CNN image classifier on the `Data` directory.

Images are loaded with Keras generators (80/20 train/validation split from
the same directory), passed through a four-block convolutional network, and
the trained model is saved to `Model/keras_model.h5`.
"""
import time

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator
from keras.losses import CategoricalCrossentropy
# Public API paths (keras.src.* are private internals and may break between releases).
from keras.callbacks import EarlyStopping
from keras.optimizers import Adam

# NOTE(review): retained from an experimental functional-API model builder that
# was removed; safe to drop once confirmed nothing else in the project needs them.
import tensorflow as tf
from keras import layers, models

from Func.getSubFolders import count_sub_folders

path = 'Data'
output = 'Model/keras_model.h5'
start_time = time.time()

# Step 1: Load and Preprocess Images.
# Augmentation + rescaling applied by a single generator; the 20% validation
# subset comes from the same generator, so validation images are also rescaled
# (and augmented — acceptable here, but worth confirming that is intended).
datagen = ImageDataGenerator(
    rescale=1. / 255,
    validation_split=0.2,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
)

# NOTE(review): unused — validation data flows from `datagen` above, not this.
test_datagen = ImageDataGenerator(rescale=1. / 255)

# Step 2: Label the Data.
train_set = datagen.flow_from_directory(
    path,
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical',
    subset='training'
)  # BUG FIX: this closing parenthesis was missing, making the file unparsable.

test_set = datagen.flow_from_directory(
    path,
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical',
    subset='validation'
)

# Step 4: Build the Model — four Conv/MaxPool blocks, then two dense layers.
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=(224, 224, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2)))

model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2)))

model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2)))

model.add(Flatten())
model.add(Dense(units=256, activation='relu'))
model.add(Dense(units=128, activation='relu'))
# Output layer: one unit per class sub-folder under `path`.
model.add(Dense(units=count_sub_folders(path), activation='softmax'))

# Compile the Model.
# from_logits=False because the last layer already applies softmax.
model.compile(optimizer=Adam(learning_rate=0.001),
              loss=CategoricalCrossentropy(from_logits=False),
              metrics=['accuracy'])

# Step 6: Train the Model.
early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)

# BUG FIX: early_stopping was created but never passed to fit(), so it had no effect.
model.fit(train_set, validation_data=test_set, epochs=10, callbacks=[early_stopping])

# Step 7: Evaluate the Model on the validation subset.
loss, accuracy = model.evaluate(test_set)
print(f'Test loss: {loss}, Test accuracy: {accuracy}')

# Save the trained model.
model.save(output)
end_time = time.time()

execute_time = (end_time - start_time) / 60

model.summary()

# Print the result.
print(f"It took: {execute_time:0.2f} minutes")