from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator
from keras.losses import CategoricalCrossentropy

from Func.getSubFolders import count_sub_folders

path = 'Data_test'
output = 'Model/pruned.h5'

# Step 1: Load and preprocess images
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True
)

test_datagen = ImageDataGenerator(rescale=1. / 255)

# Step 2: Label the data from the directory structure
# Note: both generators read from the same directory (`path`), so the
# validation data is not held out from the training data.
train_set = train_datagen.flow_from_directory(
    path,
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical'
)

test_set = test_datagen.flow_from_directory(
    path,
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical'
)

# Step 3: Build the model
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=(224, 224, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(units=128, activation='relu'))
model.add(Dense(units=count_sub_folders(path), activation='softmax'))

# Step 4: Compile the model
# (No pruning is applied in this script, despite the 'pruned.h5' output name.)
model.compile(optimizer='adam',
              loss=CategoricalCrossentropy(from_logits=False),
              metrics=['accuracy'])

# Step 5: Train the model
model.fit(train_set, epochs=10, validation_data=test_set)

# Step 6: Evaluate the model
loss, accuracy = model.evaluate(test_set)
print(f'Test loss: {loss}, Test accuracy: {accuracy}')

# Step 7: Save the trained model
model.save(output)
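
# --- Optional pruning sketch (assumption, not part of the original script) ---
# The output filename 'Model/pruned.h5' suggests the model is meant to be
# pruned, but nothing above prunes it. Below is a minimal sketch of
# magnitude-based weight pruning using the tensorflow_model_optimization
# package, assuming it is installed (`pip install tensorflow-model-optimization`)
# and that constant 50% sparsity is an acceptable illustrative target.
# It wraps the already-trained model, fine-tunes it briefly so weights are
# actually zeroed, strips the pruning wrappers, and overwrites the file saved
# above with the pruned model.
import tensorflow_model_optimization as tfmot

# Zero out 50% of each prunable layer's weights (illustrative target sparsity).
pruning_schedule = tfmot.sparsity.keras.ConstantSparsity(
    target_sparsity=0.5,
    begin_step=0
)
pruned_model = tfmot.sparsity.keras.prune_low_magnitude(
    model, pruning_schedule=pruning_schedule
)

# The wrapped model must be re-compiled before fine-tuning.
pruned_model.compile(optimizer='adam',
                     loss=CategoricalCrossentropy(from_logits=False),
                     metrics=['accuracy'])

# UpdatePruningStep keeps the pruning schedule in sync with training steps.
pruned_model.fit(train_set,
                 epochs=2,
                 validation_data=test_set,
                 callbacks=[tfmot.sparsity.keras.UpdatePruningStep()])

# Remove the pruning wrappers so the saved file is a plain Keras model.
final_model = tfmot.sparsity.keras.strip_pruning(pruned_model)
final_model.save(output)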