from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator
from Func.getSubFolders import count_sub_folders
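# Note: Func/getSubFolders.py is a project-specific helper and is not shown here.
# As an assumption, a minimal count_sub_folders would simply count the class
# directories under the given path, e.g.:
#
#   import os
#
#   def count_sub_folders(path):
#       """Return the number of sub-directories (one per class) under `path`."""
#       return len([d for d in os.listdir(path)
#                   if os.path.isdir(os.path.join(path, d))])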
# Step 1: Load and Preprocess Images
# ImageDataGenerator handles on-the-fly data augmentation and normalization
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True
)
test_datagen = ImageDataGenerator(rescale=1. / 255)
# Step 2: Label the Data
# Class labels are inferred from the sub-folder names under 'Data/'
train_set = train_datagen.flow_from_directory(
    'Data/',
    target_size=(224, 224),   # resize all images to 224x224
    batch_size=32,
    class_mode='categorical'  # one sub-folder per class under Data/
)
test_set = test_datagen.flow_from_directory(
    'Data',                   # note: same directory as the training set here
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical'  # use categorical for multiple classes
)
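# Expected directory layout for flow_from_directory (illustrative; the class
# folder names 'cat' and 'dog' are placeholders for whatever classes you have):
#   Data/
#       cat/
#           img_001.jpg, img_002.jpg, ...
#       dog/
#           img_101.jpg, img_102.jpg, ...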
# Step 4: Build the Model
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=(224, 224, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(units=128, activation='relu'))
model.add(Dense(units=count_sub_folders('Data'), activation='softmax'))
# Step 5: Compile the Model
# class_mode='categorical' yields one-hot labels, so the matching loss is
# categorical_crossentropy (sparse_categorical_crossentropy expects integer labels)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Step 6: Train the Model
# model.fit(train_set, epochs=25, validation_data=test_set)
# Step 7: Evaluate the Model
# loss, accuracy = model.evaluate(test_set)
# print(f'Test loss: {loss}, Test accuracy: {accuracy}')
# Step 8: Make Predictions
# Save the trained model so it can be reloaded later for inference
# model.save('Model/Model.h5')
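# Illustrative sketch only (assumption): how a single prediction could be made
# with this model. The image path below is a placeholder, not a file from the
# original project.
# import numpy as np
# from keras.preprocessing.image import load_img, img_to_array
#
# img = load_img('path/to/some_image.jpg', target_size=(224, 224))
# x = np.expand_dims(img_to_array(img) / 255.0, axis=0)   # shape (1, 224, 224, 3)
# probs = model.predict(x)                                # class probabilities
# class_names = list(train_set.class_indices.keys())
# print('Predicted class:', class_names[probs.argmax(axis=-1)[0]])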