Description
I used EfficientNetB7 to classify face-mask images into two classes (Mask, No Mask). Training works fine, but the problem is with prediction from live video. The code for training the model is shown below:
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications import EfficientNetB7
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.models import Model
from tensorflow.keras.applications.efficientnet import preprocess_input
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import itertools
import random
import os
import cv2
from sklearn import metrics
from pathlib import Path
target_size = (224,224)
batch_size = 8
lr = 0.01
n_epochs = 4
#root_dir = os.path.dirname(os.path.abspath(os.curdir))
data_dir = r"C:\Users\admin\Desktop\b7\a\dataset"
train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)
train_generator = train_datagen.flow_from_directory(
    r"C:\Users\admin\Desktop\b7\a\dataset\train",
    target_size=target_size,
    batch_size=batch_size,
    class_mode='binary',
    classes=['with_mask', 'without_mask'],
    shuffle=True)
val_datagen_artificial = ImageDataGenerator(preprocessing_function=preprocess_input)
val_generator_artificial = val_datagen_artificial.flow_from_directory(
    r"C:\Users\admin\Desktop\b7\a\dataset\validation",
    target_size=target_size,
    batch_size=batch_size,
    class_mode='binary',
    classes=['with_mask', 'without_mask'],
    shuffle=False)
base_model = EfficientNetB7(weights='imagenet',include_top=False, input_shape=(target_size[0],target_size[1],3))
x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dense(128,activation='relu')(x)
preds = Dense(1,activation='sigmoid')(x)
model = Model(inputs=base_model.input,outputs=preds)
for layer in model.layers[:-4]:
    layer.trainable = False
opt = tf.keras.optimizers.Adam(learning_rate=lr)
model.compile(optimizer=opt,loss='binary_crossentropy',metrics=['accuracy'])
step_size_train = train_generator.n//train_generator.batch_size
step_size_val = val_generator_artificial.n//val_generator_artificial.batch_size
# model.fit accepts generators directly; fit_generator is deprecated in TF 2.x
model.fit(train_generator,
          steps_per_epoch=step_size_train,
          epochs=n_epochs,
          validation_data=val_generator_artificial,
          validation_steps=step_size_val)
Epoch 1/4
429/429 [==============================] - 911s 2s/step - loss: 0.0773 - accuracy: 0.9834 - val_loss: 0.0150 - val_accuracy: 0.9950
Epoch 2/4
429/429 [==============================] - 876s 2s/step - loss: 0.0422 - accuracy: 0.9927 - val_loss: 0.0569 - val_accuracy: 0.9875
Epoch 3/4
429/429 [==============================] - 874s 2s/step - loss: 0.0072 - accuracy: 0.9983 - val_loss: 0.0090 - val_accuracy: 0.9950
Epoch 4/4
429/429 [==============================] - 870s 2s/step - loss: 0.0283 - accuracy: 0.9948 - val_loss: 3.0768e-04 - val_accuracy: 1.0000
model.save("mask.model", save_format="h5")