"""定制resnet后面的層"""
def custom(input_size, num_classes, pretrain):
    """Build a transfer-learning classifier on top of a frozen ResNet50 base.

    Args:
        input_size: side length of the square RGB input image.
        num_classes: number of output classes (softmax width).
        pretrain: `weights` argument forwarded to ResNet50 (e.g. 'imagenet').

    Returns:
        A compiled Keras Model (adam optimizer, categorical_crossentropy loss).
    """
    # Headless ResNet50 backbone. Fix: the original also passed
    # classes=num_classes, but per the Keras Applications docs `classes`
    # is only meaningful with include_top=True, so it is dropped here.
    base_model = ResNet50(weights=pretrain,
                          include_top=False,
                          pooling=None,
                          input_shape=(input_size, input_size, 3))
    # Freeze every pretrained convolutional layer; only the new head trains
    # (transfer learning: the backbone already extracts generic features).
    for layer in base_model.layers:
        layer.trainable = False
    # Classification head: GAP -> dropout -> dense(relu, L2) -> BN -> dropout -> softmax.
    x = base_model.output
    x = layers.GlobalAveragePooling2D(name='avg_pool')(x)
    x = layers.Dropout(0.5, name='dropout1')(x)
    # L2 regularization adds a weight-norm penalty to the loss, limiting
    # the magnitude of the dense layer's parameters during optimization.
    x = layers.Dense(512, activation='relu',
                     kernel_regularizer=regularizers.l2(0.0001),
                     name='fc2')(x)
    x = layers.BatchNormalization(name='bn_fc_01')(x)
    x = layers.Dropout(0.5, name='dropout2')(x)
    # Final softmax over the num_classes categories.
    x = layers.Dense(num_classes, activation='softmax')(x)
    model = Model(inputs=base_model.input, outputs=x)
    # Compile so the returned model is ready for fit()/fit_generator().
    model.compile(optimizer="adam",
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
網(wǎng)絡的訓練是一個遷移學習過程:使用已有的 ResNet50 初始權(quán)重(5 個 stage 已經(jīng)預訓練過,卷積層已能提取通用特征),先只訓練后面的全連接層部分;4 個 epoch 后再解凍較后面的層進行微調(diào),以獲得更高的準確率。訓練過程如下:
class Net():
    """Two-stage transfer-learning trainer for the custom ResNet50 classifier.

    Stage 1 trains only the new fully-connected head with the backbone frozen;
    stage 2 unfreezes the top layers and fine-tunes them at a low learning rate.
    The best weights (by val_loss) are checkpointed to ./models/garclass.h5.
    """

    def __init__(self, img_size, gar_num, data_dir, batch_size, pretrain):
        # img_size: input image side length
        # gar_num: number of classes
        # data_dir: dataset root directory passed to the data generator
        # batch_size: mini-batch size
        # pretrain: pretrained-weights spec forwarded to the backbone
        self.img_size = img_size
        self.gar_num = gar_num
        self.data_dir = data_dir
        self.batch_size = batch_size
        self.pretrain = pretrain

    def build_train(self):
        """Run the two-stage transfer-learning training loop."""
        model = resnet.custom(self.img_size, self.gar_num, self.pretrain)
        model.summary()
        train_sequence, validation_sequence = genit.gendata(
            self.data_dir, self.batch_size, self.gar_num, self.img_size)
        # Stage 1: train the head only (backbone frozen inside resnet.custom).
        epochs = 4
        model.fit_generator(train_sequence,
                            steps_per_epoch=len(train_sequence),
                            epochs=epochs, verbose=1,
                            validation_data=validation_sequence,
                            max_queue_size=10, shuffle=True)
        # Stage 2: fine-tuning. Counting activations as layers the model has
        # 181 layers; keep the first 149 frozen and retrain the rest together
        # with the head. Fix: renamed from `layers`, which shadowed the Keras
        # `layers` module name used elsewhere in this project.
        freeze_until = 149
        learning_rate = 1e-4
        for layer in model.layers[:freeze_until]:
            layer.trainable = False
        for layer in model.layers[freeze_until:]:
            layer.trainable = True
        # Fix: renamed from `Adam`, which shadowed the optimizer class name.
        fine_tune_opt = adam(lr=learning_rate, decay=0.0005)
        model.compile(optimizer=fine_tune_opt,
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])
        # Fix: ModelCheckpoint fails if the target directory is missing,
        # so make sure ./models exists before training starts.
        import os
        os.makedirs('./models', exist_ok=True)
        model.fit_generator(train_sequence,
                            steps_per_epoch=len(train_sequence),
                            epochs=epochs * 2, verbose=1,
                            callbacks=[
                                callbacks.ModelCheckpoint('./models/garclass.h5',
                                                          monitor='val_loss',
                                                          save_best_only=True,
                                                          mode='min'),
                                callbacks.ReduceLROnPlateau(monitor='val_loss',
                                                            factor=0.1,
                                                            patience=10,
                                                            mode='min'),
                                callbacks.EarlyStopping(monitor='val_loss',
                                                        patience=10),
                            ],
                            validation_data=validation_sequence,
                            max_queue_size=10, shuffle=True)
        print('finish train,look for garclass.h5')