一、SGD
1、学习率在训练过程中自动下降方法。
# 方法一:设置decay
sgd = SGD(lr=0.01, decay=0.225, momentum=0.9, nesterov=True)
# 这种方法每进行一次iteration(不是epoch)就更新一次学习率。
# 更新规则:lr = lr * (1. / (1. + decay * iterations))
参考:https://www.cnblogs.com/ranjiewen/p/8011021.html
# 方法二:利用回调函数callbacks
import math
def step_decay(epoch: int) -> float:
    """Step-decay learning-rate schedule for LearningRateScheduler.

    Drops the learning rate by a factor of `drop` every `epochs_drop`
    epochs: lr = initial_lrate * drop ** floor((1 + epoch) / epochs_drop).

    Args:
        epoch: Zero-based epoch index supplied by the Keras callback.

    Returns:
        The learning rate to use for this epoch.
    """
    initial_lrate = 0.01  # starting learning rate
    drop = 0.1            # multiplicative decay factor
    epochs_drop = 10.0    # number of epochs between each drop
    # (1 + epoch) so that the first drop happens AFTER epochs_drop epochs
    # (i.e. at epoch index epochs_drop - 1 the rate is still initial_lrate).
    lrate = initial_lrate * math.pow(drop, math.floor((1 + epoch) / epochs_drop))
    return lrate
from keras.callbacks import LearningRateScheduler

# Drive the learning rate entirely from the callback; SGD's own lr and
# decay are zeroed so the scheduler is the single source of truth.
lr_scheduler = LearningRateScheduler(step_decay)
sgd = SGD(lr=0.0, momentum=0.9, decay=0.0, nesterov=False)
hist = model.fit_generator(
    gen,
    steps_per_epoch=808,
    epochs=59,
    validation_data=(x_val, y_val),
    callbacks=[lr_scheduler],
    verbose=2,
)