Changing optimizer arguments (e.g. learning rate) mid-training by cloning the forward program
Created by: wanghaoshuang
# Build the forward (model-only) program once; each time the learning rate
# changes, clone it and attach a fresh optimizer, so optimizer ops built for
# an earlier learning rate do not accumulate in the training program.
forward_program = fluid.Program()
loss = None
with fluid.program_guard(main_program=forward_program):  # was misspelled "program_guad"
    # NOTE(review): fluid.layers.data normally requires a `shape` argument;
    # shapes are not shown in this snippet — TODO confirm against the real model.
    input = fluid.layers.data(name="input")
    label = fluid.layers.data(name="label")
    out = fluid.layers.fc(input, size=10)
    loss = fluid.layers.cross_entropy(out, label)

lr = 0.1  # initial learning rate; original left `lr` undefined for epochs 0-5 (NameError)
for epoch in range(10):
    if epoch > 5:
        lr = 0.01  # decay the learning rate after epoch 5
    # Clone the forward graph so the optimizer is (re)built against a clean program.
    train_program = forward_program.clone()
    with fluid.program_guard(main_program=train_program):
        # Look up the loss variable inside the *cloned* program by name;
        # Block exposes .var(name), not .get_var(name).
        loss_var = train_program.global_block().var(loss.name)
        # Optimizers live in fluid.optimizer, not fluid.layers.
        optimizer = fluid.optimizer.Adam(learning_rate=lr)
        optimizer.minimize(loss_var)
    # NOTE(review): `exe` (a fluid.Executor) is assumed to be created elsewhere,
    # and a real run would also need feed/fetch arguments — confirm with caller.
    exe.run(train_program)