Commit 55edfca2 authored by Qiao Longfei

revert unused change

Parent fec0b192
@@ -292,28 +292,26 @@ class Optimizer(object):
         This method combines interface `append_backward()` and
         `create_optimization_pass()` into one.
         """
-        with program_guard(loss.block.program, startup_program):
-
-            params_grads = append_backward(loss, parameter_list, no_grad_set,
-                                           [error_clip_callback])
-
-            params_grads = sorted(params_grads, key=lambda x: x[0].name)
-
-            params_grads, table_param_and_grad, table_optimize_op = \
-                self._process_distribute_lookuptable(params_grads, loss, startup_program)
-
-            params_grads = append_gradient_clip_ops(params_grads)
-
-            # Add regularization if any
-            params_grads = append_regularization_ops(params_grads,
-                                                     self.regularization)
-
-            optimize_ops = self._create_optimization_pass(params_grads, loss,
-                                                          startup_program)
-            if table_optimize_op is not None:
-                optimize_ops.append(table_optimize_op)
-                params_grads.append(table_param_and_grad)
-            return optimize_ops, params_grads
+        params_grads = append_backward(loss, parameter_list, no_grad_set,
+                                       [error_clip_callback])
+
+        params_grads = sorted(params_grads, key=lambda x: x[0].name)
+
+        params_grads, table_param_and_grad, table_optimize_op = \
+            self._process_distribute_lookuptable(params_grads, loss, startup_program)
+
+        params_grads = append_gradient_clip_ops(params_grads)
+
+        # Add regularization if any
+        params_grads = append_regularization_ops(params_grads,
+                                                 self.regularization)
+
+        optimize_ops = self._create_optimization_pass(params_grads, loss,
+                                                      startup_program)
+        if table_optimize_op is not None:
+            optimize_ops.append(table_optimize_op)
+            params_grads.append(table_param_and_grad)
+        return optimize_ops, params_grads
 
 
 class SGDOptimizer(Optimizer):
...
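For context, below is a minimal sketch (not part of this commit) of how `Optimizer.minimize()` is typically driven in fluid. The toy network, variable names, and learning rate are illustrative assumptions only; the point is that the forward graph and the optimizer are built under a user-level `program_guard`, so `loss.block.program` is already the intended main program when `minimize()` runs.

    import paddle.fluid as fluid

    main_prog = fluid.Program()
    startup_prog = fluid.Program()

    # Build the forward network inside a user-level program_guard;
    # loss.block.program is main_prog by the time minimize() is called.
    with fluid.program_guard(main_prog, startup_prog):
        x = fluid.layers.data(name='x', shape=[13], dtype='float32')
        y = fluid.layers.data(name='y', shape=[1], dtype='float32')
        pred = fluid.layers.fc(input=x, size=1)
        loss = fluid.layers.mean(
            fluid.layers.square_error_cost(input=pred, label=y))

        sgd = fluid.optimizer.SGDOptimizer(learning_rate=0.01)
        # minimize() combines append_backward() and create_optimization_pass():
        # it appends gradient and update ops and returns (optimize_ops, params_grads).
        optimize_ops, params_grads = sgd.minimize(loss, startup_program=startup_prog)

Under this calling pattern the inner `program_guard(loss.block.program, startup_program)` that the diff removes adds nothing, which is presumably why the commit message describes it as an unused change.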