diff --git a/python/paddle/v2/fluid/clip.py b/python/paddle/v2/fluid/clip.py
index 776c0f3f0276cd228db9846e473c65d44e10bbb7..be0c2735f24c40f7c90b29a29fe47880acfedee1 100644
--- a/python/paddle/v2/fluid/clip.py
+++ b/python/paddle/v2/fluid/clip.py
@@ -4,6 +4,7 @@ from . import core
 
 __all__ = [
     'GradientClipByValue',
+    'ErrorClipByValue',
     'append_gradient_clip_ops',
     'error_clip_callback',
 ]
@@ -25,12 +26,12 @@ class ErrorClipByValue(BaseErrorClipAttr):
         self.min = min
 
     def append_clip_op(self, block, grad_name):
-        block.append_op(
-            type="clip",
-            inputs={"X": grad_name},
-            outputs={"Out": grad_name},
-            attrs={"min": self.min,
-                   "max": self.max})
+        clip_op_desc = block.desc.append_op()
+        clip_op_desc.set_type("clip")
+        clip_op_desc.set_input("X", [grad_name])
+        clip_op_desc.set_output("Out", [grad_name])
+        clip_op_desc.set_attr("min", self.min)
+        clip_op_desc.set_attr("max", self.max)
 
 
 def error_clip_callback(block, context):
@@ -41,6 +42,11 @@ def error_clip_callback(block, context):
                             op_desc.output_arg_names()):
             fwd_var = block.var_recursive(grad_to_var[grad_n])
             error_clip = getattr(fwd_var, "error_clip", None)
+            if not (error_clip is None or isinstance(error_clip,
+                                                     BaseErrorClipAttr)):
+                raise TypeError(
+                    "Variable's error_clip should be an instance of BaseErrorClipAttr or None."
+                )
             if error_clip is not None:
                 error_clip.append_clip_op(block, grad_n)
 
diff --git a/python/paddle/v2/fluid/framework.py b/python/paddle/v2/fluid/framework.py
index bdbfe9da0772fdbd00dfc8ed00413ece56f48407..f78f2a331a6ea882df3368c54013b08c1a1debc5 100644
--- a/python/paddle/v2/fluid/framework.py
+++ b/python/paddle/v2/fluid/framework.py
@@ -280,6 +280,9 @@ class Variable(object):
         uid = core.unique_integer(prefix)  # unique during whole process.
         return "_".join([prefix, str(uid)])
 
+    def set_error_clip(self, error_clip):
+        self.error_clip = error_clip
+
 
 def get_all_op_protos():
     """
diff --git a/python/paddle/v2/fluid/tests/test_clip.py b/python/paddle/v2/fluid/tests/test_clip.py
new file mode 100644
index 0000000000000000000000000000000000000000..a71823f7e82d2f3007de3d17afa1770c688792d4
--- /dev/null
+++ b/python/paddle/v2/fluid/tests/test_clip.py
@@ -0,0 +1,67 @@
+from __future__ import print_function
+import numpy as np
+import paddle.v2 as paddle
+import paddle.v2.fluid as fluid
+
+BATCH_SIZE = 128
+CLIP_MAX = 2e-6
+CLIP_MIN = -1e-6
+
+prog = fluid.framework.Program()
+
+with fluid.program_guard(main_program=prog):
+    image = fluid.layers.data(name='x', shape=[784], dtype='float32')
+
+    hidden1 = fluid.layers.fc(input=image, size=128, act='relu')
+    hidden2 = fluid.layers.fc(input=hidden1, size=64, act='relu')
+    predict = fluid.layers.fc(input=hidden2, size=10, act='softmax')
+
+    label = fluid.layers.data(name='y', shape=[1], dtype='int64')
+
+    cost = fluid.layers.cross_entropy(input=predict, label=label)
+    avg_cost = fluid.layers.mean(x=cost)
+
+prog_clip = prog.clone()
+prog_clip.block(0).var(hidden1.name).set_error_clip(
+    fluid.clip.ErrorClipByValue(
+        max=CLIP_MAX, min=CLIP_MIN))
+
+avg_cost_clip = prog_clip.block(0).var(avg_cost.name)
+fluid.backward.append_backward(loss=avg_cost)
+fluid.backward.append_backward(
+    loss=avg_cost_clip, callback=fluid.clip.error_clip_callback)
+
+hidden1_grad = prog.block(0).var(hidden1.name + "@GRAD")
+hidden1_grad_clip = prog_clip.block(0).var(hidden1.name + "@GRAD")
+
+hidden2_grad = prog.block(0).var(hidden2.name + "@GRAD")
+hidden2_grad_clip = prog_clip.block(0).var(hidden2.name + "@GRAD")
+
+train_reader = paddle.batch(
+    paddle.reader.shuffle(
+        paddle.dataset.mnist.train(), buf_size=8192),
+    batch_size=BATCH_SIZE)
+
+place = fluid.CPUPlace()
+exe = fluid.Executor(place)
+feeder = fluid.DataFeeder(feed_list=[image, label], place=place)
+exe.run(fluid.default_startup_program())
+
+count = 0
+for data in train_reader():
+    count += 1
+    if count > 5:
+        break
+    out1, out2 = exe.run(prog,
+                         feed=feeder.feed(data),
+                         fetch_list=[hidden1_grad, hidden2_grad])
+    out1_clip, out2_clip = exe.run(
+        prog_clip,
+        feed=feeder.feed(data),
+        fetch_list=[hidden1_grad_clip, hidden2_grad_clip])
+    if not ((out1.clip(
+            min=CLIP_MIN, max=CLIP_MAX) == out1_clip).all() and
+            (out2 == out2_clip).all()):
+        exit(1)
+
+exit(0)
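
A minimal usage sketch of the API this patch introduces, assuming the same fluid layers as in test_clip.py above (the names `prog`, `x`, `hidden`, and `loss` are illustrative, not part of the patch). It attaches an ErrorClipByValue attribute to one forward variable, lets error_clip_callback append the clip op on that variable's gradient during backward, and notes the new TypeError path, which test_clip.py does not exercise:

    import paddle.v2.fluid as fluid

    prog = fluid.framework.Program()
    with fluid.program_guard(main_program=prog):
        x = fluid.layers.data(name='x', shape=[784], dtype='float32')
        hidden = fluid.layers.fc(input=x, size=128, act='relu')
        predict = fluid.layers.fc(input=hidden, size=10, act='softmax')
        y = fluid.layers.data(name='y', shape=[1], dtype='int64')
        loss = fluid.layers.mean(
            x=fluid.layers.cross_entropy(input=predict, label=y))

    # Attach the clip attribute to a forward variable; during backward,
    # error_clip_callback reads it and appends a "clip" op on hidden@GRAD.
    prog.block(0).var(hidden.name).set_error_clip(
        fluid.clip.ErrorClipByValue(max=1e-6, min=-1e-6))
    fluid.backward.append_backward(
        loss=loss, callback=fluid.clip.error_clip_callback)

    # Setting anything other than None or a BaseErrorClipAttr instance
    # (e.g. a plain float) now fails fast: the next append_backward raises
    # "TypeError: Variable's error_clip should be an instance of
    # BaseErrorClipAttr or None."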