From c9fc7ba9f8c012b8b5fade39541be757e5ca0d7b Mon Sep 17 00:00:00 2001 From: Yang Yu Date: Wed, 8 Nov 2017 17:06:59 -0800 Subject: [PATCH] Do not sum output if that output is not a gradient * increment is default in-place --- paddle/framework/backward.cc | 5 +++++ python/paddle/v2/framework/layers.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/paddle/framework/backward.cc b/paddle/framework/backward.cc index ed94540c2..b6a206157 100644 --- a/paddle/framework/backward.cc +++ b/paddle/framework/backward.cc @@ -408,6 +408,11 @@ std::vector> MakeBlockBackward( for (const auto& desc : op_grads) { for (const std::string& out_name : desc->OutputArgumentNames()) { + if (out_name.find("@GRAD") == std::string::npos) { + // Not all outputs of a backward operator are gradients. Only gradients + // need to be summed. Skip variables that are not gradients. + continue; + } dup_out_ops[out_name].emplace_back(grad_desc_idx); } ++grad_desc_idx; diff --git a/python/paddle/v2/framework/layers.py b/python/paddle/v2/framework/layers.py index 7e1ec10ef..a5536c357 100644 --- a/python/paddle/v2/framework/layers.py +++ b/python/paddle/v2/framework/layers.py @@ -823,7 +823,7 @@ def zeros(shape, dtype, main_program=None): return fill_constant(value=0.0, **locals()) -def increment(x, value=1.0, in_place=False, main_program=None): +def increment(x, value=1.0, in_place=True, main_program=None): helper = LayerHelper("increment", **locals()) if in_place: tmp = x -- GitLab