From add62acfd1d7e8f14ceba22b1c049c25027e760e Mon Sep 17 00:00:00 2001
From: Leo Chen
Date: Fri, 29 Nov 2019 11:25:47 +0800
Subject: [PATCH] remove kDepXOut for abs_grad op, test=develop (#21407)

---
 paddle/fluid/operators/activation_op.h | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index b94da368ce..339e9dedea 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -41,12 +41,6 @@ enum ActBwdOpFwdDeps {
   kNoDeps = 0x00,  // Do not need any forward input/output
   kDepX = 0x01,    // Only need forward input X
   kDepOut = 0x02,  // Only need forward output Out
-
-  // Never add kDepXOut, because Out can be always calculated
-  // by forward input X in backward part.
-  // FIXME(zjl): but in MKLDNN abs, X and Out are all needed...
-  // Developers should not rely on this enum value!
-  kDepXOut = 0x03
 };
 
 /* The following operator can be used to process SelectedRows, because the
@@ -769,7 +763,7 @@ struct AbsGradFunctor : public BaseActivationFunctor<T> {
     dx.device(d) = dout * x.sign();
   }
 
-  static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepXOut; }
+  static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepX; }
 };
 
 // reciprocal(x) = 1 / x
--
GitLab
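
Note (illustrative, not part of the patch): switching AbsGradFunctor from kDepXOut to kDepX is safe because the abs backward pass only reads the forward input X. Since d|x|/dx = sign(x), the functor computes dx = dout * x.sign() and never needs the forward output Out. A minimal standalone C++ sketch of that gradient rule, using plain loops and hypothetical sample values instead of Paddle's Eigen tensors:

#include <cstddef>
#include <cstdio>
#include <vector>

int main() {
  // Forward input x and upstream gradient dout (hypothetical sample values).
  std::vector<float> x = {-2.0f, 0.5f, 3.0f};
  std::vector<float> dout = {1.0f, 1.0f, 1.0f};
  std::vector<float> dx(x.size());

  for (std::size_t i = 0; i < x.size(); ++i) {
    // sign(x) is -1, 0, or +1; this mirrors dx = dout * x.sign() in AbsGradFunctor.
    float sign = static_cast<float>((x[i] > 0.0f) - (x[i] < 0.0f));
    dx[i] = dout[i] * sign;  // Out = |x| is never needed here.
  }

  for (float v : dx) std::printf("%g ", v);  // prints: -1 1 1
  std::printf("\n");
  return 0;
}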