diff --git a/paddle/fluid/operators/exponential_op.cc b/paddle/fluid/operators/exponential_op.cc
deleted file mode 100644
index 52ddd9ebfa16f0a8990507ee40b45a6c0000872f..0000000000000000000000000000000000000000
--- a/paddle/fluid/operators/exponential_op.cc
+++ /dev/null
@@ -1,81 +0,0 @@
-/* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. */
-
-#include "paddle/fluid/framework/infershape_utils.h"
-#include "paddle/fluid/framework/op_registry.h"
-#include "paddle/phi/infermeta/unary.h"
-
-namespace paddle {
-namespace operators {
-
-class ExponentialOp : public framework::OperatorWithKernel {
- public:
-  using framework::OperatorWithKernel::OperatorWithKernel;
-
- protected:
-  phi::KernelKey GetExpectedKernelType(
-      const framework::ExecutionContext &ctx) const override {
-    return phi::KernelKey(OperatorWithKernel::IndicateVarDataType(ctx, "X"),
-                          ctx.GetPlace());
-  }
-};
-
-class ExponentialOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddComment(R"DOC(
-This operator fills the input tensor with random values sampled from a
-exponential distribution.
-)DOC");
-    AddInput("X", "The input tensor.");
-    AddOutput("Out", "The output tensor of exponential OP.");
-    AddAttr<float>(
-        "lambda", "lambd parameter of exponential distribution. [default 1.0].")
-        .SetDefault(1.0f);
-  }
-};
-
-template <typename T>
-class ExponentialGradOpMaker : public framework::SingleGradOpMaker<T> {
- public:
-  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
-
- protected:
-  void Apply(GradOpPtr<T> retv) const override {
-    retv->SetType("fill_any_like");
-    retv->SetInput("X", this->OutputGrad("Out"));
-    retv->SetAttr("value", 0.0f);
-    retv->SetOutput("Out", this->InputGrad("X"));
-  }
-};
-
-}  // namespace operators
-}  // namespace paddle
-
-namespace ops = paddle::operators;
-namespace plat = paddle::platform;
-
-DECLARE_INPLACE_OP_INFERER(ExponentialInferer, {"X", "Out"});
-
-DECLARE_INFER_SHAPE_FUNCTOR(exponential,
-                            ExponentialInfershapeFunctor,
-                            PD_INFER_META(phi::UnchangedInferMeta));
-
-REGISTER_OPERATOR(exponential,
-                  ops::ExponentialOp,
-                  ops::ExponentialOpMaker,
-                  ops::ExponentialGradOpMaker<paddle::framework::OpDesc>,
-                  ops::ExponentialGradOpMaker<paddle::imperative::OpBase>,
-                  ExponentialInferer,
-                  ExponentialInfershapeFunctor);
diff --git a/paddle/fluid/operators/generator/parse_utils.py b/paddle/fluid/operators/generator/parse_utils.py
index 7e09706d21a3b4527c19ea7641a9298269876928..8f803fa4e70bdb69095fb1d4a04f6f2e802bc338 100644
--- a/paddle/fluid/operators/generator/parse_utils.py
+++ b/paddle/fluid/operators/generator/parse_utils.py
@@ -29,6 +29,12 @@ def to_named_dict(items: List[Dict], is_op=False) -> Dict[str, Dict]:
             item["name"] = (
                 item["name"] if item["name"][-1] != '_' else item["name"][:-1]
             )
+            if "forward" in item:
+                item["forward"]["name"] = (
+                    item["forward"]["name"]
+                    if item["forward"]["name"][-1] != '_'
+                    else item["forward"]["name"][:-1]
+                )
             name = item["name"]
             named_dict[name] = item
     else:
diff --git a/paddle/phi/api/yaml/op_compat.yaml b/paddle/phi/api/yaml/op_compat.yaml
index 4d1d29f5f68fee31a7e8d367236cf9eab22d2a14..28ffb681293d6e06693e60880502a6a692ec1c88 100755
--- a/paddle/phi/api/yaml/op_compat.yaml
+++ b/paddle/phi/api/yaml/op_compat.yaml
@@ -932,6 +932,15 @@
   extra :
     attrs : [bool use_mkldnn = false, bool use_cudnn = false]
 
+- op : exponential_
+  backward : exponential__grad
+  inputs :
+    x : X
+  outputs :
+    out : Out
+  attrs :
+    lam : lambda
+
 - op : eye
   outputs :
     out : Out
diff --git a/paddle/phi/api/yaml/static_backward.yaml b/paddle/phi/api/yaml/static_backward.yaml
index 9f725070e5b48df496de17feaa26a3a7357d8659..0c1d0414b11097389be3e0f4c6926e0ec8a55ce7 100755
--- a/paddle/phi/api/yaml/static_backward.yaml
+++ b/paddle/phi/api/yaml/static_backward.yaml
@@ -89,6 +89,14 @@
     data_type : out_grad
   no_need_buffer : weight
 
+- backward_op : exponential__grad
+  forward : exponential_ (Tensor x, float lam=1.0f) -> Tensor(out)
+  args : (Tensor out_grad)
+  output : Tensor(x_grad)
+  infer_meta :
+    func : UnchangedInferMeta
+  invoke : full_like(out_grad, 0.0f)
+
 - backward_op : frobenius_norm_grad
   forward: frobenius_norm (Tensor x, IntArray axis={0}, bool keepdim=false, bool reduce_all=false, int in_dtype=-1, int out_dtype=-1) -> Tensor(out)
   args : (Tensor x, Tensor out, Tensor out_grad, IntArray axis={0}, bool keepdim=false, bool reduce_all=false, int in_dtype=-1, int out_dtype=-1)
diff --git a/paddle/phi/api/yaml/static_ops.yaml b/paddle/phi/api/yaml/static_ops.yaml
index abacbe9f716461548f34b81679b09d17e0873f8a..513af8a40e6e128db009c2c34b78b9eff93308d5 100755
--- a/paddle/phi/api/yaml/static_ops.yaml
+++ b/paddle/phi/api/yaml/static_ops.yaml
@@ -182,6 +182,17 @@
     backend : x
     force_backend : force_cpu
 
+- op : exponential_
+  args : (Tensor x, float lam = 1.0f)
+  output : Tensor(out)
+  infer_meta :
+    func : UnchangedInferMeta
+    param : [x]
+  kernel :
+    func : exponential
+  inplace : (x -> out)
+  backward : exponential__grad
+
 - op : eye
   args : (Scalar(int64_t) num_rows, Scalar(int64_t) num_columns = -1, DataType dtype = DataType::FLOAT32)
   output : Tensor(out)
diff --git a/paddle/phi/ops/compat/exponential_sig.cc b/paddle/phi/ops/compat/exponential_sig.cc
deleted file mode 100644
index 79bad59184233e2a19165a3a7363bd6322ba20af..0000000000000000000000000000000000000000
--- a/paddle/phi/ops/compat/exponential_sig.cc
+++ /dev/null
@@ -1,26 +0,0 @@
-/* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. */
-
-#include "paddle/phi/core/compat/op_utils.h"
-
-namespace phi {
-
-KernelSignature ExponentialOpArgumentMapping(
-    const ArgumentMappingContext& ctx UNUSED) {
-  return KernelSignature("exponential", {"X"}, {"lambda"}, {"Out"});
-}
-
-}  // namespace phi
-
-PD_REGISTER_ARG_MAPPING_FN(exponential, phi::ExponentialOpArgumentMapping);
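
For context (not part of the diff itself): a minimal usage sketch, assuming the public `paddle.Tensor.exponential_` API that the YAML-defined `exponential_` op backs. The user-facing behavior should be unchanged by this migration: the tensor is filled in place with samples from an exponential distribution with rate `lam`, and the new `exponential__grad` entry reproduces the removed `ExponentialGradOpMaker` (the gradient is all zeros via `full_like(out_grad, 0.0f)`).

```python
import paddle

# Illustrative sketch only: assumes the public paddle.Tensor.exponential_ API,
# which maps onto the exponential_ op declared in static_ops.yaml above.
paddle.seed(2023)

x = paddle.full([2, 3], 0.0, dtype="float32")
x.exponential_(lam=0.5)  # in place: x now holds samples from Exp(rate=0.5)
print(x)

# Per static_backward.yaml, exponential__grad invokes full_like(out_grad, 0.0f),
# matching the old ExponentialGradOpMaker: the gradient w.r.t. x is all zeros.
```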