From f285191fb3ea451bc1171d19b7f1521254c80c60 Mon Sep 17 00:00:00 2001
From: baojun <32073718+baojun-nervana@users.noreply.github.com>
Date: Tue, 26 Feb 2019 23:03:43 -0800
Subject: [PATCH] Added adam op test=develop (#15710)

---
 paddle/fluid/operators/ngraph/ops/adam_op.h   | 79 +++++++++++++++++++
 .../unittests/ngraph/test_adam_ngraph_op.py   | 21 +++++
 2 files changed, 100 insertions(+)
 create mode 100644 paddle/fluid/operators/ngraph/ops/adam_op.h
 create mode 100644 python/paddle/fluid/tests/unittests/ngraph/test_adam_ngraph_op.py

diff --git a/paddle/fluid/operators/ngraph/ops/adam_op.h b/paddle/fluid/operators/ngraph/ops/adam_op.h
new file mode 100644
index 0000000000..beba5d3d23
--- /dev/null
+++ b/paddle/fluid/operators/ngraph/ops/adam_op.h
@@ -0,0 +1,79 @@
+/*Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#pragma once
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+
+#include "ngraph/ngraph.hpp"
+#include "paddle/fluid/operators/ngraph/ops/elementwise_scalar_op.h"
+#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
+#include "paddle/fluid/platform/ngraph_helper.h"
+
+namespace paddle {
+namespace operators {
+namespace ngraphs {
+
+void BuildAdamNode(
+    const std::shared_ptr<paddle::framework::OperatorBase>& op,
+    std::shared_ptr<
+        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
+        ngb_node_map) {
+  auto op_attrs = framework::AttrReader(op->Attrs());
+  auto beta1pow = platform::GetInputNode(op, "Beta1Pow", ngb_node_map);
+  auto beta2pow = platform::GetInputNode(op, "Beta2Pow", ngb_node_map);
+  auto grad = platform::GetInputNode(op, "Grad", ngb_node_map);
+  auto learning_rate = platform::GetInputNode(op, "LearningRate", ngb_node_map);
+  auto moment1 = platform::GetInputNode(op, "Moment1", ngb_node_map);
+  auto moment2 = platform::GetInputNode(op, "Moment2", ngb_node_map);
+  auto param = platform::GetInputNode(op, "Param", ngb_node_map);
+
+  auto epsilon = op_attrs.Get<float>("epsilon");
+  auto beta2 = op_attrs.Get<float>("beta2");
+  auto beta1 = op_attrs.Get<float>("beta1");
+
+  auto moment1_shape = moment1->get_shape();
+  auto grad_shape = grad->get_shape();
+
+  auto moment1out = std::make_shared<ngraph::op::Add>(
+      ElementwiseScalar<ngraph::op::Multiply>(beta1, moment1),
+      ElementwiseScalar<ngraph::op::Multiply>(1. - beta1, grad));
+
+  auto grad_square = std::make_shared<ngraph::op::Multiply>(grad, grad);
+  auto moment2out = std::make_shared<ngraph::op::Add>(
+      ElementwiseScalar<ngraph::op::Multiply>(beta2, moment2),
+      ElementwiseScalar<ngraph::op::Multiply>(1. - beta2, grad_square));
+  auto node_sqrt = std::make_shared<ngraph::op::Sqrt>(
+      ElementwiseScalar<ngraph::op::Subtract>(1., beta2pow));
+  auto lr = std::make_shared<ngraph::op::Divide>(
+      node_sqrt, ElementwiseScalar<ngraph::op::Subtract>(1., beta1pow));
+  auto updated_lr = std::make_shared<ngraph::op::Multiply>(learning_rate, lr);
+
+  auto moment2_sqrt = std::make_shared<ngraph::op::Sqrt>(moment2out);
+  auto param_grad = std::make_shared<ngraph::op::Divide>(
+      moment1out, ElementwiseScalar<ngraph::op::Add>(epsilon, moment2_sqrt));
+  auto delta = ElementwiseScalar<ngraph::op::Multiply>(updated_lr, param_grad);
+  auto param_out = std::make_shared<ngraph::op::Subtract>(param, delta);
+
+  platform::SetOutputNode(op, "Moment1Out", moment1out, ngb_node_map);
+  platform::SetOutputNode(op, "Moment2Out", moment2out, ngb_node_map);
+  platform::SetOutputNode(op, "ParamOut", param_out, ngb_node_map);
+}
+}  // namespace ngraphs
+}  // namespace operators
+}  // namespace paddle
+
+REGISTER_NG_OP(adam, BuildAdamNode);
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_adam_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_adam_ngraph_op.py
new file mode 100644
index 0000000000..ef2aedf65f
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_adam_ngraph_op.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import unittest
+from paddle.fluid.tests.unittests.test_adam_op import TestAdamOp1, TestAdamOp2, TestAdamOpMultipleSteps, TestSparseAdamOp
+
+if __name__ == "__main__":
+    unittest.main()
--
GitLab
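Note (not part of the patch): BuildAdamNode assembles the standard bias-corrected Adam update from nGraph Add/Multiply/Sqrt/Divide/Subtract nodes, folding the bias correction sqrt(1 - beta2^t) / (1 - beta1^t) into the learning rate instead of into the moments. The NumPy sketch below restates that dataflow for reference; the function name and signature are illustrative, not a PaddlePaddle API, and the variable names mirror the C++ above.

import numpy as np

def adam_step(param, grad, moment1, moment2, beta1pow, beta2pow,
              learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-8):
    # moment1out / moment2out: exponential moving averages of g and g^2.
    moment1_out = beta1 * moment1 + (1.0 - beta1) * grad
    moment2_out = beta2 * moment2 + (1.0 - beta2) * grad * grad
    # node_sqrt / lr / updated_lr: bias correction folded into the step
    # size, where beta1pow = beta1**t and beta2pow = beta2**t.
    updated_lr = learning_rate * np.sqrt(1.0 - beta2pow) / (1.0 - beta1pow)
    # param_grad / delta / param_out: the parameter update itself.
    param_out = param - updated_lr * moment1_out / (
        epsilon + np.sqrt(moment2_out))
    return param_out, moment1_out, moment2_out

The new test file adds no cases of its own: it re-imports TestAdamOp1, TestAdamOp2, TestAdamOpMultipleSteps, and TestSparseAdamOp from test_adam_op so the existing Adam checks also run from the ngraph test directory, exercising this op when the nGraph engine is enabled.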