From 0c4354b2e0a5745a713990d617bfc90d1fbc7dc4 Mon Sep 17 00:00:00 2001
From: risemeup1 <62429225+risemeup1@users.noreply.github.com>
Date: Wed, 23 Aug 2023 10:54:34 +0800
Subject: [PATCH] fix coverage compile error (#56546)

* fix coverage compile error

* test
---
 test/cpp/fluid/CMakeLists.txt           |   1 -
 test/cpp/fluid/prim_ops/CMakeLists.txt  |  51 --
 test/cpp/fluid/prim_ops/prim_op_test.cc | 756 ------------------------
 3 files changed, 808 deletions(-)
 delete mode 100644 test/cpp/fluid/prim_ops/CMakeLists.txt
 delete mode 100644 test/cpp/fluid/prim_ops/prim_op_test.cc

diff --git a/test/cpp/fluid/CMakeLists.txt b/test/cpp/fluid/CMakeLists.txt
index 11e0a1ec870..f91f472713b 100644
--- a/test/cpp/fluid/CMakeLists.txt
+++ b/test/cpp/fluid/CMakeLists.txt
@@ -20,7 +20,6 @@ add_subdirectory(nccl)
 if(WITH_PSCORE)
   add_subdirectory(pscore)
 endif()
-add_subdirectory(prim_ops)
 add_subdirectory(reader)
 add_subdirectory(reduce_ops)
 # TODO(gouzil): enable this after the bug is fixed. windows: Exit code 0xc000007b, pr: #53470
diff --git a/test/cpp/fluid/prim_ops/CMakeLists.txt b/test/cpp/fluid/prim_ops/CMakeLists.txt
deleted file mode 100644
index 4357325ff8c..00000000000
--- a/test/cpp/fluid/prim_ops/CMakeLists.txt
+++ /dev/null
@@ -1,51 +0,0 @@
-set(PRIM_OP_PATH ${PADDLE_SOURCE_DIR}/paddle/fluid/operators/prim_ops/)
-
-set(PRIM_OP_SRCS
-    ${PRIM_OP_PATH}reshape_p_op.cc
-    ${PRIM_OP_PATH}broadcast_p_op.cc
-    ${PRIM_OP_PATH}reduce_sum_p_op.cc
-    ${PRIM_OP_PATH}transpose_p_op.cc
-    ${PRIM_OP_PATH}split_p_op.cc
-    ${PRIM_OP_PATH}concat_p_op.cc
-    ${PRIM_OP_PATH}slice_select_p_op.cc
-    ${PRIM_OP_PATH}slice_assign_p_op.cc
-    ${PRIM_OP_PATH}gather_p_op.cc
-    ${PRIM_OP_PATH}scatter_add_p_op.cc
-    ${PRIM_OP_PATH}add_p_op.cc
-    ${PRIM_OP_PATH}sub_p_op.cc
-    ${PRIM_OP_PATH}mul_p_op.cc
-    ${PRIM_OP_PATH}div_p_op.cc
-    ${PRIM_OP_PATH}sqrt_p_op.cc
-    ${PRIM_OP_PATH}tanh_p_op.cc
-    ${PRIM_OP_PATH}sin_p_op.cc
-    ${PRIM_OP_PATH}cos_p_op.cc
-    ${PRIM_OP_PATH}exp_p_op.cc
-    ${PRIM_OP_PATH}matmul_p_op.cc
-    ${PRIM_OP_PATH}fill_constant_p_op.cc
-    ${PRIM_OP_PATH}log_p_op.cc
-    ${PRIM_OP_PATH}select_p_op.cc
-    ${PRIM_OP_PATH}eq_p_op.cc
-    ${PRIM_OP_PATH}gt_p_op.cc
-    ${PRIM_OP_PATH}ge_p_op.cc
-    ${PRIM_OP_PATH}ne_p_op.cc
-    ${PRIM_OP_PATH}pow_p_op.cc
-    ${PRIM_OP_PATH}max_p_op.cc
-    ${PRIM_OP_PATH}erf_p_op.cc
-    ${PRIM_OP_PATH}bernoulli_p_op.cc
-    ${PRIM_OP_PATH}abs_p_op.cc
-    ${PRIM_OP_PATH}cast_p_op.cc
-    ${PRIM_OP_PATH}rsqrt_p_op.cc
-    ${PRIM_OP_PATH}uniform_random_p_op.cc)
-if(WITH_CINN)
-  set(CINN_DEPS cinn_compiler)
-endif()
-cc_test_old(
-  prim_op_test
-  SRCS
-  prim_op_test.cc
-  ${PRIM_OP_SRCS}
-  DEPS
-  fleet_executor
-  ${CINN_DEPS}
-  op_registry
-  python)
diff --git a/test/cpp/fluid/prim_ops/prim_op_test.cc b/test/cpp/fluid/prim_ops/prim_op_test.cc
deleted file mode 100644
index 0d3c1f8a0cf..00000000000
--- a/test/cpp/fluid/prim_ops/prim_op_test.cc
+++ /dev/null
@@ -1,756 +0,0 @@
-// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "gtest/gtest.h"
-#include "paddle/fluid/framework/op_registry.h"
-#include "paddle/fluid/framework/program_desc.h"
-
-USE_OP_ITSELF(reshape_p);
-USE_OP_ITSELF(broadcast_p);
-USE_OP_ITSELF(reduce_sum_p);
-USE_OP_ITSELF(transpose_p);
-USE_OP_ITSELF(split_p);
-USE_OP_ITSELF(concat_p);
-USE_OP_ITSELF(slice_select_p);
-USE_OP_ITSELF(slice_assign_p);
-USE_OP_ITSELF(gather_p);
-USE_OP_ITSELF(scatter_add_p);
-USE_OP_ITSELF(add_p);
-USE_OP_ITSELF(sub_p);
-USE_OP_ITSELF(mul_p);
-USE_OP_ITSELF(div_p);
-USE_OP_ITSELF(sqrt_p);
-USE_OP_ITSELF(tanh_p);
-USE_OP_ITSELF(matmul_p);
-USE_OP_ITSELF(fill_constant_p);
-USE_OP_ITSELF(log_p);
-USE_OP_ITSELF(select_p);
-USE_OP_ITSELF(eq_p);
-USE_OP_ITSELF(pow_p);
-USE_OP_ITSELF(max_p);
-USE_OP_ITSELF(erf_p);
-USE_OP_ITSELF(bernoulli_p);
-
-namespace paddle {
-namespace framework {
-
-static void NewVar(BlockDesc *block,
-                   const std::string &name,
-                   const std::vector<int64_t> &shape) {
-  auto *var_desc = block->Var(name);
-  if (!shape.empty()) {
-    var_desc->SetShape(shape);
-    var_desc->SetType(proto::VarType::LOD_TENSOR);
-    var_desc->SetDataType(proto::VarType_Type_FP32);
-  }
-}
-
-static void AppendOp(BlockDesc *block,
-                     const std::string &type,
-                     VariableNameMap inputs,
-                     VariableNameMap outputs,
-                     AttributeMap attrs) {
-  auto &op_info = OpInfoMap::Instance().Get(type);
-  if (op_info.Checker()) {
-    op_info.Checker()->Check(&attrs);
-  }
-
-  auto *op = block->AppendOp();
-  op->SetType(type);
-  for (auto &pair : inputs) {
-    op->SetInput(pair.first, pair.second);
-  }
-
-  for (auto &pair : outputs) {
-    op->SetOutput(pair.first, pair.second);
-    for (auto &var_name : pair.second) {
-      if (!block->FindVarRecursive(var_name)) {
-        NewVar(block, var_name, {});
-      }
-    }
-  }
-
-  op->SetAttrMap(attrs);
-  op->InferVarType(block);
-  op->InferShape(*block);
-}
-
-TEST(PrimOp, reshape_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "reshape_p",
-           {{"X", {x0}}},
-           {{"Y", {x1}}},
-           {{"shape", std::vector<int64_t>{12, 5}}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 2UL);
-  ASSERT_EQ(shapes[0], 12L);
-  ASSERT_EQ(shapes[1], 5L);
-}
-
-TEST(PrimOp, broadcast_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 1};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "broadcast_p",
-           {{"X", {x0}}},
-           {{"Y", {x1}}},
-           {{"shape", std::vector<int64_t>{3, 4, 5}}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, reduce_sum_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "reduce_sum_p",
-           {{"X", {x0}}},
-           {{"Y", {x1}}},
-           {{"axis", std::vector<int64_t>{0, 2}}, {"keepdim", false}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 1UL);
-  ASSERT_EQ(shapes[0], 4L);
-  AppendOp(block,
-           "reduce_sum_p",
-           {{"X", {x0}}},
-           {{"Y", {x2}}},
-           {{"axis", std::vector<int64_t>{0, 2}}, {"keepdim", true}});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 1L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 1L);
-}
-
-TEST(PrimOp, transpose_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "transpose_p",
-           {{"X", {x0}}},
-           {{"Y", {x1}}},
-           {{"axis", std::vector<int64_t>{2, 1, 0}}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 5L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 3L);
-}
-
-TEST(PrimOp, split_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{6, 8, 10};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-  std::string x3 = "x3";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "split_p",
-           {{"X", {x0}}},
-           {{"YS", {x1, x2, x3}}},
-           {{"axis", int64_t{1}},
-            {"num_or_sections", std::vector<int64_t>{2, 4, 2}}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 2L);
-  ASSERT_EQ(shapes[2], 10L);
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 10L);
-  ASSERT_EQ(block->Var("x3")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x3")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x3")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 2L);
-  ASSERT_EQ(shapes[2], 10L);
-  std::string x4 = "x4";
-  std::string x5 = "x5";
-  AppendOp(
-      block,
-      "split_p",
-      {{"X", {x0}}},
-      {{"YS", {x4, x5}}},
-      {{"axis", int64_t{2}}, {"num_or_sections", std::vector<int64_t>{2}}});
-  ASSERT_EQ(block->Var("x4")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x4")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x4")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 8L);
-  ASSERT_EQ(shapes[2], 5L);
-  ASSERT_EQ(block->Var("x5")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x5")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x5")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 8L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, concat_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape_0{3, 1, 5};
-  std::vector<int64_t> shape_1{3, 4, 5};
-  std::vector<int64_t> shape_2{3, 6, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-  std::string x3 = "x3";
-
-  NewVar(block, x0, shape_0);
-  NewVar(block, x1, shape_1);
-  NewVar(block, x2, shape_2);
-  AppendOp(block,
-           "concat_p",
-           {{"XS", {x0, x1, x2}}},
-           {{"Y", {x3}}},
-           {{"axis", int64_t{1}}});
-  ASSERT_EQ(block->Var("x3")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x3")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x3")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 11L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, slice_select_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{6, 8, 10};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "slice_select_p",
-           {{"X", {x0}}},
-           {{"Y", {x1}}},
-           {{"axis", std::vector<int64_t>{0, 1, 2}},
-            {"starts", std::vector<int64_t>{0, 0, 0}},
-            {"ends", std::vector<int64_t>{5, 7, 9}},
-            {"strides", std::vector<int64_t>{2, 2, 2}}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, slice_assign_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape_0{6, 8, 10};
-  std::vector<int64_t> shape_1{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape_0);
-  NewVar(block, x1, shape_1);
-  AppendOp(block,
-           "slice_assign_p",
-           {{"X", {x0}}, {"Y", {x1}}},
-           {{"Z", {x2}}},
-           {{"axis", std::vector<int64_t>{0, 1, 2}},
-            {"starts", std::vector<int64_t>{0, 0, 0}},
-            {"ends", std::vector<int64_t>{5, 7, 9}},
-            {"strides", std::vector<int64_t>{2, 2, 2}}});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 8L);
-  ASSERT_EQ(shapes[2], 10L);
-}
-
-TEST(PrimOp, gather_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{6, 8, 10};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block,
-           "gather_p",
-           {{"X", {x0}}},
-           {{"Y", {x1}}},
-           {{"axis", int64_t{1}}, {"index", std::vector<int64_t>{0, 2, 5}}});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 3L);
-  ASSERT_EQ(shapes[2], 10L);
-  std::string index_t = "index_t";
-  std::string x2 = "x2";
-
-  auto *var_desc = block->Var(index_t);
-  var_desc->SetShape(std::vector<int64_t>{3});
-  var_desc->SetType(proto::VarType::LOD_TENSOR);
-  var_desc->SetDataType(proto::VarType_Type_INT32);
-  AppendOp(block,
-           "gather_p",
-           {{"X", {x0}}, {"IndexTensor", {index_t}}},
-           {{"Y", {x2}}},
-           {{"axis", int64_t{1}}});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 3L);
-  ASSERT_EQ(shapes[2], 10L);
-}
-
-TEST(PrimOp, scatter_add_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape_0{6, 8, 10};
-  std::vector<int64_t> shape_1{6, 3, 10};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape_0);
-  NewVar(block, x1, shape_1);
-  AppendOp(block,
-           "scatter_add_p",
-           {{"X", {x0}}, {"Y", {x1}}},
-           {{"Z", {x2}}},
-           {{"axis", int64_t{1}}, {"index", std::vector<int64_t>{0, 2, 5}}});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 8L);
-  ASSERT_EQ(shapes[2], 10L);
-  std::string index_t = "index_t";
-  std::string x3 = "x3";
-
-  auto *var_desc = block->Var(index_t);
-  var_desc->SetShape(std::vector<int64_t>{3});
-  var_desc->SetType(proto::VarType::LOD_TENSOR);
-  var_desc->SetDataType(proto::VarType_Type_INT32);
-  AppendOp(block,
-           "scatter_add_p",
-           {{"X", {x0}}, {"Y", {x1}}, {"IndexTensor", {index_t}}},
-           {{"Z", {x3}}},
-           {{"axis", int64_t{1}}});
-  ASSERT_EQ(block->Var("x3")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x3")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x3")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 6L);
-  ASSERT_EQ(shapes[1], 8L);
-  ASSERT_EQ(shapes[2], 10L);
-}
-
-TEST(PrimOp, add_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape);
-  NewVar(block, x1, shape);
-  AppendOp(block, "add_p", {{"X", {x0}}, {"Y", {x1}}}, {{"Z", {x2}}}, {});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, sub_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape);
-  NewVar(block, x1, shape);
-  AppendOp(block, "sub_p", {{"X", {x0}}, {"Y", {x1}}}, {{"Z", {x2}}}, {});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, mul_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape);
-  NewVar(block, x1, shape);
-  AppendOp(block, "mul_p", {{"X", {x0}}, {"Y", {x1}}}, {{"Z", {x2}}}, {});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, div_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape);
-  NewVar(block, x1, shape);
-  AppendOp(block, "div_p", {{"X", {x0}}, {"Y", {x1}}}, {{"Z", {x2}}}, {});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, sqrt_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block, "sqrt_p", {{"X", {x0}}}, {{"Y", {x1}}}, {});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, tanh_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block, "tanh_p", {{"X", {x0}}}, {{"Y", {x1}}}, {});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, matmul_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape_0{3, 4, 5};
-  std::vector<int64_t> shape_1{3, 5, 8};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-  std::string x2 = "x2";
-
-  NewVar(block, x0, shape_0);
-  NewVar(block, x1, shape_1);
-  AppendOp(block, "matmul_p", {{"X", {x0}}, {"Y", {x1}}}, {{"Z", {x2}}}, {});
-  ASSERT_EQ(block->Var("x2")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x2")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x2")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 8L);
-  std::vector<int64_t> shape_2{4, 5};
-  std::vector<int64_t> shape_3{5, 8};
-
-  std::string x3 = "x3";
-  std::string x4 = "x4";
-  std::string x5 = "x5";
-
-  NewVar(block, x3, shape_2);
-  NewVar(block, x4, shape_3);
-  AppendOp(block, "matmul_p", {{"X", {x3}}, {"Y", {x4}}}, {{"Z", {x5}}}, {});
-  ASSERT_EQ(block->Var("x5")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x5")->GetDataType(), proto::VarType_Type_FP32);
-  shapes = block->Var("x5")->GetShape();
-  ASSERT_EQ(shapes.size(), 2UL);
-  ASSERT_EQ(shapes[0], 4L);
-  ASSERT_EQ(shapes[1], 8L);
-}
-
-TEST(PrimOp, fill_constant_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::string x0 = "x0";
-
-  AppendOp(block,
-           "fill_constant_p",
-           {{}},
-           {{"Y", {x0}}},
-           {{"value", 0.0f},
-            {"dtype", proto::VarType_Type_FP32},
-            {"shape", std::vector<int64_t>{3, 4, 5}}});
-  ASSERT_EQ(block->Var("x0")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x0")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x0")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, log_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block, "log_p", {{"X", {x0}}}, {{"Y", {x1}}}, {});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, select_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{2, 3};
-
-  std::string cond = "cond";
-  std::string x = "x";
-  std::string y = "y";
-  std::string z = "z";
-
-  NewVar(block, cond, shape);
-  NewVar(block, x, shape);
-  NewVar(block, y, shape);
-
-  AppendOp(block,
-           "select_p",
-           {{"Condition", {cond}}, {"X", {x}}, {"Y", {y}}},
-           {{"Z", {z}}},
-           {});
-  ASSERT_EQ(block->Var("z")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("z")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("z")->GetShape();
-  ASSERT_EQ(shapes.size(), 2UL);
-  ASSERT_EQ(shapes[0], 2L);
-  ASSERT_EQ(shapes[1], 3L);
-}
-
-TEST(PrimOp, eq_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x = "x";
-  std::string y = "y";
-  std::string z = "z";
-
-  NewVar(block, x, shape);
-  NewVar(block, y, shape);
-  AppendOp(block, "eq_p", {{"X", {x}}, {"Y", {y}}}, {{"Z", {z}}}, {});
-  ASSERT_EQ(block->Var("z")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("z")->GetDataType(), proto::VarType::BOOL);
-  auto shapes = block->Var("z")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, pow_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x = "x";
-  std::string y = "y";
-  std::string z = "z";
-
-  NewVar(block, x, shape);
-  NewVar(block, y, shape);
-  AppendOp(block, "pow_p", {{"X", {x}}, {"Y", {y}}}, {{"Z", {z}}}, {});
-  ASSERT_EQ(block->Var("z")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("z")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("z")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, max_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{2, 3, 4};
-
-  std::string x = "x";
-  std::string y = "y";
-  std::string z = "z";
-
-  NewVar(block, x, shape);
-  NewVar(block, y, shape);
-
-  AppendOp(block, "max_p", {{"X", {x}}, {"Y", {y}}}, {{"Z", {z}}}, {});
-  ASSERT_EQ(block->Var("z")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("z")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("z")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 2L);
-  ASSERT_EQ(shapes[1], 3L);
-  ASSERT_EQ(shapes[2], 4L);
-}
-
-TEST(PrimOp, erf_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::vector<int64_t> shape{3, 4, 5};
-
-  std::string x0 = "x0";
-  std::string x1 = "x1";
-
-  NewVar(block, x0, shape);
-  AppendOp(block, "erf_p", {{"X", {x0}}}, {{"Y", {x1}}}, {});
-  ASSERT_EQ(block->Var("x1")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x1")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x1")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-TEST(PrimOp, bernoulli_p) {
-  ProgramDesc program;
-  auto *block = program.MutableBlock(0);
-  std::string x0 = "x0";
-
-  AppendOp(block,
-           "bernoulli_p",
-           {{}},
-           {{"Y", {x0}}},
-           {{"p", 0.5f},
-            {"dtype", proto::VarType_Type_FP32},
-            {"shape", std::vector<int64_t>{3, 4, 5}}});
-  ASSERT_EQ(block->Var("x0")->GetType(), proto::VarType::LOD_TENSOR);
-  ASSERT_EQ(block->Var("x0")->GetDataType(), proto::VarType_Type_FP32);
-  auto shapes = block->Var("x0")->GetShape();
-  ASSERT_EQ(shapes.size(), 3UL);
-  ASSERT_EQ(shapes[0], 3L);
-  ASSERT_EQ(shapes[1], 4L);
-  ASSERT_EQ(shapes[2], 5L);
-}
-
-}  // namespace framework
-}  // namespace paddle
-- 
GitLab