From 4e504d7a5f2a4d588eb5a7bef9bcd806390f0271 Mon Sep 17 00:00:00 2001 From: liuxiao Date: Tue, 5 May 2020 15:03:44 +0800 Subject: [PATCH] add ops info for VM --- mindspore/ops/_op_impl/tbe/__init__.py | 7 +- mindspore/ops/_op_impl/tbe/adam_apply_one.py | 53 +++++++++++++++ mindspore/ops/_op_impl/tbe/apply_ftrl.py | 65 +++++++++++++++++++ mindspore/ops/_op_impl/tbe/gather_nd.py | 47 ++++++++++++++ .../{strideslice_d.py => strided_slice_d.py} | 0 ...slicegrad_d.py => strided_slice_grad_d.py} | 0 6 files changed, 170 insertions(+), 2 deletions(-) create mode 100644 mindspore/ops/_op_impl/tbe/adam_apply_one.py create mode 100644 mindspore/ops/_op_impl/tbe/apply_ftrl.py create mode 100644 mindspore/ops/_op_impl/tbe/gather_nd.py rename mindspore/ops/_op_impl/tbe/{strideslice_d.py => strided_slice_d.py} (100%) rename mindspore/ops/_op_impl/tbe/{strideslicegrad_d.py => strided_slice_grad_d.py} (100%) diff --git a/mindspore/ops/_op_impl/tbe/__init__.py b/mindspore/ops/_op_impl/tbe/__init__.py index 1b09b50cd..51d51fc77 100644 --- a/mindspore/ops/_op_impl/tbe/__init__.py +++ b/mindspore/ops/_op_impl/tbe/__init__.py @@ -19,8 +19,10 @@ from .abs_grad import _abs_grad_tbe from .adam_apply_one_with_decay import _adam_apply_one_with_decay_tbe from .add import _add_tbe from .add_n import _add_n_tbe +from .apply_ftrl import _apply_ftrl_tbe from .apply_momentum import _apply_momentum_tbe from .apply_adam import _apply_adam_tbe +from .adam_apply_one import _adam_apply_one_tbe from .assign import _assign_tbe from .assign_add import _assign_add_tbe from .assign_sub import _assign_sub_tbe @@ -67,13 +69,14 @@ from .squeeze import _squeeze_tbe from .tile import _tile_tbe from .atomic_addr_clean import _atomic_addr_clean_tbe from .gather_v2 import _gather_v2_tbe +from .gather_nd import _gather_nd_tbe from .bn_training_reduce import _bn_training_reduce_tbe from .bn_training_reduce_grad import _bn_training_reduce_grad_tbe from .bn_training_update import _bn_training_update_tbe from 
.bn_training_update_grad import _bn_training_update_grad_tbe from .reciprocal import _reciprocal_tbe -from .strideslice_d import _strided_slice_d_tbe -from .strideslicegrad_d import _strided_slice_grad_d_tbe +from .strided_slice_d import _strided_slice_d_tbe +from .strided_slice_grad_d import _strided_slice_grad_d_tbe from .split_d import _split_d_tbe from .exp import _exp_tbe from .div import _div_tbe diff --git a/mindspore/ops/_op_impl/tbe/adam_apply_one.py b/mindspore/ops/_op_impl/tbe/adam_apply_one.py new file mode 100644 index 000000000..edd12bf55 --- /dev/null +++ b/mindspore/ops/_op_impl/tbe/adam_apply_one.py @@ -0,0 +1,53 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +"""AdamApplyOne op""" +from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType + +adam_apply_one_op_info = TBERegOp("AdamApplyOne") \ + .fusion_type("OPAQUE") \ + .async_flag(False) \ + .binfile_name("adam_apply_one.so") \ + .compute_cost(10) \ + .kernel_name("adam_apply_one") \ + .partial_flag(True) \ + .input(0, "input0", False, "required", "all") \ + .input(1, "input1", False, "required", "all") \ + .input(2, "input2", False, "required", "all") \ + .input(3, "input3", False, "required", "all") \ + .input(4, "input4", False, "required", "all") \ + .input(5, "mul0_x", False, "required", "all") \ + .input(6, "mul1_x", False, "required", "all") \ + .input(7, "mul2_x", False, "required", "all") \ + .input(8, "mul3_x", False, "required", "all") \ + .input(9, "add2_y", False, "required", "all") \ + .output(0, "output0", False, "required", "all") \ + .output(1, "output1", False, "required", "all") \ + .output(2, "output2", False, "required", "all") \ + .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_Default) \ + .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_Default) \ + .get_op_info() + + +@op_info_register(adam_apply_one_op_info) +def _adam_apply_one_tbe(): + """AdamApplyOne TBE register""" + return diff --git a/mindspore/ops/_op_impl/tbe/apply_ftrl.py b/mindspore/ops/_op_impl/tbe/apply_ftrl.py new file mode 100644 index 000000000..e37648191 --- /dev/null +++ 
b/mindspore/ops/_op_impl/tbe/apply_ftrl.py @@ -0,0 +1,65 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +"""ApplyFtrl op""" +from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType + +apply_ftrl_op_info = TBERegOp("ApplyFtrl") \ + .fusion_type("OPAQUE") \ + .async_flag(False) \ + .binfile_name("apply_ftrl.so") \ + .compute_cost(10) \ + .kernel_name("apply_ftrl") \ + .partial_flag(True) \ + .input(0, "var", False, "required", "all") \ + .input(1, "accum", False, "required", "all") \ + .input(2, "linear", False, "required", "all") \ + .input(3, "grad", False, "required", "all") \ + .input(4, "lr", False, "required", "all") \ + .input(5, "l1", False, "required", "all") \ + .input(6, "l2", False, "required", "all") \ + .input(7, "lr_power", False, "required", "all") \ + .output(0, "var", False, "required", "all") \ + .dtype_format(DataType.F16_5HD, DataType.F16_5HD, DataType.F16_5HD, DataType.F16_5HD, + DataType.F16_5HD, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_5HD) \ + .dtype_format(DataType.F16_FracZ, DataType.F16_FracZ, DataType.F16_FracZ, DataType.F16_FracZ, + DataType.F16_FracZ, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_FracZ) \ + .dtype_format(DataType.F16_C1HWNCoC0, DataType.F16_C1HWNCoC0, DataType.F16_C1HWNCoC0, 
DataType.F16_C1HWNCoC0, + DataType.F16_C1HWNCoC0, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_C1HWNCoC0) \ + .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, DataType.F16_Default, + DataType.F16_Default) \ + .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD, + DataType.F32_5HD, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_5HD) \ + .dtype_format(DataType.F32_FracZ, DataType.F32_FracZ, DataType.F32_FracZ, DataType.F32_FracZ, + DataType.F32_FracZ, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_FracZ) \ + .dtype_format(DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0, + DataType.F32_C1HWNCoC0, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_C1HWNCoC0) \ + .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, + DataType.F32_Default) \ + .get_op_info() + + +@op_info_register(apply_ftrl_op_info) +def _apply_ftrl_tbe(): + """ApplyFtrl TBE register""" + return diff --git a/mindspore/ops/_op_impl/tbe/gather_nd.py b/mindspore/ops/_op_impl/tbe/gather_nd.py new file mode 100644 index 000000000..fceb3dfe7 --- /dev/null +++ b/mindspore/ops/_op_impl/tbe/gather_nd.py @@ -0,0 +1,47 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +"""GatherNd op""" +from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType + +gather_nd_op_info = TBERegOp("GatherNd") \ + .fusion_type("OPAQUE") \ + .async_flag(False) \ + .binfile_name("gather_nd.so") \ + .compute_cost(10) \ + .kernel_name("gather_nd") \ + .partial_flag(True) \ + .input(0, "x1", False, "required", "all") \ + .input(1, "x2", False, "required", "all") \ + .output(0, "y", False, "required", "all") \ + .dtype_format(DataType.I32_Default, DataType.I32_Default, DataType.I32_Default) \ + .dtype_format(DataType.I32_Default, DataType.I64_Default, DataType.I32_Default) \ + .dtype_format(DataType.F32_Default, DataType.I32_Default, DataType.F32_Default) \ + .dtype_format(DataType.F32_Default, DataType.I64_Default, DataType.F32_Default) \ + .dtype_format(DataType.F16_Default, DataType.I32_Default, DataType.F16_Default) \ + .dtype_format(DataType.F16_Default, DataType.I64_Default, DataType.F16_Default) \ + .dtype_format(DataType.I8_Default, DataType.I32_Default, DataType.I8_Default) \ + .dtype_format(DataType.I8_Default, DataType.I64_Default, DataType.I8_Default) \ + .dtype_format(DataType.U8_Default, DataType.I32_Default, DataType.U8_Default) \ + .dtype_format(DataType.U8_Default, DataType.I64_Default, DataType.U8_Default) \ + .dtype_format(DataType.BOOL_Default, DataType.I32_Default, DataType.BOOL_Default) \ + .dtype_format(DataType.BOOL_Default, DataType.I64_Default, DataType.BOOL_Default) \ + .get_op_info() + + 
+@op_info_register(gather_nd_op_info) +def _gather_nd_tbe(): + """GatherNd TBE register""" + return diff --git a/mindspore/ops/_op_impl/tbe/strideslice_d.py b/mindspore/ops/_op_impl/tbe/strided_slice_d.py similarity index 100% rename from mindspore/ops/_op_impl/tbe/strideslice_d.py rename to mindspore/ops/_op_impl/tbe/strided_slice_d.py diff --git a/mindspore/ops/_op_impl/tbe/strideslicegrad_d.py b/mindspore/ops/_op_impl/tbe/strided_slice_grad_d.py similarity index 100% rename from mindspore/ops/_op_impl/tbe/strideslicegrad_d.py rename to mindspore/ops/_op_impl/tbe/strided_slice_grad_d.py -- GitLab