未验证 提交 70131b47 编写于 作者: C chentianyu03 提交者: GitHub

add warning message when dtypes of operator are not same (#31136)

* add error msg when dtypes of operator are not same

* add error msg when dtypes of operator are not same

* change error msg to warning msg when dtypes of operator are not same

* modify test case to fit for python2
上级 be61c2d0
@@ -21,6 +21,7 @@ from . import no_grad
import numpy as np
import six
import warnings
_supported_int_dtype_ = [
core.VarDesc.VarType.UINT8,
@@ -51,6 +52,11 @@ _supported_promote_complex_types_ = [
'__matmul__',
]
_complex_dtypes = [
core.VarDesc.VarType.COMPLEX64,
core.VarDesc.VarType.COMPLEX128,
]
_already_patch_varbase = False
@@ -214,7 +220,9 @@ def monkey_patch_math_varbase():
# 3. promote types or unify right var type to left var
rhs_dtype = other_var.dtype
if lhs_dtype != rhs_dtype:
if method_name in _supported_promote_complex_types_ and (
lhs_dtype in _complex_dtypes or
rhs_dtype in _complex_dtypes):
# only when lhs_dtype or rhs_dtype is complex type,
# the dtype will promote, in other cases, directly
# use lhs_dtype, this is consistent with original rule
@@ -225,6 +233,9 @@ def monkey_patch_math_varbase():
other_var = other_var if rhs_dtype == promote_dtype else astype(
other_var, promote_dtype)
else:
warnings.warn(
'The dtype of left and right variables are not the same, left dtype is {}, but right dtype is {}, the right dtype will convert to {}'.
format(lhs_dtype, rhs_dtype, lhs_dtype))
other_var = astype(other_var, lhs_dtype)
if reverse:
...
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division
import unittest
import numpy as np
import warnings
import paddle
class TestTensorTypePromotion(unittest.TestCase):
    """Check that binary operators on tensors with mismatched dtypes warn.

    ``self.x`` is built from Python ints and ``self.y`` from Python floats,
    so their dtypes differ and every ``x <op> y`` should emit the
    "dtype ... not the same" warning added in ``monkey_patch_math_varbase``.
    """

    def setUp(self):
        # Integer tensor vs. float tensor — dtypes intentionally differ.
        self.x = paddle.to_tensor([2, 3])
        self.y = paddle.to_tensor([1.0, 2.0])

    def _assert_dtype_warning(self, op):
        """Run ``op(self.x, self.y)`` and assert the mismatch warning fired.

        Uses ``record=True`` with ``simplefilter("always")`` so the warning
        is captured even if it was already triggered earlier in the process.
        """
        with warnings.catch_warnings(record=True) as context:
            warnings.simplefilter("always")
            op(self.x, self.y)
            self.assertTrue(
                "The dtype of left and right variables are not the same" in
                str(context[-1].message))

    def test_operator(self):
        # +, -, *, / all route through the patched varbase math methods.
        self._assert_dtype_warning(lambda a, b: a + b)
        self._assert_dtype_warning(lambda a, b: a - b)
        self._assert_dtype_warning(lambda a, b: a * b)
        self._assert_dtype_warning(lambda a, b: a / b)
# Script entry point: discover and run the tests in this module.
if __name__ == '__main__':
    unittest.main()
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册