Unverified commit 3678cae2, authored by 张春乔, committed by GitHub

support fp16 on AlphaDropout (#50917)

Parent: 5f60b597
@@ -980,6 +980,30 @@ class TestAlphaDropoutCAPI(unittest.TestCase):
             result.numpy(), result_np, rtol=1e-05
         )
+    def test_static_fp16_gpu(self):
+        if paddle.fluid.core.is_compiled_with_cuda():
+            place = paddle.CUDAPlace(0)
+            with paddle.static.program_guard(
+                paddle.static.Program(), paddle.static.Program()
+            ):
+                input = np.random.random([2, 3]).astype("float16")
+                x = paddle.static.data(name="x", shape=[2, 3], dtype="float16")
+                m = paddle.nn.AlphaDropout(p=0.0)
+                y = m(x)
+                exe = paddle.static.Executor(place)
+                res = exe.run(
+                    paddle.static.default_main_program(),
+                    feed={
+                        "x": input,
+                    },
+                    fetch_list=[y],
+                )
+                np.testing.assert_allclose(res[0], input, rtol=1e-05)
 class TestDropoutWithDeterminateSeedGenerator(unittest.TestCase):
     def setUp(self):
...
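For context, a dynamic-mode counterpart of the new static-graph test could look like the sketch below (not part of this commit; it assumes a CUDA-enabled Paddle build and relies on `p=0.0` reducing `AlphaDropout` to an identity map):

```python
import numpy as np
import paddle

# Hypothetical dynamic-mode check, mirroring test_static_fp16_gpu above.
if paddle.is_compiled_with_cuda():
    paddle.set_device("gpu")
    x_np = np.random.random([2, 3]).astype("float16")
    x = paddle.to_tensor(x_np)
    m = paddle.nn.AlphaDropout(p=0.0)  # p=0.0: the layer drops nothing
    y = m(x)
    # The fp16 input should pass through unchanged when nothing is dropped.
    np.testing.assert_allclose(y.numpy(), x_np, rtol=1e-05)
```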
@@ -1384,7 +1384,7 @@ def alpha_dropout(x, p=0.5, training=True, name=None):
     Alpha Dropout fits well to the SELU activation function by randomly setting activations to the negative saturation value.
     Args:
-        x (Tensor): The input tensor. The data type is float32 or float64.
+        x (Tensor): The input tensor. The data type is float16, float32 or float64.
         p (float | int): Probability of setting units to zero. Default 0.5.
         training (bool): A flag indicating whether it is in train phase or not. Default True.
         name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
@@ -1416,7 +1416,7 @@ def alpha_dropout(x, p=0.5, training=True, name=None):
     if not in_dynamic_mode():
         check_variable_and_dtype(
-            x, 'x', ['float32', 'float64'], 'alpha_dropout'
+            x, 'x', ['float16', 'float32', 'float64'], 'alpha_dropout'
         )
     if training:
...
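The library-side change is confined to the static-graph dtype whitelist passed to `check_variable_and_dtype`: with `'float16'` included, feeding an fp16 variable through `paddle.nn.functional.alpha_dropout` in a static graph no longer raises a dtype error. A minimal sketch of the newly allowed usage (my own illustration, not from the commit):

```python
import paddle
import paddle.nn.functional as F

paddle.enable_static()
# Before this change, the dtype check inside alpha_dropout would reject
# an fp16 input; with 'float16' whitelisted, the graph builds normally.
with paddle.static.program_guard(paddle.static.Program()):
    x = paddle.static.data(name="x", shape=[4, 8], dtype="float16")
    y = F.alpha_dropout(x, p=0.5, training=True)
```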