Unverified commit b2034c28, authored by zhupengyang, committed by GitHub

softmax: imperative->static; fix doc examples (#26134)

Parent b6d14d9d
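For context on the "imperative->static" migration this commit performs, below is a minimal sketch of the two execution modes as they look after the change. It assumes a Paddle build from around the time of this commit (the 2.0 beta, where `paddle.data` and `paddle.to_variable` are the public names used in the diff); the input values are made up for illustration and this is not part of the commit itself.

    import numpy as np
    import paddle
    import paddle.fluid as fluid
    import paddle.nn.functional as F

    x_np = np.array([[1.0, 2.0, 3.0]], dtype='float32')

    # Dynamic ("imperative") mode: eager execution, no Program/Executor needed.
    paddle.disable_static()
    x = paddle.to_variable(x_np)
    print(F.softmax(x).numpy())          # each row sums to 1

    # Static mode: build a Program first, then run it with an Executor.
    paddle.enable_static()
    main_prog = fluid.Program()
    with fluid.program_guard(main_prog):
        data = paddle.data('X', x_np.shape, 'float32')
        out = F.softmax(data)
    exe = paddle.static.Executor(fluid.CPUPlace())
    res = exe.run(main_prog, feed={'X': x_np}, fetch_list=[out])
    print(res[0])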
@@ -21,6 +21,7 @@ import paddle.fluid.core as core
 import paddle.fluid as fluid
 from paddle.fluid import compiler, Program, program_guard
 import paddle
+import paddle.nn.functional as F
 np.random.seed(10)
@@ -231,34 +232,33 @@ class TestNnFunctionalSoftmaxApi(unittest.TestCase):
         self.out_ref = np.apply_along_axis(stable_softmax, -1, self.x_np)

     def test_api_static(self):
-        train_program = Program()
-        startup_program = Program()
-        with program_guard(train_program, startup_program):
+        with program_guard(Program()):
             x = paddle.data('X', self.x_np.shape, 'float32')
-            out = paddle.nn.functional.softmax(x)
-        exe = paddle.Executor(self.place)
-        res = exe.run(train_program, feed={'X': self.x_np}, fetch_list=[out])
-        assert np.allclose(self.out_ref, res[0])
+            out = F.softmax(x)
+            exe = paddle.static.Executor(self.place)
+            res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
+        self.assertEqual(np.allclose(self.out_ref, res[0]), True)

     def test_api_imperative(self):
-        with paddle.imperative.guard(self.place):
-            x = paddle.imperative.to_variable(self.x_np)
-            out = paddle.nn.functional.softmax(x)
-            assert np.allclose(self.out_ref, out.numpy())
-            out = paddle.nn.functional.softmax(x, axis=0)
-            out_ref = np.apply_along_axis(stable_softmax, 0, self.x_np)
-            assert np.allclose(out_ref, out.numpy())
+        paddle.disable_static(self.place)
+        x = paddle.to_variable(self.x_np)
+        out = F.softmax(x)
+        self.assertEqual(np.allclose(self.out_ref, out.numpy()), True)
+        out = F.softmax(x, axis=0)
+        out_ref = np.apply_along_axis(stable_softmax, 0, self.x_np)
+        self.assertEqual(np.allclose(out_ref, out.numpy()), True)
+        paddle.enable_static()

     def test_error(self):
         with program_guard(Program(), Program()):
             # The x should be variable and its dtype should be float32, float64.
-            self.assertRaises(TypeError, paddle.nn.functional.softmax, [1])
+            self.assertRaises(TypeError, F.softmax, [1])
             x = paddle.data(name='x', shape=[2, 3], dtype='int32')
-            self.assertRaises(TypeError, paddle.nn.functional.softmax, x)
+            self.assertRaises(TypeError, F.softmax, x)

 if __name__ == "__main__":
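The `stable_softmax` helper the assertions above compare against is imported elsewhere in the test file and is not part of this hunk. As a point of reference, a numerically stable softmax reference is usually written by shifting each row by its maximum before exponentiating; here is a sketch of such a helper, under the assumption that the repository's version does the same.

    import numpy as np

    def stable_softmax(x):
        """Softmax of a 1-D vector, shifted by its max for numerical stability."""
        shifted = x - np.max(x)      # avoids overflow in exp() for large inputs
        exps = np.exp(shifted)
        return exps / np.sum(exps)

    # Used the same way as in the test: apply along the last axis of a batch.
    x_np = np.random.uniform(-1.0, 1.0, [2, 3, 4]).astype('float32')
    out_ref = np.apply_along_axis(stable_softmax, -1, x_np)
    assert np.allclose(out_ref.sum(axis=-1), 1.0, atol=1e-6)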
@@ -401,7 +401,7 @@ def softmax(x, axis=-1, name=None):
         import paddle.nn.functional as F
         import numpy as np

-        paddle.enable_imperative()
+        paddle.disable_static()

         x = np.array([[[2.0, 3.0, 4.0, 5.0],
                        [3.0, 4.0, 5.0, 6.0],
@@ -409,7 +409,7 @@ def softmax(x, axis=-1, name=None):
                       [[1.0, 2.0, 3.0, 4.0],
                        [5.0, 6.0, 7.0, 8.0],
                        [6.0, 7.0, 8.0, 9.0]]], 'float32')
-        x = paddle.imperative.to_variable(x)
+        x = paddle.to_variable(x)
         out = F.softmax(x)
         # [[[0.0320586 , 0.08714432, 0.23688282, 0.64391426],
         #   [0.0320586 , 0.08714432, 0.23688282, 0.64391426],
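The probabilities in the docstring's output comment can be checked independently of Paddle; for the first row [2.0, 3.0, 4.0, 5.0] a plain NumPy computation reproduces the same values.

    import numpy as np

    row = np.array([2.0, 3.0, 4.0, 5.0])
    shifted = np.exp(row - row.max())    # max-shift keeps exp() from overflowing
    print(shifted / shifted.sum())       # [0.0320586  0.08714432 0.23688282 0.64391426]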
@@ -232,11 +232,11 @@ class LeakyReLU(layers.Layer):
             import paddle
             import numpy as np

-            paddle.enable_imperative()
+            paddle.disable_static()

             lrelu = paddle.nn.LeakyReLU()
-            x = paddle.imperative.to_variable(np.array([-2, 0, 1], 'float32'))
-            out = lrelu(x)  # [-0.02, 0, 1]
+            x = paddle.to_variable(np.array([-2, 0, 1], 'float32'))
+            out = lrelu(x)  # [-0.02, 0., 1.]
     """

     def __init__(self, alpha=1e-2, name=None):
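The corrected output comment follows from the LeakyReLU definition, out = x for x >= 0 and out = alpha * x otherwise, with the layer's default alpha of 0.01 (the `alpha=1e-2` visible in the constructor above). A quick NumPy check, independent of Paddle:

    import numpy as np

    def leaky_relu_ref(x, alpha=0.01):
        # Elementwise: keep non-negative values, scale negative ones by alpha.
        return np.where(x >= 0, x, alpha * x)

    print(leaky_relu_ref(np.array([-2.0, 0.0, 1.0], dtype='float32')))
    # approximately [-0.02, 0., 1.]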