diff --git a/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py b/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py
index fce587c5921b0ffeb72010a3ed8774d79d2d51be..fc0e3d190ef31422712666af54a85df407664184 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py
@@ -29,20 +29,18 @@ from utils import DyGraphProgramDescTracerTestHelper
 
 class SimpleNet(fluid.Layer):
     def __init__(self,
-                 name_scope,
                  hidden_size,
                  vocab_size,
                  num_steps=20,
                  init_scale=0.1,
                  is_sparse=False,
                  dtype='float32'):
-        super(SimpleNet, self).__init__(name_scope)
+        super(SimpleNet, self).__init__()
         self.hidden_size = hidden_size
         self.vocab_size = vocab_size
         self.init_scale = init_scale
         self.num_steps = num_steps
         self.embedding = Embedding(
-            self.full_name(),
             size=[vocab_size, hidden_size],
             dtype=dtype,
             is_sparse=is_sparse,
@@ -100,7 +98,6 @@ class TestDygraphSimpleNet(unittest.TestCase):
                 fluid.default_main_program().random_seed = seed
 
                 simple_net = SimpleNet(
-                    "simple_net",
                     hidden_size=hidden_size,
                     vocab_size=vocab_size,
                     num_steps=num_steps,
@@ -120,7 +117,7 @@
                 for i in range(batch_num):
                     x_data = np.arange(12).reshape(4, 3).astype('int64')
                     y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
-                    x_data = x_data.reshape((-1, num_steps, 1))
+                    x_data = x_data.reshape((-1, num_steps))
                     y_data = y_data.reshape((-1, 1))
 
                     x = to_variable(x_data)
@@ -143,7 +140,6 @@
                 fluid.default_main_program().random_seed = seed
 
                 simple_net = SimpleNet(
-                    "simple_net",
                     hidden_size=hidden_size,
                     vocab_size=vocab_size,
                     num_steps=num_steps,
@@ -153,7 +149,7 @@
                 exe = fluid.Executor(place)
                 sgd = SGDOptimizer(learning_rate=1e-3)
                 x = fluid.layers.data(
-                    name="x", shape=[-1, num_steps, 1], dtype='int64')
+                    name="x", shape=[-1, num_steps], dtype='int64')
                 y = fluid.layers.data(name="y", shape=[-1, 1], dtype=dtype)
 
                 static_loss = simple_net(x, y)
@@ -172,7 +168,7 @@
                 for i in range(batch_num):
                     x_data = np.arange(12).reshape(4, 3).astype('int64')
                     y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
-                    x_data = x_data.reshape((-1, num_steps, 1))
+                    x_data = x_data.reshape((-1, num_steps))
                     y_data = y_data.reshape((-1, 1))
                     fetch_list = [static_loss]
                     fetch_list.extend(static_param_name_list)
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_selected_rows.py b/python/paddle/fluid/tests/unittests/test_imperative_selected_rows.py
index ec68ff9be969849df90367bb0425feee8619d809..c9e0093f643968d6bddc0ab743ac3343c9e78eee 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_selected_rows.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_selected_rows.py
@@ -24,10 +24,9 @@ import paddle.fluid.core as core
 
 
 class SimpleNet(fluid.Layer):
-    def __init__(self, name_scope, vocab_size, hidden_size, dtype):
-        super(SimpleNet, self).__init__(name_scope)
+    def __init__(self, vocab_size, hidden_size, dtype):
+        super(SimpleNet, self).__init__()
         self.emb = fluid.dygraph.Embedding(
-            self.full_name(),
             size=[vocab_size, hidden_size],
             dtype=dtype,
             param_attr='emb.w',
@@ -53,11 +52,10 @@ class TestSimpleNet(unittest.TestCase):
                         adam = SGDOptimizer(learning_rate=0.001)
                         # grad_clip = fluid.dygraph_grad_clip.GradClipByGlobalNorm(5.0)
 
-                        input_word = np.array(
-                            [[[1], [2]], [[2], [1]]]).astype('int64')
+                        input_word = np.array([[1, 2], [2, 1]]).astype('int64')
                         input = to_variable(input_word)
 
-                        simplenet = SimpleNet("SimpleNet", 20, 32, dtype)
+                        simplenet = SimpleNet(20, 32, dtype)
                         input_emb, emb = simplenet(input)
 
                         try:
@@ -99,11 +97,10 @@
                         grad_clip = fluid.dygraph_grad_clip.GradClipByGlobalNorm(
                             5.0)
 
-                        input_word = np.array(
-                            [[[1], [2]], [[2], [1]]]).astype('int64')
+                        input_word = np.array([[1, 2], [2, 1]]).astype('int64')
                         input = to_variable(input_word)
 
-                        simplenet = SimpleNet("SimpleNet", 20, 32, "float32")
+                        simplenet = SimpleNet(20, 32, "float32")
                         input_emb, emb = simplenet(input)
 
                         try:
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py b/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py
index 04776a3838904c9268c12788edfac417500cd081..4471573142ff79b579ac7a233dbf1dafa2000609 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py
@@ -30,20 +30,18 @@ from paddle.fluid.dygraph.jit import TracedLayer
 
 class SimpleNet(fluid.Layer):
     def __init__(self,
-                 name_scope,
                  hidden_size,
                  vocab_size,
                  num_steps=20,
                  init_scale=0.1,
                  is_sparse=False,
                  dtype='float32'):
-        super(SimpleNet, self).__init__(name_scope)
+        super(SimpleNet, self).__init__()
         self.hidden_size = hidden_size
         self.vocab_size = vocab_size
         self.init_scale = init_scale
         self.num_steps = num_steps
         self.embedding = Embedding(
-            self.full_name(),
             size=[vocab_size, hidden_size],
             dtype=dtype,
             is_sparse=is_sparse,
@@ -109,7 +107,6 @@ class TestDygraphSimpleNet(unittest.TestCase):
                 fluid.default_main_program().random_seed = seed
 
                 simple_net = SimpleNet(
-                    "simple_net",
                     hidden_size=hidden_size,
                     vocab_size=vocab_size,
                     num_steps=num_steps,
@@ -130,7 +127,7 @@
                 for i in range(batch_num):
                     x_data = np.arange(12).reshape(4, 3).astype('int64')
                     y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
-                    x_data = x_data.reshape((-1, num_steps, 1))
+                    x_data = x_data.reshape((-1, num_steps))
                     y_data = y_data.reshape((-1, 1))
 
                     x = to_variable(x_data)
@@ -153,7 +150,6 @@
                 fluid.default_main_program().random_seed = seed
 
                 simple_net = SimpleNet(
-                    "simple_net",
                     hidden_size=hidden_size,
                     vocab_size=vocab_size,
                     num_steps=num_steps,
@@ -163,7 +159,7 @@
                 exe = fluid.Executor(place)
                 sgd = SGDOptimizer(learning_rate=1e-3)
                 x = fluid.layers.data(
-                    name="x", shape=[-1, num_steps, 1], dtype='int64')
+                    name="x", shape=[-1, num_steps], dtype='int64')
                 y = fluid.layers.data(name="y", shape=[-1, 1], dtype=dtype)
 
                 static_loss = simple_net(x, y)
@@ -182,7 +178,7 @@
                 for i in range(batch_num):
                     x_data = np.arange(12).reshape(4, 3).astype('int64')
                     y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
-                    x_data = x_data.reshape((-1, num_steps, 1))
+                    x_data = x_data.reshape((-1, num_steps))
                     y_data = y_data.reshape((-1, 1))
                     fetch_list = [static_loss]
                     fetch_list.extend(static_param_name_list)
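The sketch below is not part of the diff; it is a minimal illustration, assuming the paddle.fluid 1.x dygraph API that these tests exercise, of the two behaviors the updated tests rely on: dygraph layers such as Embedding are now constructed without a name_scope/self.full_name() argument, and the embedding lookup accepts 2-D int64 ids of shape [batch, num_steps] rather than [batch, num_steps, 1].

import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import Embedding, to_variable

with fluid.dygraph.guard(fluid.CPUPlace()):
    # Constructed without a name_scope argument, matching the updated tests.
    emb = Embedding(size=[20, 32], dtype='float32', is_sparse=False)
    # Ids are plain 2-D [batch, num_steps]; the trailing [..., 1] dim is gone.
    ids = to_variable(np.array([[1, 2], [2, 1]]).astype('int64'))
    out = emb(ids)  # lookup result has shape [2, 2, 32]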