Unverified commit f1df9dba, authored by Jiabin Yang, committed by GitHub

test=develop, update fluid.layers to LayerHelper (#15797)

Parent f2e8409f
@@ -40,6 +40,8 @@ class SimpleLSTMRNN(fluid.imperative.Layer):
         self._dropout = dropout
         self._input = None
         self._num_steps = num_steps
+        from paddle.fluid.layer_helper import LayerHelper
+        self._helper = LayerHelper('SimpleLSTMRNN', act="tanh")
 
     def _build_once(self, input_embedding, init_hidden=None, init_cell=None):
         self.weight_1_arr = []
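For readers unfamiliar with the helper introduced above: a minimal sketch of the pattern, assuming the fluid 1.x imperative API used in this test. Only LayerHelper, fluid.imperative.Layer, and the act argument come from the diff; the class name MyLayer and the rest of the scaffolding are illustrative.

import paddle.fluid as fluid
from paddle.fluid.layer_helper import LayerHelper


class MyLayer(fluid.imperative.Layer):
    def __init__(self, hidden_size):
        super(MyLayer, self).__init__()
        self._hidden_size = hidden_size
        # Build the helper once in __init__; the first argument is used to
        # generate unique names for the parameters it creates, and act sets
        # a default activation for ops appended through this helper.
        self._helper = LayerHelper('MyLayer', act="tanh")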
@@ -50,17 +52,21 @@ class SimpleLSTMRNN(fluid.imperative.Layer):
         self.mask_array = []
         for i in range(self._num_layers):
-            weight_1 = fluid.layers.create_parameter(
+            weight_1 = self._helper.create_parameter(
+                attr=fluid.ParamAttr(
+                    initializer=fluid.initializer.UniformInitializer(
+                        low=-self._init_scale, high=self._init_scale)),
                 shape=[self._hidden_size * 2, self._hidden_size * 4],
                 dtype="float32",
-                name="fc_weight1_" + str(i),
                 default_initializer=fluid.initializer.UniformInitializer(
                     low=-self._init_scale, high=self._init_scale))
             self.weight_1_arr.append(weight_1)
-            bias_1 = fluid.layers.create_parameter(
-                [self._hidden_size * 4],
+            bias_1 = self._helper.create_parameter(
+                attr=fluid.ParamAttr(
+                    initializer=fluid.initializer.UniformInitializer(
+                        low=-self._init_scale, high=self._init_scale)),
+                shape=[self._hidden_size * 4],
                 dtype="float32",
-                name="fc_bias1_" + str(i),
                 default_initializer=fluid.initializer.Constant(0.0))
             self.bias_arr.append(bias_1)
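The hunk above applies one recurring change: fluid.layers.create_parameter is replaced by the helper's create_parameter, with the initializer also attached to a fluid.ParamAttr and the explicit name= argument dropped. As a standalone, hedged sketch of the two call styles (the shapes and initializers mirror the diff; the free-standing helper and the variable names are assumptions):

import paddle.fluid as fluid
from paddle.fluid.layer_helper import LayerHelper

init_scale = 0.1
hidden_size = 200

# Old style: a free function that adds the parameter to the default programs.
weight_old = fluid.layers.create_parameter(
    shape=[hidden_size * 2, hidden_size * 4],
    dtype="float32",
    default_initializer=fluid.initializer.UniformInitializer(
        low=-init_scale, high=init_scale))

# New style: the layer-owned helper registers the parameter, with the
# initializer carried by a ParamAttr.
helper = LayerHelper('ExampleLayer', act="tanh")
weight_new = helper.create_parameter(
    attr=fluid.ParamAttr(
        initializer=fluid.initializer.UniformInitializer(
            low=-init_scale, high=init_scale)),
    shape=[hidden_size * 2, hidden_size * 4],
    dtype="float32")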
@@ -137,6 +143,8 @@ class PtbModel(fluid.imperative.Layer):
         self.num_layers = num_layers
         self.num_steps = num_steps
         self.dropout = dropout
+        from paddle.fluid.layer_helper import LayerHelper
+        self._helper = LayerHelper('PtbModel', act="tanh")
         self.simple_lstm_rnn = SimpleLSTMRNN(
             hidden_size,
             num_steps,
@@ -151,16 +159,16 @@ class PtbModel(fluid.imperative.Layer):
                 name='embedding_para',
                 initializer=fluid.initializer.UniformInitializer(
                     low=-init_scale, high=init_scale)))
-        self.softmax_weight = fluid.layers.create_parameter(
-            [self.hidden_size, self.vocab_size],
+        self.softmax_weight = self._helper.create_parameter(
+            attr=fluid.ParamAttr(),
+            shape=[self.hidden_size, self.vocab_size],
             dtype="float32",
-            name="softmax_weight",
             default_initializer=fluid.initializer.UniformInitializer(
                 low=-self.init_scale, high=self.init_scale))
-        self.softmax_bias = fluid.layers.create_parameter(
-            [self.vocab_size],
+        self.softmax_bias = self._helper.create_parameter(
+            attr=fluid.ParamAttr(),
+            shape=[self.vocab_size],
             dtype="float32",
-            name='softmax_bias',
             default_initializer=fluid.initializer.UniformInitializer(
                 low=-self.init_scale, high=self.init_scale))
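One detail worth noting above: the softmax parameters pass an empty fluid.ParamAttr() and keep default_initializer, while the LSTM weights earlier put the initializer on the ParamAttr itself. As far as I recall the fluid 1.x behavior, an initializer set on the ParamAttr takes precedence and default_initializer is only used when the attr does not carry one, so both spellings end up uniformly initialized; a tiny sketch under that assumption:

import paddle.fluid as fluid
from paddle.fluid.layer_helper import LayerHelper

init_scale = 0.1
uniform = fluid.initializer.UniformInitializer(low=-init_scale, high=init_scale)
helper = LayerHelper('ExampleLayer')

# Initializer supplied through the ParamAttr, as for the LSTM weights.
w_attr = helper.create_parameter(
    attr=fluid.ParamAttr(initializer=uniform),
    shape=[4, 4],
    dtype="float32")

# Empty ParamAttr with default_initializer as the fallback, as for softmax_weight.
w_fallback = helper.create_parameter(
    attr=fluid.ParamAttr(),
    shape=[4, 4],
    dtype="float32",
    default_initializer=uniform)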
@@ -256,7 +264,6 @@ class TestImperativePtbRnn(unittest.TestCase):
         with new_program_scope():
             fluid.default_startup_program().random_seed = seed
             fluid.default_main_program().random_seed = seed
-            # TODO: marsyang1993 Change seed to
             ptb_model = PtbModel(
                 hidden_size=hidden_size,
                 vocab_size=vocab_size,
......