From 71053063a105bf64e08ae5826019c05cb7639b3b Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Fri, 23 Feb 2018 14:32:35 +0800
Subject: [PATCH] test Parallel.Do and DynRNN

---
 python/paddle/v2/fluid/layers/control_flow.py |  3 +-
 .../tests/book/test_understand_sentiment.py   | 57 +++++++++++++++++++
 2 files changed, 59 insertions(+), 1 deletion(-)

diff --git a/python/paddle/v2/fluid/layers/control_flow.py b/python/paddle/v2/fluid/layers/control_flow.py
index b9ab28a86a2..72056cc7cdf 100644
--- a/python/paddle/v2/fluid/layers/control_flow.py
+++ b/python/paddle/v2/fluid/layers/control_flow.py
@@ -652,7 +652,8 @@ class While(object):
         parent_block.append_op(
             type='while',
             inputs={
-                'X': [parent_block.var(x_name) for x_name in x_name_list],
+                'X':
+                [parent_block.var_recursive(x_name) for x_name in x_name_list],
                 'Condition': [self.cond_var]
             },
             outputs={'Out': out_vars,
diff --git a/python/paddle/v2/fluid/tests/book/test_understand_sentiment.py b/python/paddle/v2/fluid/tests/book/test_understand_sentiment.py
index af917de8e33..61f46b51c44 100644
--- a/python/paddle/v2/fluid/tests/book/test_understand_sentiment.py
+++ b/python/paddle/v2/fluid/tests/book/test_understand_sentiment.py
@@ -47,6 +47,46 @@ def convolution_net(data, label, input_dim, class_dim=2, emb_dim=32,
     return avg_cost, accuracy, prediction
 
 
+def dyn_rnn_lstm(data, label, input_dim, class_dim=2, emb_dim=32,
+                 lstm_size=128):
+    emb = fluid.layers.embedding(
+        input=data, size=[input_dim, emb_dim], is_sparse=True)
+    sentence = fluid.layers.fc(input=emb, size=lstm_size, act='tanh')
+
+    rnn = fluid.layers.DynamicRNN()
+    with rnn.block():
+        word = rnn.step_input(sentence)
+        prev_hidden = rnn.memory(value=0.0, shape=[lstm_size])
+        prev_cell = rnn.memory(value=0.0, shape=[lstm_size])
+
+        def gate_common(ipt, hidden, size):
+            gate0 = fluid.layers.fc(input=ipt, size=size, bias_attr=True)
+            gate1 = fluid.layers.fc(input=hidden, size=size, bias_attr=False)
+            return gate0 + gate1
+
+        forget_gate = fluid.layers.sigmoid(x=gate_common(word, prev_hidden,
+                                                         lstm_size))
+        input_gate = fluid.layers.sigmoid(x=gate_common(word, prev_hidden,
+                                                        lstm_size))
+        output_gate = fluid.layers.sigmoid(x=gate_common(word, prev_hidden,
+                                                         lstm_size))
+        cell_gate = fluid.layers.sigmoid(x=gate_common(word, prev_hidden,
+                                                       lstm_size))
+
+        cell = forget_gate * prev_cell + input_gate * cell_gate
+        hidden = output_gate * fluid.layers.tanh(x=cell)
+        rnn.update_memory(prev_cell, cell)
+        rnn.update_memory(prev_hidden, hidden)
+        rnn.output(hidden)
+
+    last = fluid.layers.sequence_last_step(rnn())
+    prediction = fluid.layers.fc(input=last, size=class_dim, act="softmax")
+    cost = fluid.layers.cross_entropy(input=prediction, label=label)
+    avg_cost = fluid.layers.mean(x=cost)
+    accuracy = fluid.layers.accuracy(input=prediction, label=label)
+    return avg_cost, accuracy, prediction
+
+
 def stacked_lstm_net(data,
                      label,
                      input_dim,
@@ -270,6 +270,23 @@ class TestUnderstandSentiment(unittest.TestCase):
                 use_cuda=True,
                 parallel=True)
 
+    @unittest.skip(reason='make CI faster')
+    def test_dynrnn_lstm_gpu(self):
+        with self.new_program_scope():
+            main(
+                self.word_dict,
+                net_method=dyn_rnn_lstm,
+                use_cuda=True,
+                parallel=False)
+
+    def test_dynrnn_lstm_gpu_parallel(self):
+        with self.new_program_scope():
+            main(
+                self.word_dict,
+                net_method=dyn_rnn_lstm,
+                use_cuda=True,
+                parallel=True)
+
 
 if __name__ == '__main__':
     unittest.main()
--
GitLab
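
Note on the added network (not part of the patch itself): the DynamicRNN block in dyn_rnn_lstm builds an LSTM-style step in which each gate is the sum of an fc over the current word (with bias, bias_attr=True) and an fc over the previous hidden state (without bias, bias_attr=False), passed through a sigmoid. A minimal NumPy sketch of that per-step math follows; the params dictionary and weight names are hypothetical stand-ins for the parameters the fc layers would create. Note that the candidate (cell_gate) is also squashed with sigmoid in the patch, whereas a classical LSTM applies tanh to it.

import numpy as np


def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))


def lstm_step(word, prev_hidden, prev_cell, params):
    """One DynamicRNN step as built by dyn_rnn_lstm (NumPy sketch).

    params maps a gate name to (w_x, b, w_h): the input-side fc weights and
    bias (bias_attr=True) plus the hidden-side fc weights (bias_attr=False).
    """

    def gate_common(name):
        w_x, b, w_h = params[name]
        # fc(word) + fc(prev_hidden), mirroring gate_common in the patch.
        return word @ w_x + b + prev_hidden @ w_h

    forget_gate = sigmoid(gate_common('forget'))
    input_gate = sigmoid(gate_common('input'))
    output_gate = sigmoid(gate_common('output'))
    # The patch uses sigmoid for the candidate as well; a classical LSTM
    # would apply tanh here.
    cell_gate = sigmoid(gate_common('cell'))

    cell = forget_gate * prev_cell + input_gate * cell_gate
    hidden = output_gate * np.tanh(cell)
    return hidden, cell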