From fd7fd0c4a47e276cd4770e16f722fbaa56c7bc32 Mon Sep 17 00:00:00 2001
From: zhhsplendid
Date: Sun, 27 Sep 2020 12:40:44 +0000
Subject: [PATCH] [Dy2stat] Fix lstm bug, test=develop

---
 .../dygraph_to_static/convert_call_func.py   | 15 ++++-
 python/paddle/fluid/layers/rnn.py            |  2 +-
 .../unittests/dygraph_to_static/test_lstm.py | 56 +++++++++++++++++++
 3 files changed, 71 insertions(+), 2 deletions(-)
 create mode 100644 python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py

diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py
index 908587c0d9c..f948502d968 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py
@@ -65,7 +65,20 @@ def is_unsupported(func):
     Checks whether the func is supported by dygraph to static graph.
     """
 
-    if any(func in m.__dict__.values() for m in BUILTIN_LIKELY_MODULES):
+    func_in_builtin_modules = False
+    for m in BUILTIN_LIKELY_MODULES:
+        for v in m.__dict__.values():
+            func_in_dict = func == v
+            if isinstance(func_in_dict, list) or isinstance(func_in_dict,
+                                                            numpy.ndarray):
+                func_in_dict = any(func_in_dict)
+            if func_in_dict:
+                func_in_builtin_modules = True
+                break
+        if func_in_builtin_modules:
+            break
+
+    if func_in_builtin_modules:
         translator_logger.log(
             2,
             "Whitelist: {} is part of built-in module and does not have to be transformed.".
diff --git a/python/paddle/fluid/layers/rnn.py b/python/paddle/fluid/layers/rnn.py
index fe8ed83923e..ed5fa02f9ac 100644
--- a/python/paddle/fluid/layers/rnn.py
+++ b/python/paddle/fluid/layers/rnn.py
@@ -619,7 +619,7 @@ def _rnn_static_graph(cell,
         inputs = map_structure(rnn.step_input, inputs)
         states = map_structure(rnn.memory, initial_states)
         copy_states = map_structure(lambda x: x, states)
-        outputs, new_states = cell.call(inputs, copy_states, **kwargs)
+        outputs, new_states = cell(inputs, copy_states, **kwargs)
         assert_same_structure(states, new_states)
         if sequence_length:
             step_mask = rnn.step_input(mask)
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py
new file mode 100644
index 00000000000..e83128f045d
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import paddle
+import unittest
+from paddle import nn
+
+
+class Net(nn.Layer):
+    def __init__(self, in_channels, hidden_size):
+        super(Net, self).__init__()
+        self.lstm = nn.LSTM(
+            in_channels, hidden_size, direction='bidirectional', num_layers=2)
+
+    @paddle.jit.to_static
+    def forward(self, x):
+        x, _ = self.lstm(x)
+        return x
+
+
+class TestLstm(unittest.TestCase):
+    def run_lstm(self, to_static):
+        paddle.jit.ProgramTranslator().enable(to_static)
+
+        paddle.disable_static()
+        paddle.static.default_main_program().random_seed = 1001
+        paddle.static.default_startup_program().random_seed = 1001
+
+        net = Net(12, 2)
+        x = paddle.zeros((2, 10, 12))
+        y = net(paddle.to_tensor(x))
+        return y.numpy()
+
+    def test_lstm_to_static(self):
+        dygraph_out = self.run_lstm(to_static=False)
+        static_out = self.run_lstm(to_static=True)
+        self.assertTrue(
+            np.allclose(dygraph_out, static_out),
+            msg='dygraph_out is {}\n static_out is \n{}'.format(dygraph_out,
+                                                                static_out))
+
+
+if __name__ == "__main__":
+    unittest.main()
-- 
GitLab
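
Why the convert_call_func.py hunk takes the long way around: the old check,
`any(func in m.__dict__.values() for m in BUILTIN_LIKELY_MODULES)`, coerces the
result of each `func == value` comparison to a bool. If one of those module
dictionaries holds a numpy array, the comparison returns an elementwise array
and the coercion typically fails with "The truth value of an array with more
than one element is ambiguous". The sketch below is illustrative only and not
part of the patch; `fake_module`, `target_func`, and the two `contains_func_*`
helpers are hypothetical names, and only the second helper mirrors the patched
loop.

# Illustrative sketch, not part of the patch: fake_module, target_func, and
# both helpers are hypothetical names; only contains_func_new mirrors the
# loop added to convert_call_func.py.
import types

import numpy


def target_func():
    pass


# Simulate a "builtin likely" module whose __dict__ holds a numpy array.
fake_module = types.ModuleType('fake_module')
fake_module.some_array = numpy.array([1, 2, 3])


def contains_func_old(func, modules):
    # Pre-patch check: `in` coerces each `func == value` result to bool; a
    # multi-element numpy array cannot be coerced and, with typical numpy
    # versions, raises "The truth value of an array ... is ambiguous".
    return any(func in m.__dict__.values() for m in modules)


def contains_func_new(func, modules):
    # Patched logic: compare value by value and collapse array-like results
    # of `==` with any(), as the new loop in is_unsupported() does.
    for m in modules:
        for v in m.__dict__.values():
            result = func == v
            if isinstance(result, (list, numpy.ndarray)):
                result = any(result)
            if result:
                return True
    return False


if __name__ == '__main__':
    try:
        old = contains_func_old(target_func, [fake_module])
    except ValueError as exc:
        print('old check failed:', exc)
    else:
        print('old check (numpy-version dependent):', old)
    print('new check:', contains_func_new(target_func, [fake_module]))

The rnn.py hunk is related but separate: `cell.call(...)` assumed the legacy
fluid `RNNCell.call` interface, whereas the 2.0 `paddle.nn` cells used by
`nn.LSTM` implement `forward` and are meant to be invoked through
`Layer.__call__`. Calling `cell(inputs, copy_states, **kwargs)` works for both,
which appears to be what the static-graph LSTM path exercised by the new
test_lstm.py needed.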