diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py index 908587c0d9c3112b14d6fe0efadcb673ecd49827..f948502d9683552f44e1e39e0eec7b9676c776c1 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_call_func.py @@ -65,7 +65,20 @@ def is_unsupported(func): Checks whether the func is supported by dygraph to static graph. """ - if any(func in m.__dict__.values() for m in BUILTIN_LIKELY_MODULES): + func_in_builtin_modules = False + for m in BUILTIN_LIKELY_MODULES: + for v in m.__dict__.values(): + func_in_dict = func == v + if isinstance(func_in_dict, list) or isinstance(func_in_dict, + numpy.ndarray): + func_in_dict = any(func_in_dict) + if func_in_dict: + func_in_builtin_modules = True + break + if func_in_builtin_modules: + break + + if func_in_builtin_modules: translator_logger.log( 2, "Whitelist: {} is part of built-in module and does not have to be transformed.". 
diff --git a/python/paddle/fluid/layers/rnn.py b/python/paddle/fluid/layers/rnn.py index fe8ed83923e88be2a0c98a8a539f26500b43b7cb..ed5fa02f9ac6c72b3255d90c8ce0690728f2c52b 100644 --- a/python/paddle/fluid/layers/rnn.py +++ b/python/paddle/fluid/layers/rnn.py @@ -619,7 +619,7 @@ def _rnn_static_graph(cell, inputs = map_structure(rnn.step_input, inputs) states = map_structure(rnn.memory, initial_states) copy_states = map_structure(lambda x: x, states) - outputs, new_states = cell.call(inputs, copy_states, **kwargs) + outputs, new_states = cell(inputs, copy_states, **kwargs) assert_same_structure(states, new_states) if sequence_length: step_mask = rnn.step_input(mask) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py new file mode 100644 index 0000000000000000000000000000000000000000..e83128f045d8b94e8ec335c5dcc6ad8ca07548e4 --- /dev/null +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py @@ -0,0 +1,56 @@ +# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import unittest

import numpy as np
import paddle
from paddle import nn


class Net(nn.Layer):
    """A two-layer bidirectional LSTM whose forward pass is declared
    convertible to a static graph via ``@paddle.jit.to_static``."""

    def __init__(self, in_channels, hidden_size):
        super(Net, self).__init__()
        self.lstm = nn.LSTM(
            in_channels, hidden_size, direction='bidirectional', num_layers=2)

    @paddle.jit.to_static
    def forward(self, x):
        # The LSTM returns (output, final_states); only the output is used.
        out, _ = self.lstm(x)
        return out


class TestLstm(unittest.TestCase):
    """Verify the LSTM network produces identical results in dygraph mode
    and after dygraph-to-static translation."""

    def run_lstm(self, to_static):
        # Turn dygraph-to-static translation on or off for this run.
        paddle.jit.ProgramTranslator().enable(to_static)

        paddle.disable_static()
        # Pin the seeds so both runs start from the same parameter
        # initialization and are therefore comparable.
        paddle.static.default_main_program().random_seed = 1001
        paddle.static.default_startup_program().random_seed = 1001

        net = Net(12, 2)
        data = paddle.zeros((2, 10, 12))
        result = net(paddle.to_tensor(data))
        return result.numpy()

    def test_lstm_to_static(self):
        dygraph_out = self.run_lstm(to_static=False)
        static_out = self.run_lstm(to_static=True)
        self.assertTrue(
            np.allclose(dygraph_out, static_out),
            msg='dygraph_out is {}\n static_out is \n{}'.format(dygraph_out,
                                                                static_out))


if __name__ == "__main__":
    unittest.main()