Commit 14db0680 authored by lujun

merge conflict, test=develop

Parent 92c8ac8a
@@ -481,7 +481,7 @@ def dynamic_lstm(input,
             forward, _ = fluid.layers.dynamic_lstm(
                 input=forward_proj, size=hidden_dim * 4, use_peepholes=False)
     """
-    assert _in_dygraph_mode(
+    assert in_dygraph_mode(
     ) is not True, "please use lstm instead of dynamic_lstm in dygraph mode!"
     assert bias_attr is not False, "bias_attr should not be False in dynamic_lstmp."
     helper = LayerHelper('lstm', **locals())
@@ -867,7 +867,7 @@ def dynamic_lstmp(input,
                 proj_activation="tanh")
     """
-    assert _in_dygraph_mode(
+    assert in_dygraph_mode(
     ) is not True, "please use lstm instead of dynamic_lstmp in dygraph mode!"
     assert bias_attr is not False, "bias_attr should not be False in dynamic_lstmp."
@@ -1041,7 +1041,7 @@ def dynamic_gru(input,
             hidden = fluid.layers.dynamic_gru(input=x, size=hidden_dim)
     """
-    assert _in_dygraph_mode(
+    assert in_dygraph_mode(
     ) is not True, "please use gru instead of dynamic_gru in dygraph mode!"
     helper = LayerHelper('gru', **locals())
@@ -1760,7 +1760,7 @@ def sequence_conv(input,
         Variable: output of sequence_conv
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_conv', **locals())
     dtype = helper.input_dtype()
@@ -1821,7 +1821,7 @@ def sequence_softmax(input, use_cudnn=False, name=None):
                 dtype='float32', lod_level=1)
             x_sequence_softmax = fluid.layers.sequence_softmax(input=x)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_softmax', **locals())
     dtype = helper.input_dtype()
@@ -2315,7 +2315,7 @@ def sequence_pool(input, pool_type, is_test=False):
             last_x = fluid.layers.sequence_pool(input=x, pool_type='last')
             first_x = fluid.layers.sequence_pool(input=x, pool_type='first')
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_pool', **locals())
     dtype = helper.input_dtype()
@@ -2356,7 +2356,7 @@ def sequence_concat(input, name=None):
             out = fluid.layers.sequence_concat(input=[seq1, seq2, seq3])
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_concat', **locals())
     out = helper.create_variable_for_type_inference(dtype=helper.input_dtype())
@@ -2485,7 +2485,7 @@ def sequence_slice(input, offset, length, name=None):
             subseqs = fluid.layers.sequence_slice(input=seqs, offset=offset,
                 length=length)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper("sequence_slice", **locals())
     dtype = helper.input_dtype()
@@ -3946,7 +3946,7 @@ def sequence_expand(x, y, ref_level=-1, name=None):
                 dtype='float32', lod_level=1)
             out = layers.sequence_expand(x=x, y=y, ref_level=0)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_expand', input=x, **locals())
     dtype = helper.input_dtype()
@@ -4014,7 +4014,7 @@ def sequence_expand_as(x, y, name=None):
                 dtype='float32', lod_level=1)
             out = layers.sequence_expand_as(x=x, y=y)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_expand_as', input=x, **locals())
     dtype = helper.input_dtype()
@@ -4062,7 +4062,7 @@ def sequence_pad(x, pad_value, maxlen=None, name=None):
             out = fluid.layers.sequence_pad(x=x, pad_value=pad_value)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_pad', input=x, **locals())
     dtype = helper.input_dtype()
@@ -4130,7 +4130,7 @@ def sequence_unpad(x, length, name=None):
             out = fluid.layers.sequence_unpad(x=x, length=len)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_unpad', input=x, **locals())
     dtype = helper.input_dtype()
@@ -5305,7 +5305,7 @@ def sequence_reshape(input, new_dim):
             x = fluid.layers.data(shape=[5, 20], dtype='float32', lod_level=1)
             x_reshaped = fluid.layers.sequence_reshape(input=x, new_dim=10)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_reshape', **locals())
     out = helper.create_variable_for_type_inference(helper.input_dtype())
@@ -5841,7 +5841,7 @@ def im2sequence(input,
                 input=layer, stride=[1, 1], filter_size=[2, 2])
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     if isinstance(filter_size, int):
@@ -7620,7 +7620,7 @@ def sequence_scatter(input, index, updates, name=None):
             output = fluid.layers.sequence_scatter(input, index, updates)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_scatter', **locals())
     dtype = helper.input_dtype()
@@ -8710,7 +8710,7 @@ def sequence_enumerate(input, win_size, pad_value=0, name=None):
             x = fluid.layers.data(shape[30, 1], dtype='int32', lod_level=1)
             out = fluid.layers.sequence_enumerate(input=x, win_size=3, pad_value=0)
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_enumerate', **locals())
     out = helper.create_variable_for_type_inference(
@@ -8751,7 +8751,7 @@ def sequence_mask(x, maxlen=None, dtype='int64', name=None):
         Variable: The output sequence mask.
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper('sequence_mask', **locals())
@@ -9803,7 +9803,7 @@ def sequence_reverse(x, name=None):
     Returns:
         out(${y_type}): ${y_comment}
     """
-    assert not _in_dygraph_mode(), (
+    assert not in_dygraph_mode(), (
         "sequence layer is not supported in dygraph mode yet.")
     helper = LayerHelper("sequence_reverse", **locals())
     if name is None:
......
@@ -907,7 +907,7 @@ class TestBook(LayerTest):
             if isinstance(dy_result, tuple):
                 dy_result = dy_result[0]
-            self.assertTrue(np.array_equal(static_result[0], dy_result._numpy()))
+            self.assertTrue(np.array_equal(static_result[0], dy_result.numpy()))
     def _get_np_data(self, shape, dtype, append_batch_size=True):
         np.random.seed(self.seed)
......
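The pattern the diff applies is uniform: every static-graph sequence layer asserts that dygraph (imperative) mode is off before building its ops, and the commit switches that guard from the private `_in_dygraph_mode` helper to the public `in_dygraph_mode`. Below is a minimal sketch of the guard, assuming `in_dygraph_mode` is importable from `paddle.fluid.framework` as in the Fluid codebase of this era; `my_sequence_layer` is a hypothetical illustration, not a function from this commit.

# Sketch only: shows the dygraph guard used by the sequence layers above.
# Assumption: paddle.fluid of this era; my_sequence_layer is hypothetical.
from paddle.fluid.framework import in_dygraph_mode
from paddle.fluid.layer_helper import LayerHelper


def my_sequence_layer(input):
    # Sequence layers operate on LoDTensor in the static graph, so they
    # refuse to build when imperative (dygraph) mode is active.
    assert not in_dygraph_mode(), (
        "sequence layer is not supported in dygraph mode yet.")
    helper = LayerHelper('my_sequence_layer', **locals())
    out = helper.create_variable_for_type_inference(dtype=helper.input_dtype())
    # ... append the actual sequence op to the program here ...
    return out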