Commit 0507402a authored by WenmuZhou

fix export bug

Parent 33f302e5
@@ -57,6 +57,9 @@ class AttentionHead(nn.Layer):
         else:
             targets = paddle.zeros(shape=[batch_size], dtype="int32")
             probs = None
+            char_onehots = None
+            outputs = None
+            alpha = None

             for i in range(num_steps):
                 char_onehots = self._char_to_onehot(
@@ -146,9 +149,6 @@ class AttentionLSTM(nn.Layer):
         else:
             targets = paddle.zeros(shape=[batch_size], dtype="int32")
             probs = None
-            char_onehots = None
-            outputs = None
-            alpha = None

             for i in range(num_steps):
                 char_onehots = self._char_to_onehot(
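Context for the fix: when a recognition model is exported to a static graph (typically via `paddle.jit.to_static` / `paddle.jit.save`), names that are first assigned inside the greedy-decoding loop, such as `char_onehots`, `outputs`, and `alpha`, can be flagged as undefined by the dygraph-to-static pass. Initializing them to `None` before the loop in `AttentionHead`, as this commit does, avoids that. Below is a minimal sketch of the same pattern on a hypothetical decoder; the `GreedyDecoder` layer, its sizes, and the save path are illustrative assumptions, not code from this repository.

```python
import paddle
from paddle.static import InputSpec


class GreedyDecoder(paddle.nn.Layer):
    """Toy autoregressive decoder, used only to illustrate the pattern (hypothetical)."""

    def __init__(self, hidden_size=32, num_classes=10):
        super().__init__()
        self.hidden_size = hidden_size
        self.num_classes = num_classes
        self.cell = paddle.nn.GRUCell(num_classes, hidden_size)
        self.generator = paddle.nn.Linear(hidden_size, num_classes)

    def forward(self, inputs):
        num_steps = 5
        batch_size = paddle.shape(inputs)[0]
        hidden = paddle.zeros((batch_size, self.hidden_size))
        targets = paddle.zeros(shape=[batch_size], dtype="int64")
        probs = None
        # Pre-declare the loop-carried variables before the loop, mirroring
        # the fix in this commit; without this, dygraph-to-static conversion
        # can complain that they are undefined.
        char_onehots = None
        outputs = None
        for _ in range(num_steps):
            char_onehots = paddle.nn.functional.one_hot(targets,
                                                        self.num_classes)
            outputs, hidden = self.cell(char_onehots, hidden)
            step = paddle.nn.functional.softmax(self.generator(outputs))
            step = paddle.unsqueeze(step, axis=1)  # [N, 1, num_classes]
            probs = step if probs is None else paddle.concat(
                [probs, step], axis=1)
            targets = paddle.argmax(step.squeeze(axis=1), axis=1)
        return probs


# Export sketch: save an inference model; the InputSpec shape is an assumption.
model = GreedyDecoder()
model.eval()
paddle.jit.save(
    model, "./greedy_decoder",
    input_spec=[InputSpec(shape=[None, 64], dtype="float32")])
```

Initializing to `None` keeps dygraph behavior unchanged while giving the static-graph converter a definition for every variable carried across loop iterations.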