未验证 提交 211940eb 编写于 作者: J Jack Zhou 提交者: GitHub

Fix rnn grad bug in cpu when dropout is zero (#37080)

* fix rnn grad bug when num_layers is set to 2 and dropout_prob is set to 0

* add more test for rnn
上级 072e7801
......@@ -965,6 +965,9 @@ class RNNCPUKernel : public framework::OpKernel<T> {
}
dropout_mask->mutable_data<uint8_t>(output->dims(), ctx.GetPlace());
auto& dev_ctx = ctx.template device_context<platform::CPUDeviceContext>();
// Pre-fill the dropout mask with 1s ("keep everything") so the backward
// pass reads a valid mask even when the dropout branch never runs
// (e.g. dropout_prob == 0 with num_layers > 1 — the bug this change fixes).
// NOTE(review): rationale taken from the commit title; the rest of
// Compute() is outside this hunk, so confirm the mask was previously
// left unwritten on that path.
math::SetConstant<platform::CPUDeviceContext, uint8_t> ones;
ones(dev_ctx, dropout_mask, static_cast<uint8_t>(1));
// init the output and allocate the memory
output->mutable_data<T>(ctx.GetPlace());
// NOTE(review): 4 gates per cell — presumably the LSTM path (input,
// forget, cell, output gates); confirm against the full kernel body.
int gate_num = 4;
......
......@@ -168,5 +168,35 @@ class TestRNNOp4(TestRNNOp):
self.is_bidirec = True
class TestRNNOp5(TestRNNOp):
    def set_attrs(self):
        # Stacked RNN: two layers; every other attribute keeps the
        # TestRNNOp default.
        self.num_layers = 2
class TestRNNOp6(TestRNNOp):
    """Exercise a two-layer, bidirectional RNN configuration."""

    def set_attrs(self):
        # Assignments are independent; order does not matter.
        self.is_bidirec = True
        self.num_layers = 2
class TestRNNOp7(TestRNNOp):
    """Exercise a two-layer, bidirectional RNN in inference (is_test) mode."""

    def set_attrs(self):
        # Assignments are independent; order does not matter.
        self.is_test = True
        self.is_bidirec = True
        self.num_layers = 2
class TestRNNOp8(TestRNNOp):
    """Exercise a two-layer, bidirectional RNN without explicit sequence lengths."""

    def set_attrs(self):
        # Assignments are independent; order does not matter.
        self.sequence_length = None
        self.is_bidirec = True
        self.num_layers = 2
class TestRNNOp9(TestRNNOp):
    def set_attrs(self):
        # Deeper stack: three layers, covering the >2-layer gradient path.
        self.num_layers = 3
if __name__ == '__main__':
    # Run all RNN op test cases when this file is executed directly.
    unittest.main()
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册