From a61909ff47e559726bd51ad8694779b372e62636 Mon Sep 17 00:00:00 2001
From: peizhilin
Date: Wed, 14 Nov 2018 10:32:22 +0800
Subject: [PATCH] test=develop

---
 .../fluid/framework/ir/attention_lstm_fuse_pass.cc | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/paddle/fluid/framework/ir/attention_lstm_fuse_pass.cc b/paddle/fluid/framework/ir/attention_lstm_fuse_pass.cc
index ecefab32bbe..d61ff04bc72 100644
--- a/paddle/fluid/framework/ir/attention_lstm_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/attention_lstm_fuse_pass.cc
@@ -212,11 +212,11 @@ void PrepareLSTMWeight(const LoDTensor& W_forget_w0,
 
   float* out_data = out->mutable_data<float>(platform::CPUPlace());
   std::array<const float*, 4> tensors{
-      {W_forget_w0.data<float>(), W_input_w0.data<float>(),
-       W_output_w0.data<float>(), W_cell_w0.data<float>()}};
+      W_forget_w0.data<float>(), W_input_w0.data<float>(),
+      W_output_w0.data<float>(), W_cell_w0.data<float>()};
   std::array<const float*, 4> tensors1{
-      {W_forget_w1.data<float>(), W_input_w1.data<float>(),
-       W_output_w1.data<float>(), W_cell_w1.data<float>()}};
+      W_forget_w1.data<float>(), W_input_w1.data<float>(),
+      W_output_w1.data<float>(), W_cell_w1.data<float>()};
 
   for (int row = 0; row < D; row++) {
     for (int col = 0; col < 4; col++) {
@@ -239,8 +239,8 @@ void PrepareLSTMBias(const LoDTensor& B_forget, const LoDTensor& B_input,
                      const LoDTensor& B_output, const LoDTensor& B_cell,
                      LoDTensor* out) {
   std::array<const float*, 4> tensors{
-      {B_forget.data<float>(), B_input.data<float>(), B_output.data<float>(),
-       B_cell.data<float>()}};
+      B_forget.data<float>(), B_input.data<float>(), B_output.data<float>(),
+      B_cell.data<float>()};
 
   PADDLE_ENFORCE_EQ(B_forget.dims().size(), 1);
   int D = B_forget.dims()[0];
--
GitLab
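
For context: std::array is an aggregate wrapping a single built-in array, so the nested-brace initializers removed here and the single-brace, brace-elided initializers added by the patch are equivalent. Below is a minimal standalone sketch of that difference; it is not part of the patch and uses hypothetical local floats in place of the pass's LoDTensor data<float>() buffers.

#include <array>
#include <cstdio>

int main() {
  float a = 1.f, b = 2.f, c = 3.f, d = 4.f;

  // Nested braces: outer pair for std::array, inner pair for its member array.
  std::array<const float*, 4> nested{{&a, &b, &c, &d}};

  // Brace elision: the inner pair may be omitted, the form the patch switches to.
  std::array<const float*, 4> elided{&a, &b, &c, &d};

  std::printf("%f %f\n", *nested[0], *elided[3]);
  return 0;
}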