From e566b94fba2a3f5c48629841cbace40af8464fa3 Mon Sep 17 00:00:00 2001
From: Yang Yu
Date: Tue, 26 Dec 2017 13:14:27 +0800
Subject: [PATCH] Revert C++ changes

---
 paddle/operators/tensor_array_read_write_op.cc | 11 -----------
 paddle/operators/while_op.cc                   | 15 +--------------
 2 files changed, 1 insertion(+), 25 deletions(-)

diff --git a/paddle/operators/tensor_array_read_write_op.cc b/paddle/operators/tensor_array_read_write_op.cc
index 59a4dac9405..2ee9bf700c2 100644
--- a/paddle/operators/tensor_array_read_write_op.cc
+++ b/paddle/operators/tensor_array_read_write_op.cc
@@ -136,17 +136,6 @@ class ReadFromArrayOp : public ArrayOp {
       auto &dev_ctx = *pool.Borrow(place);
       framework::CopyFrom(x_array[offset], place, dev_ctx, out_tensor);
       out_tensor->set_lod(x_array[offset].lod());
-      if (Input("X") == "dynamic_rnn_0_output_array_fc_0.tmp_0_0@GRAD") {
-        VLOG(10) << "Offset = " << offset;
-        if (x_array[offset].numel() != 0) {
-          auto d = x_array[offset].dims();
-          std::ostringstream sout;
-          for (int64_t i = 0; i < d[0]; ++i) {
-            sout << x_array[offset].data<float>()[0 * d[1]] << ", ";
-          }
-          VLOG(10) << "Grad = " << sout.str();
-        }
-      }
     } else {
       VLOG(10) << "offset " << offset << " >= " << x_array.size();
     }
diff --git a/paddle/operators/while_op.cc b/paddle/operators/while_op.cc
index d7c34297cd5..11ee96faad5 100644
--- a/paddle/operators/while_op.cc
+++ b/paddle/operators/while_op.cc
@@ -129,9 +129,6 @@ class WhileGradOp : public framework::OperatorBase {
         auto &og_inside =
             detail::Ref(cur_scope.Var(inside_og_name),
                         "Cannot find inside gradient %s", inside_og_name);
-
-        VLOG(10) << "OG " << outside_og_name << " Type is "
-                 << og_outside.Type().name();
         if (og_outside.Type().hash_code() ==
             typeid(framework::LoDTensor).hash_code()) {
           auto &outside_tensor = og_outside.Get<framework::LoDTensor>();
@@ -148,6 +145,7 @@ class WhileGradOp : public framework::OperatorBase {
           inside_array.resize(outside_array.size());
 
           for (size_t j = 0; j < inside_array.size(); ++j) {
+            VLOG(10) << j << " " << outside_array[j].numel();
             if (outside_array[j].numel() != 0) {
               inside_array[j].set_lod(outside_array[j].lod());
               inside_array[j].ShareDataWith(outside_array[j]);
@@ -200,17 +198,6 @@ class WhileGradOp : public framework::OperatorBase {
           auto sum_op = framework::OpRegistry::CreateOp(
               "sum", {{"X", {pg_names[param_id], new_inside_name}}},
               {{"Out", {pg_names[param_id]}}}, framework::AttributeMap{});
-
-          VLOG(10) << "Accumulate the gradient of " << pg_names[param_id];
-
-          if (pg_names[param_id] == "W@GRAD") {
-            auto &w_g = detail::Ref(cur_scope.FindVar(new_inside_name))
-                            .Get<framework::LoDTensor>();
-            VLOG(10) << "W_G is" << w_g.data<float>()[0];
-          } else {
-            VLOG(10) << pg_names[param_id];
-          }
-
           sum_op->Run(cur_scope, dev_place);
           cur_scope.Rename(new_inside_name, inside_grad_name);
         }
-- 
GitLab