From 9e24fa3aeba2ce9b2bd23e625019c84723031685 Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Thu, 20 Dec 2018 23:11:55 +0800
Subject: [PATCH] Polish code

test=develop
---
 paddle/fluid/imperative/layer.cc                              | 5 -----
 paddle/fluid/operators/cross_entropy_op.h                     | 2 --
 python/paddle/fluid/framework.py                              | 4 ----
 python/paddle/fluid/tests/unittests/test_imperative_mnist.py  | 2 --
 4 files changed, 13 deletions(-)

diff --git a/paddle/fluid/imperative/layer.cc b/paddle/fluid/imperative/layer.cc
index 15e237a0e..ef6d8f401 100644
--- a/paddle/fluid/imperative/layer.cc
+++ b/paddle/fluid/imperative/layer.cc
@@ -115,7 +115,6 @@ framework::Variable* CreateVariable(const std::string& name,
     varname = string::Sprintf("%s@%d", varname, id);
   }
 
-  LOG(ERROR) << "creating var " << varname;
   VLOG(3) << "creating var " << varname;
   framework::Variable* var = scope->Var(varname);
   framework::LoDTensor* tensor = var->GetMutable<framework::LoDTensor>();
@@ -183,8 +182,6 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
             << framework::vectorize(var->Get<framework::LoDTensor>().dims()).size();
   }
 
-  LOG(ERROR) << "grad_op_desc_" << grad_op_desc_->Proto()->DebugString();
-
   for (const std::string& outvar : grad_op_desc_->OutputArgumentNames()) {
     VLOG(3) << "op grad output var " << outvar;
     block_->FindRecursiveOrCreateVar(outvar);
@@ -194,8 +191,6 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
       framework::VarDesc* var_desc = block_->FindVar(outvar);
       if (var_desc->GetType() == framework::proto::VarType::LOD_TENSOR) {
         var->GetMutable<framework::LoDTensor>();
-        // framework::Tensor* tensor = var->GetMutable<framework::Tensor>();
-        // tensor->mutable_data(platform::CPUPlace());
       } else {
         LOG(ERROR) << "tracer doesn't support yet";
       }
diff --git a/paddle/fluid/operators/cross_entropy_op.h b/paddle/fluid/operators/cross_entropy_op.h
index 2500c0443..f123e1154 100644
--- a/paddle/fluid/operators/cross_entropy_op.h
+++ b/paddle/fluid/operators/cross_entropy_op.h
@@ -110,8 +110,6 @@ class CrossEntropyGradientOpKernel : public framework::OpKernel<T> {
     auto* dy = ctx.Input<Tensor>(framework::GradVarName("Y"));
     auto* label = ctx.Input<Tensor>("Label");
     auto* dx = ctx.Output<Tensor>(framework::GradVarName("X"));
-    LOG(ERROR) << "CROSS ENTROPY GRAD DX: "
-               << ctx.op().Output(framework::GradVarName("X"));
     T* dx_data = dx->mutable_data<T>(ctx.GetPlace());
 
     // Following computation only depends on the last dimension size. So it's
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index dde08a79d..3dc23bd06 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -1281,8 +1281,6 @@ class Block(object):
         """
         op_desc = self.desc.append_op()
         op = Operator(block=self, desc=op_desc, *args, **kwargs)
-        print("append_op", kwargs.get("type"), kwargs.get("stop_gradient",
-                                                           False))
         if _in_imperative_mode():
             _imperative_tracer().trace(op.iop, [v._ivar for v in op.inputs],
                                        [v._ivar for v in op.outputs], self.desc,
@@ -1336,8 +1334,6 @@ class Block(object):
     def _prepend_op(self, *args, **kwargs):
         op_desc = self.desc._prepend_op()
         op = Operator(self, op_desc, *args, **kwargs)
-        print("prepend_op", kwargs.get("type"), kwargs.get("stop_gradient",
-                                                            False))
         if _in_imperative_mode():
             _imperative_tracer().trace(op.iop, [v._ivar for v in op.inputs],
                                        [v._ivar for v in op.outputs], self.desc,
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_mnist.py b/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
index 85b613bdd..9d1e07999 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
@@ -115,9 +115,7 @@ class TestImperativeMnist(unittest.TestCase):
                 label._stop_gradient = True
 
                 predict = mnist(img)
-                print(predict.shape, predict.dtype, label.shape, label.dtype)
                 out = fluid.layers.cross_entropy(predict, label)
-                print(out.shape, out.dtype)
                 out._backward()
                 filter_grad = mnist._simple_img_conv_pool_1._conv2d._filter_param._gradient(
                 )
-- 
GitLab
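
For context: the LOG(ERROR) and print() calls deleted above were unconditional debug traces, while the VLOG(3) calls that remain follow glog's verbose-logging convention and stay silent unless a verbosity threshold is raised at run time. A minimal standalone sketch of that convention, assuming plain glog (the function name and the variable name below are made up for illustration, not taken from the patch):

// Sketch only, not part of the patch: glog verbose logging as used by the
// VLOG(3) traces kept in layer.cc. Output appears only when verbosity is
// raised at run time, e.g. GLOG_v=3 GLOG_logtostderr=1 ./a.out
#include <glog/logging.h>

#include <string>

void CreateVariableSketch(const std::string& varname) {
  // Silent by default; printed only when the verbose level is >= 3.
  VLOG(3) << "creating var " << varname;
  // A LOG(ERROR) here would print on every run, which is why the duplicated
  // debug LOG(ERROR) lines are dropped by this patch.
}

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);  // glog picks up GLOG_* env variables
  CreateVariableSketch("fc_0.w_0@1");
  return 0;
}

The same switch should apply when running the touched code: setting GLOG_v=3 before running the unit test is the usual way to see the surviving VLOG(3) output without keeping always-on LOG(ERROR)/print statements.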