Commit 9e24fa3a authored by minqiyang

Polish code

test=develop
Parent 1c116462
......@@ -115,7 +115,6 @@ framework::Variable* CreateVariable(const std::string& name,
varname = string::Sprintf("%s@%d", varname, id);
}
LOG(ERROR) << "creating var " << varname;
VLOG(3) << "creating var " << varname;
framework::Variable* var = scope->Var(varname);
framework::LoDTensor* tensor = var->GetMutable<framework::LoDTensor>();
......@@ -183,8 +182,6 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
<< framework::vectorize(var->Get<framework::LoDTensor>().dims()).size();
}
LOG(ERROR) << "grad_op_desc_" << grad_op_desc_->Proto()->DebugString();
for (const std::string& outvar : grad_op_desc_->OutputArgumentNames()) {
VLOG(3) << "op grad output var " << outvar;
block_->FindRecursiveOrCreateVar(outvar);
......@@ -194,8 +191,6 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
framework::VarDesc* var_desc = block_->FindVar(outvar);
if (var_desc->GetType() == framework::proto::VarType::LOD_TENSOR) {
var->GetMutable<framework::LoDTensor>();
-  // framework::Tensor* tensor = var->GetMutable<framework::LoDTensor>();
-  // tensor->mutable_data(platform::CPUPlace());
} else {
LOG(ERROR) << "tracer doesn't support yet";
}
......
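The deletions above strip ad-hoc LOG(ERROR) debugging; the VLOG(3) calls that remain follow glog's verbose-logging convention, where messages are compiled in but emitted only when the process runs at a high enough verbosity. A minimal standalone sketch of that convention (plain glog, not Paddle's logging wrappers):

    #include <glog/logging.h>

    int main(int argc, char* argv[]) {
      google::InitGoogleLogging(argv[0]);
      // LOG(ERROR) is always emitted and should mark real failures.
      LOG(ERROR) << "this always prints";
      // VLOG(3) stays silent unless verbosity is raised, e.g. by running
      // with GLOG_v=3 in the environment, which is why it suits noisy
      // per-variable trace output like "creating var".
      VLOG(3) << "creating var foo@0";
      return 0;
    }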
......@@ -110,8 +110,6 @@ class CrossEntropyGradientOpKernel : public framework::OpKernel<T> {
auto* dy = ctx.Input<Tensor>(framework::GradVarName("Y"));
auto* label = ctx.Input<Tensor>("Label");
auto* dx = ctx.Output<Tensor>(framework::GradVarName("X"));
LOG(ERROR) << "CROSS ENTROPY GRAD DX: "
<< ctx.op().Output(framework::GradVarName("X"));
T* dx_data = dx->mutable_data<T>(ctx.GetPlace());
// Following computation only depends on the last dimension size. So it's
......
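For reference, the kernel touched above fills dX for the hard-label case: each row of dX is zero except at the label index, where the value is -dY[i] / X[i][label[i]], the derivative of -log(x) scaled by the upstream gradient. A hypothetical standalone sketch of that computation (names and memory layout are assumptions, not the Paddle kernel verbatim):

    #include <algorithm>
    #include <cstdint>

    // Hypothetical sketch of a hard-label cross-entropy gradient.
    // x:  n x d predicted probabilities, row-major.
    // label: n class indices.  dy: n upstream gradients.
    // dx: n x d output gradient buffer.
    void CrossEntropyGradCPU(const float* x, const int64_t* label,
                             const float* dy, float* dx, int n, int d) {
      std::fill(dx, dx + n * d, 0.0f);
      for (int i = 0; i < n; ++i) {
        const int64_t j = label[i];
        // d/dx of -log(x[i][j]) is -1 / x[i][j], scaled by dy[i].
        dx[i * d + j] = -dy[i] / x[i * d + j];
      }
    }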
......@@ -1281,8 +1281,6 @@ class Block(object):
"""
op_desc = self.desc.append_op()
op = Operator(block=self, desc=op_desc, *args, **kwargs)
print("append_op", kwargs.get("type"), kwargs.get("stop_gradient",
False))
if _in_imperative_mode():
_imperative_tracer().trace(op.iop, [v._ivar for v in op.inputs],
[v._ivar for v in op.outputs], self.desc,
......@@ -1336,8 +1334,6 @@ class Block(object):
def _prepend_op(self, *args, **kwargs):
op_desc = self.desc._prepend_op()
op = Operator(self, op_desc, *args, **kwargs)
print("prepend_op", kwargs.get("type"), kwargs.get("stop_gradient",
False))
if _in_imperative_mode():
_imperative_tracer().trace(op.iop, [v._ivar for v in op.inputs],
[v._ivar for v in op.outputs], self.desc,
......
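The _imperative_tracer().trace(...) hook above records each freshly appended op so that out._backward() can later replay gradients in reverse. A miniature, purely illustrative analogue of that mechanism (a hypothetical tape, not Paddle's actual Tracer API):

    #include <functional>
    #include <vector>

    // Hypothetical miniature "tracer": each traced op pushes a backward
    // closure onto a tape; Backward() replays the tape in reverse
    // recording order, the same shape of mechanism the imperative
    // tracer implements in C++.
    class Tape {
     public:
      void Trace(std::function<void()> backward_step) {
        steps_.push_back(std::move(backward_step));
      }
      void Backward() {
        for (auto it = steps_.rbegin(); it != steps_.rend(); ++it) (*it)();
      }

     private:
      std::vector<std::function<void()>> steps_;
    };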
......@@ -115,9 +115,7 @@ class TestImperativeMnist(unittest.TestCase):
label._stop_gradient = True
predict = mnist(img)
-                print(predict.shape, predict.dtype, label.shape, label.dtype)
out = fluid.layers.cross_entropy(predict, label)
-                print(out.shape, out.dtype)
out._backward()
filter_grad = mnist._simple_img_conv_pool_1._conv2d._filter_param._gradient(
)
......