diff --git a/paddle/fluid/imperative/layer.cc b/paddle/fluid/imperative/layer.cc
index 2176ac78bb6c9f3cf877dd9f66cf558e6f87711d..612503768079472ba233ee3fcd43a47fdba9a0cc 100644
--- a/paddle/fluid/imperative/layer.cc
+++ b/paddle/fluid/imperative/layer.cc
@@ -75,16 +75,6 @@ class Autograd {
   }
 
  private:
-  void AccumGrads(int grad_idx, Variable* grad,
-                  std::vector<Variable*>* op_grads) {
-    if (!(*op_grads)[grad_idx]) {
-      // FIXME(panyx0718): This should be a deep copy.
-      (*op_grads)[grad_idx] = grad;
-      return;
-    }
-    AddTo(grad, (*op_grads)[grad_idx]);
-  }
-
   std::map<OpBase*, int> ComputeDepCounts(OpBase* op) {
     std::map<OpBase*, int> ret;
 
@@ -108,14 +98,6 @@ class Autograd {
     return ret;
   }
 
-  std::vector<Variable*> CreateOpGrads(size_t count) {
-    std::vector<Variable*> op_grads;
-    for (size_t i = 0; i < count; ++i) {
-      op_grads.push_back(nullptr);
-    }
-    return op_grads;
-  }
-
   framework::Scope* scope_;
 };
 
@@ -133,7 +115,7 @@ framework::Variable* CreateVariable(const std::string& name,
     varname = string::Sprintf("%s@%d", varname, id);
   }
 
-  LOG(ERROR) << "creating var " << varname;
+  VLOG(3) << "creating var " << varname;
   framework::Variable* var = scope->Var(varname);
   framework::LoDTensor* tensor = var->GetMutable<framework::LoDTensor>();
 
@@ -165,22 +147,25 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
 
   for (const std::string& grad_invar : grad_op_desc_->InputArgumentNames()) {
     if (grad_to_var_->find(grad_invar) == grad_to_var_->end()) {
+      // grad op inputs can be forward inputs, so not in grad_to_var.
      continue;
     }
-    LOG(ERROR) << "op grad in var " << grad_invar;
+    VLOG(3) << "op grad in var " << grad_invar;
     block_->FindRecursiveOrCreateVar(grad_invar);
     framework::Variable* var = scope->Var(grad_invar);
     const std::string& invar = grad_to_var_->at(grad_invar);
     for (VarBase* varbase : *output_vars_) {
+      // Use the accumulated grads_ by sharing the input with grads_.
       if (varbase->var_desc_->Name() == invar) {
         var->GetMutable<framework::LoDTensor>()->ShareDataWith(
             varbase->grads_->Get<framework::LoDTensor>());
+        break;
       }
     }
   }
 
   for (const std::string& outvar : grad_op_desc_->OutputArgumentNames()) {
-    LOG(ERROR) << "grad outvar " << outvar;
+    VLOG(3) << "grad outvar " << outvar;
     block_->FindRecursiveOrCreateVar(outvar);
     framework::Variable* var = scope->Var(outvar);
     if (!var->IsInitialized()) {
@@ -199,6 +184,7 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
 
   opbase->Run(*scope, platform::CPUPlace());
 
+  // `ret` matches exactly with `input_vars_` of forward op.
   std::vector<Variable*> ret;
   for (size_t i = 0; i < input_vars_->size(); ++i) {
     bool found = false;
@@ -207,7 +193,7 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
       VarBase* origin_var = (*input_vars_)[i];
       std::string orig_var = grad_to_var_->at(outvar);
       PADDLE_ENFORCE(origin_var->var_desc_->Name() == orig_var);
-      LOG(ERROR) << "apply grad " << outvar << " with origin " << orig_var;
+      VLOG(3) << "apply grad " << outvar << " with origin " << orig_var;
       origin_var->ApplyGrad(scope, var);
       found = true;
       ret.push_back(var);
diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index 0cf0c27a6a9b9f9fc78dfc331977f9ac20e87cb9..85a71ca83d21ed2595ddbe684300a46c05fed3af 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -36,10 +36,7 @@ class VarBase {
         var_(nullptr),
         grads_(nullptr) {}
 
-  virtual ~VarBase() {
-    LOG(ERROR) << "deleting var";
-    LOG(ERROR) << "done deleting var";
-  }
+  virtual ~VarBase() {}
 
   void ApplyGrad(framework::Scope* scope, framework::Variable* grad);
 
diff --git a/paddle/fluid/imperative/tracer.h b/paddle/fluid/imperative/tracer.h
index ff87993ffcaf88790cbae80a850cc8b8aabd1347..433d07c0e5aa0986ab1e9fe349ef865d2851c0c0 100644
--- a/paddle/fluid/imperative/tracer.h
+++ b/paddle/fluid/imperative/tracer.h
@@ -55,7 +55,7 @@ class Tracer {
              framework::BlockDesc* block) {
     framework::Scope* scope = GetScope(block);
     framework::OpDesc* op_desc = op->op_desc_;
-    LOG(ERROR) << "tracer tracing " << op_desc->Type();
+    VLOG(3) << "tracer tracing " << op_desc->Type();
     op_desc->InferShape(*block);
     op_desc->InferVarType(block);
     std::unique_ptr<framework::OperatorBase> op_base =
diff --git a/tools/print_signatures.py b/tools/print_signatures.py
index e2805c4e7e6aa26a5865b64a874feef672bf9b36..90c6626ecd63a9efeb725d136b6177cb03fab51d 100644
--- a/tools/print_signatures.py
+++ b/tools/print_signatures.py
@@ -27,6 +27,8 @@ import pydoc
 
 member_dict = collections.OrderedDict()
 
+experimental_namespace = {"paddle.fluid.imperative"}
+
 
 def visit_member(parent_name, member):
     cur_name = ".".join([parent_name, member.__name__])
@@ -50,6 +52,8 @@ def visit_member(parent_name, member):
 
 
 def visit_all_module(mod):
+    if (mod.__name__ in experimental_namespace):
+        return
     for member_name in (
             name
             for name in (mod.__all__ if hasattr(mod, "__all__") else dir(mod))
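Conceptually, the removed `AccumGrads`/`CreateOpGrads` helpers kept per-op gradient slots; after this change the accumulated gradient lives on each `VarBase::grads_` and is fed to the backward op by sharing it as an input. The sketch below is a minimal, framework-free illustration of that per-variable accumulation idea only: `VarSketch` and `AccumulateGrad` are made-up names, and `std::vector<float>` merely stands in for a LoDTensor; it is not Paddle code.

```cpp
// Sketch: each variable owns a persistent gradient buffer, and every backward
// contribution is added into that buffer instead of a per-op grad slot.
#include <cassert>
#include <iostream>
#include <vector>

struct VarSketch {
  std::vector<float> value;
  std::vector<float> grads;  // persistent, accumulated across backward ops
};

// Accumulate an incoming gradient into the variable's buffer in place.
void AccumulateGrad(const std::vector<float>& incoming, VarSketch* var) {
  if (var->grads.empty()) {
    var->grads = incoming;  // first contribution initializes the buffer
    return;
  }
  assert(var->grads.size() == incoming.size());
  for (size_t i = 0; i < incoming.size(); ++i) {
    var->grads[i] += incoming[i];
  }
}

int main() {
  VarSketch x;
  x.value = {1.f, 2.f, 3.f};
  // Two backward ops both contribute a gradient w.r.t. x; the buffer sums them.
  AccumulateGrad({0.1f, 0.1f, 0.1f}, &x);
  AccumulateGrad({0.2f, 0.2f, 0.2f}, &x);
  for (float g : x.grads) std::cout << g << " ";  // prints 0.3 0.3 0.3
  std::cout << "\n";
  return 0;
}
```

Separately, the `LOG(ERROR)` to `VLOG(3)` switches mean these trace messages are silent by default and only appear when glog verbosity is raised (for example by setting the `GLOG_v` environment variable to 3 or higher), which fits their debugging-only purpose.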