Commit 5b7633a5 authored by qiaolongfei

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into public_to_protected

@@ -30,6 +30,7 @@ static void ForEachVarName(const Map& names, T callback) {
  }
}
+// returns whether all the names + suffixes are in the set
static bool AllInSet(
    const std::map<std::string, std::vector<std::string>>& names,
    const std::string& suffix, const std::unordered_set<std::string>& set) {
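Note: the body of AllInSet is elided by this hunk. Below is a minimal sketch of what the helper plausibly does, reconstructed only from the signature and the comment above — not necessarily the committed body:

#include <map>
#include <string>
#include <unordered_set>
#include <vector>

static bool AllInSet(
    const std::map<std::string, std::vector<std::string>>& names,
    const std::string& suffix, const std::unordered_set<std::string>& set) {
  for (auto& name_pair : names) {
    for (auto& name : name_pair.second) {
      if (set.find(name + suffix) == set.end()) {
        return false;  // some name + suffix is missing from the set
      }
    }
  }
  return true;  // every name + suffix is present
}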
@@ -48,7 +49,7 @@ static std::shared_ptr<OperatorBase> NOP() {
  return net_op;
}
-// Get backward operator from a forward operator, recursively implementation.
+// Get backward operator from a forward operator, a recursive implementation.
//
// no_grad_names the gradient variable names that need no gradient calculation.
//
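Note: the body of NOP() is mostly elided above; judging from its `return net_op;` tail, it builds an empty NetOp, roughly as in this sketch (the `@NOP@` type string is an assumption, and the Paddle headers of that era are required):

static std::shared_ptr<OperatorBase> NOP() {
  auto net_op = std::make_shared<operators::NetOp>();  // an empty network
  net_op->SetType("@NOP@");  // assumed placeholder type name
  net_op->CompleteAddOp();   // finalize the (empty) op list
  return net_op;
}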
@@ -56,28 +57,31 @@ static std::shared_ptr<OperatorBase> NOP() {
// BackwardRecursive. Use `uid = uniq_id++;` to get a unique index, and
// pass `uniq_id` through the recursive calls.
//
-// returns The backward operator. For simple situation, it is a simple
-// operator. For complex situation, it is a NetOp.
+// returns The backward operator. In a simple situation, it may be a simple
+// operator; in a complex situation, it may be a NetOp.
//
// See Backward.h for details
static std::shared_ptr<OperatorBase> BackwardRecursive(
    const OperatorBase& forwardOp,
    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id);
std::shared_ptr<OperatorBase> BackwardRecursive(
    const OperatorBase& forwardOp,
    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id) {
  // If none of the input gradients of the forward operator need to be
  // calculated, just return a NOP. We do not return a null pointer because a
  // NOP costs little to compute, and it keeps the surrounding logic simple.
-  if (AllInSet(forwardOp.Inputs(), kGradVarSuffix, no_grad_names)) {
+  if (AllInSet(forwardOp.Inputs() /*names*/, kGradVarSuffix /*suffix*/,
+               no_grad_names /*set*/)) {
    return NOP();
  }
  // If none of the output gradients of the forward operator need to be
  // calculated, then none of its input gradients can be computed at all; put
  // them all into the `no_grad_names` set and return a NOP.
-  if (AllInSet(forwardOp.Outputs(), kGradVarSuffix, no_grad_names)) {
-    ForEachVarName(forwardOp.Inputs(),
+  if (AllInSet(forwardOp.Outputs() /*names*/, kGradVarSuffix /*suffix*/,
+               no_grad_names /*set*/)) {
+    ForEachVarName(forwardOp.inputs_,
                   [&no_grad_names](const std::string& name) -> bool {
                     no_grad_names.insert(GradVarName(name));
                     return false;
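Note: a worked example of the two pruning rules above, assuming `kGradVarSuffix` is `"@GRAD"` and `GradVarName(n)` simply appends it; the forward operator and variable names are made up:

#include <string>
#include <unordered_set>

int main() {
  // Hypothetical forward op: {X, W} -> {Y}, with only Y@GRAD marked no-grad.
  std::unordered_set<std::string> no_grad_names{"Y@GRAD"};
  // All output gradients (just Y@GRAD) are in the set, so the second rule
  // fires: every input gradient is unreachable, joins the set, and the
  // recursion returns a NOP for this operator.
  no_grad_names.insert("X@GRAD");  // GradVarName("X")
  no_grad_names.insert("W@GRAD");  // GradVarName("W")
  return 0;
}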
@@ -93,11 +97,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
  auto& forwardNet = static_cast<const operators::NetOp&>(forwardOp);
  // Map from output gradient variable name to operator's indices in
-  // backward net. That operator generates that variable.
+  // backward net's ops_. That operator generates that variable.
  std::unordered_map<std::string, std::vector<size_t>> dup_output_ops;
  size_t local_op_id = 0;
-  // reversely travel forwardNet
+  // traverse forwardNet in reverse order and collect all duplicate outputs.
  for (auto it = forwardNet.ops_.rbegin(); it != forwardNet.ops_.rend();
       ++it, ++local_op_id) {
    auto fwd = *it;
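Note: a hypothetical trace of what the traversal above collects. Suppose the backward net has three ops, and `ops_[0]` and `ops_[2]` both write `w@GRAD`:

#include <string>
#include <unordered_map>
#include <vector>

int main() {
  // Expected contents of dup_output_ops after the reverse traversal:
  std::unordered_map<std::string, std::vector<size_t>> dup_output_ops{
      {"w@GRAD", {0, 2}},  // duplicated: will be renamed and summed
      {"b@GRAD", {1}},     // unique writer: left untouched
  };
  return 0;
}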
@@ -112,35 +116,41 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
  // Get a unique ID for this call.
  auto uid = uniq_id++;
-  // TODO(dzh): more comment
+  // Multiple operators may share the same output (y, for example) and would
+  // overwrite the same y variable when running backward; special steps are
+  // taken to handle this case. For each duplicate output, rename it to an
+  // alias (the original name plus an offset), append an `add` op for its
+  // operator, and finally sum all the alias variables into the final output
+  // variable y.
  using Pos = std::pair<size_t, std::shared_ptr<OperatorBase>>;
  std::list<Pos> insert_position;
  for (auto& dup_output_op : dup_output_ops) {
    const std::string& name = dup_output_op.first;
    auto& dup_op = dup_output_op.second;
    // no duplicate output
    if (dup_op.size() == 1) continue;
-    std::vector<std::string> dup_outputs;
+    // process the duplicate outputs
+    std::vector<std::string> dup_outputs;
    for (size_t i = 0; i < dup_op.size(); ++i) {
+      // rename each duplicate output to an alias
      auto op_offset = dup_op[i];
      dup_outputs.push_back(name + "@RENAME@" + std::to_string(uid) + "@" +
                            std::to_string(i));
      net->ops_[op_offset]->Rename(name, dup_outputs.back());
    }
+    // collect the offsets at which to insert an `add` op for each alias group
    insert_position.push_back(
-        {dup_op.back(),
-         OpRegistry::CreateOp(
-             "add", {{"X", {dup_outputs}}}, {{"Out", {name}}},
-             {{"input_format",
-               std::vector<int>{0, static_cast<int>(dup_outputs.size())}}})});
+        {dup_op.back(), OpRegistry::CreateOp("add", {{"X", {dup_outputs}}},
+                                             {{"Out", {name}}}, {})});
  }
+  // make sure the inserted `add` ops follow the BFS order; sort positions in
+  // descending order so that inserting at a larger offset first does not
+  // invalidate the smaller offsets.
  insert_position.sort(
      [](const Pos& l, const Pos& r) { return l.first > r.first; });
  for (auto& pos : insert_position) {
    net->InsertOp(pos.first + 1, pos.second);
  }
  } else {
    std::shared_ptr<OperatorBase> grad_op = OpRegistry::CreateGradOp(forwardOp);
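Note: continuing the hypothetical trace, suppose this recursion level drew `uid == 3`. The alias names for the two writers of `w@GRAD` are built exactly as in the renaming loop above:

#include <string>
#include <vector>

int main() {
  std::vector<std::string> dup_outputs;
  for (size_t i = 0; i < 2; ++i) {
    dup_outputs.push_back("w@GRAD@RENAME@" + std::to_string(3) + "@" +
                          std::to_string(i));
  }
  // dup_outputs == {"w@GRAD@RENAME@3@0", "w@GRAD@RENAME@3@1"}. The two
  // writers are renamed to the aliases, and an "add" op with X == the
  // aliases and Out == "w@GRAD" is inserted right after the later writer.
  return 0;
}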
@@ -176,7 +186,7 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
  net->SetType("@GENERATED_BACKWARD@");
  net->CompleteAddOp();
  return net;
}
}  // namespace framework
// See header for comments
std::shared_ptr<OperatorBase> Backward(
......
@@ -298,8 +298,8 @@ def pnpair_evaluator(
        input,
        label,
        info,
-        name=None,
-        weight=None, ):
+        weight=None,
+        name=None, ):
"""
Positive-negative pair rate Evaluator which adapts to rank task like
learning to rank. This evaluator must contain at least three layers.
@@ -308,27 +308,31 @@ def pnpair_evaluator(
    .. code-block:: python

-       eval = pnpair_evaluator(input, info, label)
+       eval = pnpair_evaluator(input, label, info)

-    :param name: Evaluator name.
-    :type name: None|basestring
    :param input: Input layer name. The output prediction of the network.
    :type input: LayerOutput
    :param label: Label layer name.
    :type label: LayerOutput
-    :param info: Label layer name. (TODO, explaination)
+    :param info: Info layer name. (TODO, explanation)
    :type info: LayerOutput
    :param weight: Weight layer name. It should be a matrix with size
                   [sample_num, 1]. (TODO, explanation)
    :type weight: LayerOutput
+    :param name: Evaluator name.
+    :type name: None|basestring
    """
+    if not isinstance(input, list):
+        input = [input]
+    if label:
+        input.append(label)
+    if info:
+        input.append(info)
    evaluator_base(
-        name=name,
-        type="pnpair",
        input=input,
-        label=label,
-        info=info,
-        weight=weight)
+        type="pnpair",
+        weight=weight,
+        name=name, )
@evaluator(EvaluatorAttribute.FOR_CLASSIFICATION)
......
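Note: a minimal usage sketch under the new argument order. The layer constructions are hypothetical; as the body above shows, label and info are appended to input before evaluator_base is called:

from paddle.trainer_config_helpers import *

features = data_layer(name='features', size=128)  # hypothetical input
prediction = fc_layer(input=features, size=1)     # the network's prediction
label = data_layer(name='label', size=1)          # ground-truth relevance
query = data_layer(name='query_id', size=1)       # pair-grouping info layer
pnpair_evaluator(prediction, label, query, name='pnpair')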