Unverified commit c2842377, authored by Qiyang Min and committed by GitHub

Merge pull request #13837 from velconia/add_pyramid_dnn_support

Add margin_rank_loss_op to Python
@@ -127,6 +127,7 @@ paddle.fluid.layers.relu ArgSpec(args=['x', 'name'], varargs=None, keywords=None
paddle.fluid.layers.log ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
paddle.fluid.layers.crop ArgSpec(args=['x', 'shape', 'offsets', 'name'], varargs=None, keywords=None, defaults=(None, None, None))
paddle.fluid.layers.rank_loss ArgSpec(args=['label', 'left', 'right', 'name'], varargs=None, keywords=None, defaults=(None,))
paddle.fluid.layers.margin_rank_loss ArgSpec(args=['label', 'left', 'right', 'margin', 'name'], varargs=None, keywords=None, defaults=(0.1, None))
paddle.fluid.layers.elu ArgSpec(args=['x', 'alpha', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
paddle.fluid.layers.relu6 ArgSpec(args=['x', 'threshold', 'name'], varargs=None, keywords=None, defaults=(6.0, None))
paddle.fluid.layers.pow ArgSpec(args=['x', 'factor', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
...
@@ -107,6 +107,7 @@ __all__ = [
    'log',
    'crop',
    'rank_loss',
    'margin_rank_loss',
    'elu',
    'relu6',
    'pow',
@@ -5827,6 +5828,54 @@ def rank_loss(label, left, right, name=None):
    return out

def margin_rank_loss(label, left, right, margin=0.1, name=None):
    """
    Margin Ranking Loss Layer for a ranking problem,
    which compares the left score and the right score passed in.
    The ranking loss is defined by the following equation:

    .. math::

        rank\_loss = \max(0, -label * (left - right) + margin)

    Args:
        label (Variable): Indicates whether the left score should be ranked
            higher than the right one or not.
        left (Variable): Ranking score for the left.
        right (Variable): Ranking score for the right.
        margin (float): The given margin.
        name (str|None): A name for this layer (optional). If set None, the layer
            will be named automatically.

    Returns:
        Variable: The ranking loss.

    Raises:
        ValueError: Raised when any of label, left, or right is not a Variable.

    Examples:
        .. code-block:: python

            label = fluid.layers.data(name="label", shape=[4, 1], dtype="float32")
            left = fluid.layers.data(name="left", shape=[4, 1], dtype="float32")
            right = fluid.layers.data(name="right", shape=[4, 1], dtype="float32")
            out = fluid.layers.margin_rank_loss(label, left, right)
    """
    helper = LayerHelper('margin_rank_loss', **locals())
    if not isinstance(label, Variable):
        raise ValueError("The label should be a Variable.")
    if not isinstance(left, Variable):
        raise ValueError("The left should be a Variable.")
    if not isinstance(right, Variable):
        raise ValueError("The right should be a Variable.")
    # `out` holds the element-wise loss; `act` is an intermediate output
    # consumed by the backward op.
    out = helper.create_tmp_variable(left.dtype)
    act = helper.create_tmp_variable(left.dtype)
    helper.append_op(
        type='margin_rank_loss',
        inputs={"Label": label,
                "X1": left,
                "X2": right},
        outputs={'Out': out,
                 'Activated': act},
        attrs={'margin': margin})
    return out

def pad2d(input,
          paddings=[0, 0, 0, 0],
          mode='constant',
@@ -6290,6 +6339,7 @@ def sequence_enumerate(input, win_size, pad_value=0, name=None):
        outputs={'Out': out},
        attrs={'win_size': win_size,
               'pad_value': pad_value})
    return out

def sequence_mask(x, maxlen=None, dtype='int64', name=None):
...
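
To make the ranking-loss equation in the new `margin_rank_loss` layer above concrete, here is a minimal NumPy sketch (not part of the patch; the helper name and the sample values are made up for illustration) that reproduces the element-wise computation `max(0, -label * (left - right) + margin)`:

```python
import numpy as np

def margin_rank_loss_ref(label, left, right, margin=0.1):
    """Reference computation: max(0, -label * (left - right) + margin)."""
    return np.maximum(0.0, -label * (left - right) + margin)

# label = +1 means `left` should be ranked higher than `right`,
# label = -1 means the opposite.
label = np.array([[1.0], [1.0], [-1.0], [-1.0]], dtype="float32")
left = np.array([[0.8], [0.2], [0.5], [0.1]], dtype="float32")
right = np.array([[0.3], [0.6], [0.4], [0.9]], dtype="float32")

print(margin_rank_loss_ref(label, left, right))
# [[0. ]   left is above right by more than the margin  -> no loss
#  [0.5]   left should be higher but is 0.4 below right -> loss 0.4 + margin
#  [0.2]   left should be lower but is 0.1 above right  -> loss 0.1 + margin
#  [0. ]]  left is below right by more than the margin  -> no loss
```

Feeding the same arrays through the `fluid.layers.data` variables in the docstring example should produce the same per-row loss values from the operator.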